From 276bdd67b638985fe48615252dad40f81859ded0 Mon Sep 17 00:00:00 2001 From: Christina Holt <56881914+christinaholtNOAA@users.noreply.github.com> Date: Fri, 31 Mar 2023 14:30:37 -0600 Subject: [PATCH] [develop] Adds a YAML interface for creating a Rocoto XML. (#676) Refactors the creation of a Rocoto XML to use a very generic Jinja2 template that is flexible enough to meet the needs of various workflow configurations supported by SRW. Specifically, it allows for a completely arbitrary workflow to be created under SRW, which includes the addition of completely arbitrary tasks on top of the predefined ones. --------- Co-authored-by: Michael Kavulich --- jobs/JREGIONAL_GET_OBS_CCPA | 2 +- jobs/JREGIONAL_GET_OBS_MRMS | 2 +- jobs/JREGIONAL_GET_OBS_NDAS | 2 +- parm/FV3LAM_wflow.xml | 3029 +---------------- parm/metplus/EnsembleStat_APCP01h.conf | 2 +- parm/metplus/EnsembleStat_REFC.conf | 2 +- parm/metplus/EnsembleStat_RETOP.conf | 2 +- parm/metplus/EnsembleStat_SFC.conf | 2 +- parm/metplus/EnsembleStat_UPA.conf | 2 +- parm/metplus/GridStat_APCP01h.conf | 2 +- parm/metplus/GridStat_APCP03h.conf | 2 +- parm/metplus/GridStat_APCP06h.conf | 2 +- parm/metplus/GridStat_APCP24h.conf | 2 +- parm/metplus/GridStat_REFC.conf | 2 +- parm/metplus/GridStat_RETOP.conf | 2 +- parm/metplus/PointStat_SFC.conf | 4 +- parm/metplus/PointStat_UPA.conf | 4 +- parm/wflow/aqm_all.yaml | 209 ++ parm/wflow/coldstart.yaml | 212 ++ parm/wflow/default_workflow.yaml | 44 + parm/wflow/plot.yaml | 49 + parm/wflow/post.yaml | 81 + parm/wflow/prdgen.yaml | 35 + parm/wflow/prep.yaml | 70 + parm/wflow/verify.yaml | 235 ++ parm/wflow/verify_ensgrid.yaml | 143 + scripts/exregional_aqm_lbcs.sh | 9 +- scripts/exregional_bias_correction_o3.sh | 8 +- scripts/exregional_bias_correction_pm25.sh | 10 +- scripts/exregional_get_extrn_mdl_files.sh | 8 +- scripts/exregional_get_obs_ccpa.sh | 2 +- scripts/exregional_get_obs_mrms.sh | 12 +- scripts/exregional_get_obs_ndas.sh | 90 +- scripts/exregional_make_grid.sh | 10 
+- scripts/exregional_make_ics.sh | 2 - scripts/exregional_make_lbcs.sh | 10 +- scripts/exregional_make_sfc_climo.sh | 2 - scripts/exregional_nexus_emission.sh | 11 +- scripts/exregional_nexus_gfs_sfc.sh | 9 +- scripts/exregional_nexus_post_split.sh | 9 +- scripts/exregional_point_source.sh | 8 +- scripts/exregional_post_stat_o3.sh | 8 +- scripts/exregional_pre_post_stat.sh | 14 +- scripts/exregional_run_fcst.sh | 45 +- ...exregional_run_met_ensemblestat_vx_grid.sh | 6 +- ...xregional_run_met_ensemblestat_vx_point.sh | 4 +- scripts/exregional_run_met_gridstat_vx.sh | 14 +- scripts/exregional_run_met_pb2nc_obs.sh | 1 - scripts/exregional_run_met_pcpcombine.sh | 9 +- scripts/exregional_run_met_pointstat_vx.sh | 11 +- ...exregional_run_met_pointstat_vx_ensmean.sh | 2 +- ...exregional_run_met_pointstat_vx_ensprob.sh | 2 +- scripts/exregional_run_post.sh | 1 - scripts/exregional_run_prdgen.sh | 1 - tests/WE2E/machine_suites/comprehensive | 3 +- tests/WE2E/run_WE2E_tests.py | 66 +- ...S_suite_GFS_2017_gfdlmp_regional_plot.yaml | 5 +- ...S_25km_ics_FV3GFS_lbcs_RAP_suite_HRRR.yaml | 13 +- ...S_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml | 13 +- ..._FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml | 24 +- ..._FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml | 34 +- ..._ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16.yaml | 7 +- ..._lbcs_FV3GFS_timeoffset_suite_GFS_v16.yaml | 7 +- ...FS_suite_GFS_v15_thompson_mynn_lam3km.yaml | 7 +- ...act_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml | 7 +- .../config.GST_release_public_v1.yaml | 7 +- .../config.MET_ensemble_verification.yaml | 2 +- .../config.MET_verification_only_vx.yaml | 58 +- .../config.deactivate_tasks.yaml | 13 +- .../wflow_features/config.nco_ensemble.yaml | 7 +- .../config.pregen_grid_orog_sfc_climo.yaml | 9 +- ush/config.aqm.community.yaml | 29 +- ush/config.aqm.nco.realtime.yaml | 32 +- ush/config.community.yaml | 19 +- ush/config.nco.yaml | 13 +- ush/config_defaults.yaml | 998 +----- ush/fill_jinja_template.py | 8 +- ush/generate_FV3LAM_wflow.py 
| 143 +- ush/machine/gaea.yaml | 1 + ush/machine/hera.yaml | 15 +- ush/machine/jet.yaml | 11 + ush/machine/linux.yaml | 11 + ush/machine/noaacloud.yaml | 1 + ush/machine/wcoss2.yaml | 18 + ush/python_utils/config_parser.py | 215 +- ush/python_utils/environment.py | 2 + ush/set_vx_fhr_list.sh | 2 +- ush/setup.py | 332 +- ush/valid_param_vals.yaml | 21 - 89 files changed, 2038 insertions(+), 4576 deletions(-) create mode 100644 parm/wflow/aqm_all.yaml create mode 100644 parm/wflow/coldstart.yaml create mode 100644 parm/wflow/default_workflow.yaml create mode 100644 parm/wflow/plot.yaml create mode 100644 parm/wflow/post.yaml create mode 100644 parm/wflow/prdgen.yaml create mode 100644 parm/wflow/prep.yaml create mode 100644 parm/wflow/verify.yaml create mode 100644 parm/wflow/verify_ensgrid.yaml diff --git a/jobs/JREGIONAL_GET_OBS_CCPA b/jobs/JREGIONAL_GET_OBS_CCPA index 722d38a0ac..6666c26432 100755 --- a/jobs/JREGIONAL_GET_OBS_CCPA +++ b/jobs/JREGIONAL_GET_OBS_CCPA @@ -20,7 +20,7 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_get_obs_ccpa" ${GLOBAL_VAR_DEFNS_FP} +source_config_for_task " " ${GLOBAL_VAR_DEFNS_FP} . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_GET_OBS_MRMS b/jobs/JREGIONAL_GET_OBS_MRMS index 33331fe9f5..780da368e5 100755 --- a/jobs/JREGIONAL_GET_OBS_MRMS +++ b/jobs/JREGIONAL_GET_OBS_MRMS @@ -16,7 +16,7 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_get_obs_mrms" ${GLOBAL_VAR_DEFNS_FP} +source_config_for_task " " ${GLOBAL_VAR_DEFNS_FP} . 
$USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_GET_OBS_NDAS b/jobs/JREGIONAL_GET_OBS_NDAS index 18faa0fe59..0594f32f0d 100755 --- a/jobs/JREGIONAL_GET_OBS_NDAS +++ b/jobs/JREGIONAL_GET_OBS_NDAS @@ -16,7 +16,7 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_get_obs_ndas" ${GLOBAL_VAR_DEFNS_FP} +source_config_for_task " " ${GLOBAL_VAR_DEFNS_FP} . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/parm/FV3LAM_wflow.xml b/parm/FV3LAM_wflow.xml index 494be43017..f6e30313c0 100644 --- a/parm/FV3LAM_wflow.xml +++ b/parm/FV3LAM_wflow.xml @@ -1,2987 +1,88 @@ -{# - -This is a Jinja-enabled Rocoto XML template. It is filled in using the -fill_template.py script, and is done automatically by the -generate_workflow.sh step of preparing a regional workflow configured -experiment. - -See README.xml_templating.md for information on using the Templating mechanisms. 
--#} - - - - - - - - - -{%- if sched_native_cmd %} - -{%- else %} - -{%- endif %} - - - - - - - - - -{%- if fcst_len_hrs == -1 %} - - - - -{%- endif%} - - - - - -{%- if run_tasks_metvx_det or run_tasks_metvx_ens %} - - -{%- endif%} -{%- if run_tasks_metvx_det %} - - - - - - - - -{%- endif%} -{%- if run_tasks_metvx_ens %} - - - - - - - - - - - - - - - - - - - - - - -{%- endif%} - -{%- if run_tasks_metvx_det or run_tasks_metvx_ens %} - -{%- endif %} -{%- if run_task_aqm_ics %} - - -{%- endif %} -{%- if run_task_aqm_lbcs %} - -{%- endif %} -{%- if run_task_nexus_gfs_sfc %} - -{%- endif %} -{%- if run_task_nexus_emission %} - - -{%- endif %} -{%- if run_task_fire_emission %} - -{%- endif %} -{%- if run_task_point_source %} - -{%- endif %} -{%- if run_task_pre_post_stat %} - -{%- endif %} -{%- if run_task_post_stat_o3 %} - -{%- endif %} -{%- if run_task_post_stat_pm25 %} - -{%- endif %} -{%- if run_task_bias_correction_o3 %} - -{%- endif %} -{%- if run_task_bias_correction_pm25 %} - -{%- endif %} - - - - - - - - - - - - - - - - - - - - -{%- if run_envir == "nco" %} -{%- if do_ensemble %} -@Y@m@d@H/dyn"> -@Y@m@d@H/phy"> -{%- else %} -@Y@m@d@H/dyn"> -@Y@m@d@H/phy"> -{%- endif %} - -{%- else %} -@Y@m@d@H{{ slash_ensmem_subdir }}/dyn"> -@Y@m@d@H{{ slash_ensmem_subdir }}/phy"> - -{%- endif %} - -{%- if run_envir == "nco" %} -@Y@m@d"> - -{%- else %} - - -{%- endif %} - - - - - - - - - - - -{%- if do_real_time %} - -{%- endif %} - - - -{%- if partition_default is not none %} -&ACCOUNT;&QUEUE_DEFAULT;{{ partition_default }}"> -{%- else %} -&ACCOUNT;&QUEUE_DEFAULT;"> -{%- endif %} -{%- if partition_hpss is not none %} -&ACCOUNT;&QUEUE_HPSS;{{ partition_hpss }}"> -{%- else %} -&ACCOUNT;&QUEUE_HPSS;"> -{%- endif %} -{%- if partition_fcst is not none %} -&ACCOUNT;&QUEUE_FCST;{{ partition_fcst }}"> -{%- else %} -&ACCOUNT;&QUEUE_FCST;"> -{%- endif %} - -]> - - -{# Double quotes are required inside the strftime! Expect an error from reading the template if using single quotes. 
#} - {{ cdate_first_cycl.strftime("%M %H %d %m %Y *") }} - - {{- date_first_cycl ~ " " ~ date_last_cycl ~ " " ~ cycl_freq -}} - -{%- if cycl_next != date_first_cycl %} - - {{- cycl_next ~ " " ~ date_last_cycl ~ " " ~ cycl_freq -}} - -{%- endif %} -{%- if fcst_len_hrs == -1 %} - {%- if num_fcst_len_cycl >= 1 %} - {{- date_first_cycl ~ " " ~ date_1st_last_cycl ~ " " ~ "24:00:00" -}} - {%- endif %} - {%- if num_fcst_len_cycl >= 2 %} - {{- date_2nd_cycl ~ " " ~ date_2nd_last_cycl ~ " " ~ "24:00:00" -}} - {%- endif %} - {%- if num_fcst_len_cycl >= 3 %} - {{- date_3rd_cycl ~ " " ~ date_3rd_last_cycl ~ " " ~ "24:00:00" -}} - {%- endif %} - {%- if num_fcst_len_cycl >= 4 %} - {{- date_4th_cycl ~ " " ~ date_4th_last_cycl ~ " " ~ "24:00:00" -}} - {%- endif %} -{%- endif %} - - {%- if run_envir == "nco" %} - &LOGDIR;/FV3LAM_wflow.{{ workflow_id }}.log - {%- else %} - &LOGDIR;/FV3LAM_wflow.log - {%- endif %} - - - - - - -{%- if run_task_make_grid %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_MAKE_GRID;" "&JOBSdir;/JREGIONAL_MAKE_GRID" - {{ nnodes_make_grid }}:ppn={{ ppn_make_grid }} - {{ wtime_make_grid }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_MAKE_GRID; - &LOGDIR;/&TN_MAKE_GRID;&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - - -{%- endif %} -{%- if run_task_make_orog %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_MAKE_OROG;" "&JOBSdir;/JREGIONAL_MAKE_OROG" - {{ nnodes_make_orog }}:ppn={{ ppn_make_orog }} - {{ wtime_make_orog }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_MAKE_OROG; - &LOGDIR;/&TN_MAKE_OROG;&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - - - - - &EXPTDIR;/grid/&TN_MAKE_GRID;&CMPEXT; - &RUN_TASK_MAKE_GRID;FALSE - - - - -{%- endif %} -{%- if run_task_make_sfc_climo %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_MAKE_SFC_CLIMO;" "&JOBSdir;/JREGIONAL_MAKE_SFC_CLIMO" - {{ 
nnodes_make_sfc_climo }}:ppn={{ ppn_make_sfc_climo }} - {{ wtime_make_sfc_climo }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_MAKE_SFC_CLIMO; - &LOGDIR;/&TN_MAKE_SFC_CLIMO;&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - - - - - - &EXPTDIR;/grid/&TN_MAKE_GRID;&CMPEXT; - &RUN_TASK_MAKE_GRID;FALSE - - - - &EXPTDIR;/orog/&TN_MAKE_OROG;&CMPEXT; - &RUN_TASK_MAKE_OROG;FALSE - - - - - -{%- endif %} -{%- if run_task_nexus_gfs_sfc %} - - - - {%- if do_real_time %} - &RSRV_DEFAULT; - {%- else %} - &RSRV_HPSS; - {%- endif %} - &LOAD_MODULES_RUN_TASK_FP; "&TN_NEXUS_GFS_SFC;" "&JOBSdir;/JREGIONAL_NEXUS_GFS_SFC" - {{ nnodes_nexus_gfs_sfc }}:ppn={{ ppn_nexus_gfs_sfc }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {%- if not do_real_time %} - {{ mem_nexus_gfs_sfc }} +{%- macro dependency_tree(dep_dict) %} +{%- if dep_dict is mapping %} + {%- for tag, values in dep_dict.items() %} + {%- set tag_type = tag.split("_")[0] %} + {%- if values is mapping %} + <{{ tag_type -}} {% for attr, val in values.pop("attrs", {}).items() %} {{ attr }}="{{ val }}"{%- endfor -%}{%- if tag_type in ["taskdep", "metataskdep", "taskvalid"] %}/{%- endif %}> + {%- if values.get("text") %} + {{ values.pop("text") }} + + {%- elif values %} + {{- dependency_tree(values)|indent(2) }} + + {%- endif %} + {%- else %} + <{{ tag_type|indent(2) -}}> + {{- values -}} + {%- endif %} - {%- endif %} - {{ wtime_nexus_gfs_sfc }} - &NCORES_PER_NODE; - {%- if machine not in ["WCOSS2"] %} - &SCHED_NATIVE_CMD; - {%- endif %} - &TN_NEXUS_GFS_SFC; - &LOGDIR;/&TN_NEXUS_GFS_SFC;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} - -{%- if do_real_time %} - - &COMINgfs;/gfs.@Y@m@d/@H/atmos - -{%- endif %} - - -{%- endif %} -{%- if run_task_nexus_emission %} - - - {% for h in range(0, num_split_nexus) %}{{ " %02d" % h }}{% endfor 
%} - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_NEXUS_EMISSION;" "&JOBSdir;/JREGIONAL_NEXUS_EMISSION" - {%- if machine in ["HERA"] %} - {{ nnodes_nexus_emission }}:ppn={{ ppn_nexus_emission }} - {{ native_nexus_emission }} - {%- elif machine in ["WCOSS2"] %} - {{ nnodes_nexus_emission }}:ppn={{ ppn_nexus_emission }}:tpp={{ omp_num_threads_nexus_emission }} - &SCHED_NATIVE_CMD; - {%- else %} - {{ nnodes_nexus_emission }}:ppn={{ ppn_nexus_emission }} - {%- endif %} - {{ wtime_nexus_emission }} - &NCORES_PER_NODE; - &TN_NEXUS_EMISSION;_#nspt# - &LOGDIR;/&TN_NEXUS_EMISSION;_@Y@m@d@H_s#nspt#&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - nspt#nspt# - LOGDIR&LOGDIR; - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} - -{%- if run_task_nexus_gfs_sfc %} - - - -{%- endif %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_NEXUS_POST_SPLIT;" "&JOBSdir;/JREGIONAL_NEXUS_POST_SPLIT" - {{ nnodes_nexus_post_split }}:ppn={{ ppn_nexus_post_split }} - {{ wtime_nexus_post_split }} - &NCORES_PER_NODE; - &TN_NEXUS_POST_SPLIT; - &LOGDIR;/&TN_NEXUS_POST_SPLIT;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} - - - - - - -{%- endif %} -{%- if run_task_fire_emission %} - - - - {%- if do_aqm_save_fire %} - &RSRV_HPSS; - {%- else %} - &RSRV_DEFAULT; - {%- endif %} - &LOAD_MODULES_RUN_TASK_FP; "&TN_FIRE_EMISSION;" "&JOBSdir;/JREGIONAL_FIRE_EMISSION" - {{ nnodes_fire_emission }}:ppn={{ ppn_fire_emission }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {%- if not do_real_time %} - {{ mem_fire_emission }} - {%- endif %} - {%- endif %} - {{ wtime_fire_emission }} - &NCORES_PER_NODE; - {%- if machine not in ["WCOSS2"] %} - &SCHED_NATIVE_CMD; - {%- endif %} - &TN_FIRE_EMISSION; - &LOGDIR;/&TN_FIRE_EMISSION;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H 
- subcyc@M - LOGDIR&LOGDIR; - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} - -{%- endif %} -{%- if run_task_point_source %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_POINT_SOURCE;" "&JOBSdir;/JREGIONAL_POINT_SOURCE" - {{ nnodes_point_source }}:ppn={{ ppn_point_source }} - {{ wtime_point_source }} - &NCORES_PER_NODE; - &TN_POINT_SOURCE; - &LOGDIR;/&TN_POINT_SOURCE;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} - - - - - &EXPTDIR;/grid/&TN_MAKE_GRID;&CMPEXT; - &RUN_TASK_MAKE_GRID;FALSE - - - - -{%- endif %} -{%- if run_task_get_extrn_ics %} - - - - &RSRV_HPSS; - &LOAD_MODULES_RUN_TASK_FP; "&TN_GET_EXTRN_ICS;" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES" - {{ nnodes_get_extrn_ics }}:ppn={{ ppn_get_extrn_ics }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_get_extrn_ics }} - {%- endif %} - {{ wtime_get_extrn_ics }} - &NCORES_PER_NODE; - {%- if machine not in ["WCOSS2"] %} - &SCHED_NATIVE_CMD; - {%- endif %} - &TN_GET_EXTRN_ICS; - &LOGDIR;/&TN_GET_EXTRN_ICS;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - ICS_OR_LBCSICS -{%- if do_real_time %} - - &COMINgfs;/gfs.@Y@m@d/@H/atmos - -{%- endif %} - -{%- endif %} -{%- if run_task_get_extrn_lbcs %} - - - - &RSRV_HPSS; - &LOAD_MODULES_RUN_TASK_FP; "&TN_GET_EXTRN_LBCS;" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES" - {{ nnodes_get_extrn_lbcs }}:ppn={{ ppn_get_extrn_lbcs }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_get_extrn_lbcs }} - {%- endif %} - {{ wtime_get_extrn_lbcs }} - &NCORES_PER_NODE; - {%- if machine not in ["WCOSS2"] %} - &SCHED_NATIVE_CMD; - {%- endif %} - &TN_GET_EXTRN_LBCS; - &LOGDIR;/&TN_GET_EXTRN_LBCS;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - ICS_OR_LBCSLBCS - -{%- if do_real_time %} 
- - &COMINgfs;/gfs.@Y@m@d/@H/atmos - -{%- endif %} - - -{%- endif %} - -{%- if do_ensemble %} - - - {% for m in range(1, num_ens_members+1) %}{{ "%03d " % m }}{% endfor %} - -{%- endif %} - -{%- if run_task_make_ics %} - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_MAKE_ICS;" "&JOBSdir;/JREGIONAL_MAKE_ICS" - {{ nnodes_make_ics }}:ppn={{ ppn_make_ics }} - {{ wtime_make_ics }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_MAKE_ICS;{{ uscore_ensmem_name }} - &LOGDIR;/&TN_MAKE_ICS;{{ uscore_ensmem_name }}_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} - ENSMEM_INDX#{{ ensmem_indx_name }}# - - - - - - - &EXPTDIR;/grid/&TN_MAKE_GRID;&CMPEXT; - &RUN_TASK_MAKE_GRID;FALSE - - - - &EXPTDIR;/orog/&TN_MAKE_OROG;&CMPEXT; - &RUN_TASK_MAKE_OROG;FALSE - - - - &EXPTDIR;/sfc_climo/&TN_MAKE_SFC_CLIMO;&CMPEXT; - &RUN_TASK_MAKE_SFC_CLIMO;FALSE - - - - - -{%- endif %} -{%- if run_task_make_lbcs %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_MAKE_LBCS;" "&JOBSdir;/JREGIONAL_MAKE_LBCS" - {{ nnodes_make_lbcs }}:ppn={{ ppn_make_lbcs }} - {{ wtime_make_lbcs }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_MAKE_LBCS;{{ uscore_ensmem_name }} - &LOGDIR;/&TN_MAKE_LBCS;{{ uscore_ensmem_name }}_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} - ENSMEM_INDX#{{ ensmem_indx_name }}# - - - - - - - &EXPTDIR;/grid/&TN_MAKE_GRID;&CMPEXT; - &RUN_TASK_MAKE_GRID;FALSE - - - - &EXPTDIR;/orog/&TN_MAKE_OROG;&CMPEXT; - &RUN_TASK_MAKE_OROG;FALSE - - - - &EXPTDIR;/sfc_climo/&TN_MAKE_SFC_CLIMO;&CMPEXT; - &RUN_TASK_MAKE_SFC_CLIMO;FALSE - - - - - + {%- endfor %} {%- endif %} -{%- if run_task_aqm_ics %} - {%- if not coldstart %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_AQM_ICS;" "&JOBSdir;/JREGIONAL_AQM_ICS" - {{ nnodes_aqm_ics 
}}:ppn={{ ppn_aqm_ics }} - {{ wtime_aqm_ics }} - &NCORES_PER_NODE; - &TN_AQM_EXTRN_ICS; - &LOGDIR;/&TN_AQM_EXTRN_ICS;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - PREV_CYCLE_DIR&WARMSTART_CYCLE_DIR; - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} - - - - - - &WARMSTART_CYCLE_DIR;/RESTART/@Y@m@d.@H@M@S.fv_tracer.res.tile1.nc - &WARMSTART_CYCLE_DIR;/RESTART/fv_tracer.res.tile1.nc - - - - - - {%- endif %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_AQM_ICS;" "&JOBSdir;/JREGIONAL_AQM_ICS" - {{ nnodes_aqm_ics }}:ppn={{ ppn_aqm_ics }} - {{ wtime_aqm_ics }} - &NCORES_PER_NODE; - &TN_AQM_ICS; - &LOGDIR;/&TN_AQM_ICS;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - PREV_CYCLE_DIR&COMIN_DIR; - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} - - - - - - &COMIN_DIR;/RESTART/@Y@m@d.@H@M@S.fv_tracer.res.tile1.nc - &COMIN_DIR;/RESTART/fv_tracer.res.tile1.nc - - - +{%- endmacro -%} - -{%- endif %} -{%- if run_task_aqm_lbcs %} - - +{%- macro task(name, settings) %} + - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_AQM_LBCS;" "&JOBSdir;/JREGIONAL_AQM_LBCS" - {{ nnodes_aqm_lbcs }}:ppn={{ ppn_aqm_lbcs }} - {{ wtime_aqm_lbcs }} - &NCORES_PER_NODE; - &TN_AQM_LBCS; - &LOGDIR;/&TN_AQM_LBCS;_@Y@m@d@H&LOGEXT; + {%- for key, value in settings.items() -%} + {%- if key not in ["envars", "attrs", "dependency", "nnodes", "ppn"] %} + <{{ key }}>{{ value }} + {%- endif %} + {%- endfor %} - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} + {% for var, value in settings.get("envars", {}).items() %} + {{ var }}{{ value }} + {%- endfor %} + {% if settings.get("dependency") -%} - + {{- dependency_tree(dep_dict=settings.get("dependency")) }} - + {%- endif %} -{%- endif %} -{%- if run_task_run_fcst %} - - - - &RSRV_FCST; - 
&LOAD_MODULES_RUN_TASK_FP; "&TN_RUN_FCST;" "&JOBSdir;/JREGIONAL_RUN_FCST" - {%- if machine in ["JET", "HERA", "LINUX"] %} - {{ ncores_run_fcst }} - {{ native_run_fcst }} - {%- elif machine in ["WCOSS2"] %} - {{ nnodes_run_fcst }}:ppn={{ ppn_run_fcst }}:tpp={{ omp_num_threads_run_fcst }} - &SCHED_NATIVE_CMD; - &NCORES_PER_NODE; - {%- else %} - {{ nnodes_run_fcst }}:ppn={{ ppn_run_fcst }} - &NCORES_PER_NODE; - {%- endif %} - &SCHED_NATIVE_CMD; - {{ wtime_run_fcst }} - &TN_RUN_FCST;{{ uscore_ensmem_name }} - &LOGDIR;/&TN_RUN_FCST;{{ uscore_ensmem_name }}_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} - ENSMEM_INDX#{{ ensmem_indx_name }}# - - - - - -{%- if run_task_nexus_emission %} - -{%- endif %} -{%- if run_task_fire_emission %} - -{%- endif %} -{%- if run_task_point_source %} - -{%- endif %} -{%- if run_task_aqm_ics %} - - -{%- if not coldstart %} - - - - -{%- else %} - -{%- endif %} - -{%- endif %} -{%- if run_task_aqm_lbcs %} - -{%- endif %} - - +{%- endmacro -%} - -{%- endif %} - -{%- if run_task_run_post %} -{%- if fcst_len_hrs == -1 %} - {%- for icyc in range(num_fcst_len_cycl) %} - - {% for h in range(0, fcst_len_cycl[icyc]+1) %}{{ " %03d" % h }}{% endfor %} - {%- if icyc == 0 %} - - {%- elif icyc == 1 %} - - {%- elif icyc == 2 %} - - {%- elif icyc == 3 %} - +{%- macro metatask(name, settings) %} + + {% for varname, value in settings.get("var", {}).items() %} + {{ value }} + {%- endfor %} + {%- for item, task_settings in settings.items() %} + {%- if item.split("_", 1)[0] == "task" %} + {%- if task_settings.get("command") %} + {{ task(name=item.split("_", 1)[-1], settings=task_settings)|indent(2) }} {%- endif %} - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_RUN_POST;" "&JOBSdir;/JREGIONAL_RUN_POST" - {{ nnodes_run_post }}:ppn={{ ppn_run_post }} - {{ wtime_run_post }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - {%- if icyc == 0 %} - 
&TN_RUN_POST_CYC1;{{ uscore_ensmem_name }}_f#fhr# - {%- elif icyc == 1 %} - &TN_RUN_POST_CYC2;{{ uscore_ensmem_name }}_f#fhr# - {%- elif icyc == 2 %} - &TN_RUN_POST_CYC3;{{ uscore_ensmem_name }}_f#fhr# - {%- elif icyc == 3 %} - &TN_RUN_POST_CYC4;{{ uscore_ensmem_name }}_f#fhr# + {%- elif item.split("_", 1)[0] == "metatask" %} + {{ metatask(name=item.split("_", 1)[-1], settings=task_settings)|indent(2) }} {%- endif %} - &LOGDIR;/&TN_RUN_POST;{{ uscore_ensmem_name }}_f#fhr#_@Y@m@d@H&LOGEXT; - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - fhr#fhr# - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} - ENSMEM_INDX#{{ ensmem_indx_name }}# - - - - - - &DYN_DIR;f#fhr#.nc - &PHY_DIR;f#fhr#.nc - - - - - - - {%- endfor %} - -{%- else %} - {%- if sub_hourly_post %} - - - - 000 - 00 - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_RUN_POST;" "&JOBSdir;/JREGIONAL_RUN_POST" - {{ nnodes_run_post }}:ppn={{ ppn_run_post }} - {{ wtime_run_post }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_POST;{{ uscore_ensmem_name }}_f#fhr##fmn# - &LOGDIR;/&TN_RUN_POST;{{ uscore_ensmem_name }}_f#fhr##fmn#_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - fhr#fhr# - fmn#fmn# - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} - ENSMEM_INDX#{{ ensmem_indx_name }}# - - - - - - - &DYN_DIR;f{{ first_fv3_file_tstr }}.nc - &PHY_DIR;f{{ first_fv3_file_tstr }}.nc - - - - - - - - - - - - 000 - - - {% for min in range(delta_min, 60, delta_min) %}{{ " %02d" % min }}{% endfor %} - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_RUN_POST;" "&JOBSdir;/JREGIONAL_RUN_POST" - {{ nnodes_run_post }}:ppn={{ ppn_run_post }} - {{ wtime_run_post }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_POST;{{ uscore_ensmem_name }}_f#fhr##fmn# - &LOGDIR;/&TN_RUN_POST;{{ uscore_ensmem_name }}_f#fhr##fmn#_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - 
USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - fhr#fhr# - fmn#fmn# - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} - ENSMEM_INDX#{{ ensmem_indx_name }}# - - - - - - &DYN_DIR;f#fhr#:#fmn#:00.nc - &PHY_DIR;f#fhr#:#fmn#:00.nc - - - - - - - - - - {%- endif %} - - - {%- if sub_hourly_post %} - - {% for h in range(1, fcst_len_hrs) %}{{ " %03d" % h }}{% endfor %} - - {% for min in range(0, 60, delta_min) %}{{ " %02d" % min }}{% endfor %} - - {%- else %} - - {% for h in range(0, fcst_len_hrs+1) %}{{ " %03d" % h }}{% endfor %} - - {%- endif %} - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_RUN_POST;" "&JOBSdir;/JREGIONAL_RUN_POST" - {{ nnodes_run_post }}:ppn={{ ppn_run_post }} - {{ wtime_run_post }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - {%- if sub_hourly_post %} - &TN_RUN_POST;{{ uscore_ensmem_name }}_f#fhr##fmn# - &LOGDIR;/&TN_RUN_POST;{{ uscore_ensmem_name }}_f#fhr##fmn#_@Y@m@d@H&LOGEXT; - {%- else %} - &TN_RUN_POST;{{ uscore_ensmem_name }}_f#fhr# - &LOGDIR;/&TN_RUN_POST;{{ uscore_ensmem_name }}_f#fhr#_@Y@m@d@H&LOGEXT; - {%- endif %} - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - {%- if sub_hourly_post %} - fhr#fhr# - fmn#fmn# - {%- else %} - fhr#fhr# - {%- endif %} - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} - ENSMEM_INDX#{{ ensmem_indx_name }}# - - - - - - {%- if sub_hourly_post %} - &DYN_DIR;f#fhr#:#fmn#:00.nc - &PHY_DIR;f#fhr#:#fmn#:00.nc - {%- else %} - &DYN_DIR;f#fhr#.nc - &PHY_DIR;f#fhr#.nc - {%- endif %} - - - - - - - {%- if sub_hourly_post %} - - {%- else %} - {%- endif %} - - - {%- if sub_hourly_post %} - - - - {{ "%03d" % fcst_len_hrs }} - 00 - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_RUN_POST;" "&JOBSdir;/JREGIONAL_RUN_POST" - {{ nnodes_run_post }}:ppn={{ ppn_run_post }} - {{ wtime_run_post }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_POST;{{ uscore_ensmem_name }}_f#fhr##fmn# - &LOGDIR;/&TN_RUN_POST;{{ uscore_ensmem_name 
}}_f#fhr##fmn#_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - fhr#fhr# - fmn#fmn# - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} - ENSMEM_INDX#{{ ensmem_indx_name }}# - - - - - - &DYN_DIR;f#fhr#:#fmn#:00.nc - &PHY_DIR;f#fhr#:#fmn#:00.nc - - - - - - - - {%- endif %} -{%- endif %} -{%- endif %} -{%- if run_task_run_prdgen %} - - - - {% for h in range(0, fcst_len_hrs+1) %}{{ " %03d" % h }}{% endfor %} - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_RUN_PRDGEN;" "&JOBSdir;/JREGIONAL_RUN_PRDGEN" - - {{ nnodes_run_prdgen }}:ppn={{ ppn_run_prdgen }} - {{ wtime_run_prdgen }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - - &TN_RUN_PRDGEN;{{ uscore_ensmem_name }}_f#fhr# - &LOGDIR;/&TN_RUN_PRDGEN;{{ uscore_ensmem_name }}_f#fhr#_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} - ENSMEM_INDX#{{ ensmem_indx_name }}# - fhr#fhr# - - - - - - + {%- endfor %} -{%- endif %} -{%- if run_task_plot_allvars %} - - +{%- endmacro -%} - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_PLOT_ALLVARS;" "&JOBSdir;/JREGIONAL_PLOT_ALLVARS" - {{ nnodes_plot_allvars }}:ppn={{ ppn_plot_allvars }} - {{ wtime_plot_allvars }} - &NCORES_PER_NODE; - {%- if machine not in ["WCOSS2"] %} - &SCHED_NATIVE_CMD; - {%- endif %} - &TN_PLOT_ALLVARS; - &LOGDIR;/&TN_PLOT_ALLVARS;_@Y@m@d@H&LOGEXT; + +GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} - ENSMEM_INDX#{{ ensmem_indx_name }}# +{%- for entity, value in entities.items() %} + +{%- endfor %} - - -{#- Redundant dependency to simplify jinja code. -This dependency will always evaluate to true. It is included because -rocoto does not allow empty , , and other tags. Without -it, we'd have to include more jinja if-statements here. 
-#} - TRUETRUE - {%- if write_dopost %} - - {%- elif run_task_run_post %} - - {%- endif %} - - +]> + - -{%- endif %} -{%- if run_task_pre_post_stat %} - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_PRE_POST_STAT;" "&JOBSdir;/JREGIONAL_PRE_POST_STAT" - {{ nnodes_pre_post_stat }}:ppn={{ ppn_pre_post_stat }} - {{ wtime_pre_post_stat }} - &NCORES_PER_NODE; - &TN_PRE_POST_STAT; - &LOGDIR;/&TN_PRE_POST_STAT;_@Y@m@d@H&LOGEXT; + {%- for group, cdefs in cycledefs.items() %} + {%- for cdef in cdefs %} + {{ cdef }} + {%- endfor %} + {%- endfor %} - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} + {{ log }} - -{%- if run_task_run_post %} - {%- if fcst_len_hrs == -1 %} - &COMIN_DIR;/&TN_RUN_POST;_@Y@m@d@H&CMPEXT; - {%- else %} - +{%- for item, settings in tasks.items() %} + {%- if item.split("_", 1)[0] == "task" %} + {{ task(name=item.split("_", 1)[-1], settings=settings ) }} + {%- elif item.split("_", 1)[0] == "metatask" %} + {{ metatask(name=item.split("_", 1)[-1], settings=settings ) }} {%- endif %} -{%- else %} - -{%- endif %} - - -{%- endif %} -{%- if run_task_post_stat_o3 %} - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_POST_STAT_O3;" "&JOBSdir;/JREGIONAL_POST_STAT_O3" - {{ nnodes_post_stat_o3 }}:ppn={{ ppn_post_stat_o3 }} -{%- if machine not in ["GAEA"] %} - {{ mem_post_stat_o3 }} -{%- endif %} - {{ wtime_post_stat_o3 }} - &NCORES_PER_NODE; - &TN_POST_STAT_O3; - &LOGDIR;/&TN_POST_STAT_O3;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} - - - - - - -{%- endif %} -{%- if run_task_post_stat_pm25 %} - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_POST_STAT_PM25;" "&JOBSdir;/JREGIONAL_POST_STAT_PM25" - {{ nnodes_post_stat_pm25 }}:ppn={{ ppn_post_stat_pm25 }} -{%- if machine not in ["GAEA"] %} - {{ mem_post_stat_pm25 }} -{%- 
endif %} - {{ wtime_post_stat_pm25 }} - &NCORES_PER_NODE; - &TN_POST_STAT_PM25; - &LOGDIR;/&TN_POST_STAT_PM25;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} - - - - - - -{%- endif %} -{%- if run_task_bias_correction_o3 %} - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_BIAS_CORRECTION_O3;" "&JOBSdir;/JREGIONAL_BIAS_CORRECTION_O3" - {{ nnodes_bias_correction_o3 }}:ppn={{ ppn_bias_correction_o3 }} -{%- if machine not in ["GAEA"] %} - {{ mem_bias_correction_o3 }} -{%- endif %} - {{ wtime_bias_correction_o3 }} - &NCORES_PER_NODE; - &TN_BIAS_CORRECTION_O3; - &LOGDIR;/&TN_BIAS_CORRECTION_O3;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} - - - - - - -{%- endif %} -{%- if run_task_bias_correction_pm25 %} - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_BIAS_CORRECTION_PM25;" "&JOBSdir;/JREGIONAL_BIAS_CORRECTION_PM25" - {{ nnodes_bias_correction_pm25 }}:ppn={{ ppn_bias_correction_pm25 }} -{%- if machine not in ["GAEA"] %} - {{ mem_bias_correction_pm25 }} -{%- endif %} - {{ wtime_bias_correction_pm25 }} - &NCORES_PER_NODE; - &TN_BIAS_CORRECTION_PM25; - &LOGDIR;/&TN_BIAS_CORRECTION_PM25;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - SLASH_ENSMEM_SUBDIR{{ slash_ensmem_subdir }} - - - - - - -{%- endif %} - - - -{#- -Tasks for combining (adding) hourly APCP (accumulated precipitation) from -forecasts to obtain APCP obs for longer accumulation periods (3 hours, -6 hours, etc). These are needed for downstream verification tasks (both -deterministic and ensemble). 
-#} -{%- if run_tasks_metvx_det or run_tasks_metvx_ens %} - - {%- if ("APCP" in vx_fields) %} - - {%- for accum_hh in vx_apcp_accums_hh -%} - {%- set obtype = "CCPA" %} - {%- set field = "APCP" %} - {%- set accum = accum_hh|int %} - {%- set fieldname = field ~ accum_hh ~ "h" %} - {%- set base_tn = tn_run_met_pcpcombine -%} - {%- set tn = base_tn ~ "_fcst_" ~ fieldname ~ uscore_ensmem_name -%} - {%- set maxtries = maxtries_run_met_pcpcombine_fcst -%} - {%- set nnodes = nnodes_run_met_pcpcombine_fcst -%} - {%- set ppn = ppn_run_met_pcpcombine_fcst -%} - {%- set wtime = wtime_run_met_pcpcombine_fcst -%} - - {%- set is_ens_fcst = false -%} - - {%- if (accum > 1) and (fcst_len_hrs >= accum) %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE" - {{nnodes}}:ppn={{ppn}} - {{wtime}} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - {{tn}} - &LOGDIR;/{{tn}}_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - LOGDIR&LOGDIR; - PDY@Y@m@d - cyc@H - subcyc@M - VAR{{field}} - ACCUM_HH{{accum_hh}} - obs_or_fcstfcst - OBTYPE{{obtype}} - OBS_DIR&{{obtype}}_OBS_DIR; - USCORE_ENSMEM_NAME_OR_NULL{{uscore_ensmem_name}} - SLASH_ENSMEM_SUBDIR_OR_NULL{{slash_ensmem_subdir}} - {%- if is_ens_fcst or run_tasks_metvx_ens %} - MEM_INDX_OR_NULL#{{ensmem_indx_name}}# - {%- else %} - MEM_INDX_OR_NULL - {%- endif %} - - - -{#- Redundant dependency to simplify jinja code. #} - TRUETRUE -{#- -If the post-processed forecast output needed for verification is being -generated by the TN_RUN_FCST task (by having RUN_TASK_RUN_FCST and -WRITE_DOPOST both set to TRUE, which causes UPP to be called inline, -i.e. from within the weather model), then include a dependency on the -TN_RUN_FCST task. 
-#} - {%- if run_task_run_fcst and write_dopost %} - -{#- -Otherwise, if UPP is being called separately from the forecast (by -having RUN_TASK_RUN_POST set to TRUE), then inlude a dependency on the -TN_RUN_POST metatask (which runs UPP for all forecast output hours). - -Note that in this case, we have to wait until the whole TN_RUN_POST -metatask is complete before this task can launch, i.e. we cannot launch -this task as the UPP output files for each forecast output hour become -available. This is because the loop over forecast hours for this task -is performed within MET/METplus, not here in rocoto, whereas the loop -over forecast hours for the post-processing is done by rocoto in this -xml. This may be changed in the future. -#} - {%- elif run_task_run_post %} - - {%- endif %} - - - - - {%- endif %} - {%- endfor %} - - {%- endif %} - -{%- endif %} - - - -{%- if run_task_get_obs_ccpa %} - - - - &RSRV_HPSS; - &LOAD_MODULES_RUN_TASK_FP; "&GET_OBS_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_GET_OBS_CCPA" - {{ nnodes_get_obs_ccpa }}:ppn={{ ppn_get_obs_ccpa }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_get_obs_ccpa }} - {%- endif %} - {{ wtime_get_obs_ccpa }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_GET_OBS_CCPA; - &LOGDIR;/&TN_GET_OBS_CCPA;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&CCPA_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(0, fcst_len_hrs+1) %}{{ " %02d" % h }}{% endfor %} - ACCUM01 - - -{%- endif %} - -{%- if run_task_get_obs_mrms %} - - - - &RSRV_HPSS; - &LOAD_MODULES_RUN_TASK_FP; "&GET_OBS_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_GET_OBS_MRMS" - {{ nnodes_get_obs_mrms }}:ppn={{ ppn_get_obs_mrms }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_get_obs_mrms }} - {%- endif %} - {{ wtime_get_obs_mrms }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_GET_OBS_MRMS; - &LOGDIR;/&TN_GET_OBS_MRMS;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; 
- USHdir&USHdir; - OBS_DIR&MRMS_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(0, fcst_len_hrs+1) %}{{ " %02d" % h }}{% endfor %} - SCRIPTSdir&SCRIPTSdir; - VARREFC RETOP - - -{%- endif %} - -{%- if run_task_get_obs_ndas %} - - - - &RSRV_HPSS; - &LOAD_MODULES_RUN_TASK_FP; "&GET_OBS_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_GET_OBS_NDAS" - {{ nnodes_get_obs_ndas }}:ppn={{ ppn_get_obs_ndas }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_get_obs_ndas }} - {%- endif %} - {{ wtime_get_obs_ndas }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_GET_OBS_NDAS; - &LOGDIR;/&TN_GET_OBS_NDAS;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&NDAS_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(0, fcst_len_hrs+1) %}{{ " %02d" % h }}{% endfor %} - - -{%- endif %} - - - - -{#- -Obs processing tasks that must be run if either deterministic or ensemble -verification tasks are going to be run. -#} -{%- if run_tasks_metvx_det or run_tasks_metvx_ens %} - -{#- -Task for pre-processing of NDAS observations to convert prep-buffer files -to NetCDF format. - {%- set obtype = "NDAS" %} -#} - {%- if ("SFC" in vx_fields) or ("UPA" in vx_fields) %} -{#- -This for-loop isn't strictly necessary because it loops over only one -item, but having it allows for the use of the "set" tag to define new -variables within the loop (in Jinja, it is not possible to define variables -outside a loop unless one uses a namespace, which would be overkill in -this case). So here we opt for a redundant for-loop. 
-#} - {%- for obtype in ["NDAS"] -%} - {%- set tn = tn_run_met_pb2nc_obs -%} - {%- set maxtries = maxtries_run_met_pb2nc_obs -%} - {%- set nnodes = nnodes_run_met_pb2nc_obs -%} - {%- set ppn = ppn_run_met_pb2nc_obs -%} - {%- set wtime = wtime_run_met_pb2nc_obs -%} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_PB2NC_OBS" - {{nnodes}}:ppn={{ppn}} - {{wtime}} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - {{tn}} - &LOGDIR;/{{tn}}_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - LOGDIR&LOGDIR; - PDY@Y@m@d - cyc@H - subcyc@M - VARSFC - ACCUM_HH01 - obs_or_fcstobs - OBTYPE{{obtype}} - OBS_DIR&{{obtype}}_OBS_DIR; - - - - {%- if run_task_get_obs_ndas %} - - {%- else %} -{#- -We only check for the existence of the NDAS observations directory, not -for individual prepbufr files within. This is because this and other -downstream vx tasks can complete successfully even when some obs files -are missing (the check for individual files is done by the scripts that -this task calls). -#} - &NDAS_OBS_DIR; - {%- endif %} - - - - - {%- endfor %} - - {%- endif %} - -{#- -Tasks for combining (adding) hourly APCP (accumulated precipitation) from -CCPA observations to obtain APCP obs for longer accumulation periods -(3 hours, 6 hours, etc). 
-#} - {%- if ("APCP" in vx_fields) %} - - {%- for accum_hh in vx_apcp_accums_hh -%} - {%- set obtype = "CCPA" %} - {%- set field = "APCP" -%} - {%- set accum = accum_hh|int %} - {%- set fieldname = field ~ accum_hh ~ "h" -%} - {%- set base_tn = tn_run_met_pcpcombine -%} - {%- set tn = base_tn ~ "_obs_" ~ fieldname -%} - {%- set maxtries = maxtries_run_met_pcpcombine_obs -%} - {%- set nnodes = nnodes_run_met_pcpcombine_obs -%} - {%- set ppn = ppn_run_met_pcpcombine_obs -%} - {%- set wtime = wtime_run_met_pcpcombine_obs -%} - - {%- if (accum > 1) and (fcst_len_hrs >= accum) %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE" - {{nnodes}}:ppn={{ppn}} - {{wtime}} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - {{tn}} - &LOGDIR;/{{tn}}_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - LOGDIR&LOGDIR; - PDY@Y@m@d - cyc@H - subcyc@M - VAR{{field}} - ACCUM_HH{{accum_hh}} - obs_or_fcstobs - OBTYPE{{obtype}} - OBS_DIR&{{obtype}}_OBS_DIR; - USCORE_ENSMEM_NAME_OR_NULL - SLASH_ENSMEM_SUBDIR_OR_NULL - - - - {%- if run_task_get_obs_ccpa %} - - {%- else %} -{#- -We only check for the existence of the top-level CCPA observations -directory, not the individual daily subdirectories within. This is -because this and other downstream vx tasks can complete successfully -even when some obs files are missing (the check for individual files is -done by the scripts that this task calls). 
-#} - &CCPA_OBS_DIR; - {%- endif %} - - - - - {%- endif %} - - {%- endfor %} - {%- endif %} - -{%- endif %} - - - - - -{%- if run_task_vx_gridstat %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_VX" - {{ nnodes_run_met_gridstat_vx_apcp01h }}:ppn={{ ppn_run_met_gridstat_vx_apcp01h }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_gridstat_vx_apcp01h }} - {%- endif %} - {{ wtime_run_met_gridstat_vx_apcp01h }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_GRIDSTAT_VX_APCP01H;{{ uscore_ensmem_name }} - &LOGDIR;/&TN_RUN_MET_GRIDSTAT_VX_APCP01H;{{ uscore_ensmem_name }}_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&CCPA_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(1, fcst_len_hrs+1) %}{{ " %02d" % h }}{% endfor %} - VARAPCP - ACCUM_HH01 - USCORE_ENSMEM_NAME_OR_NULL{{uscore_ensmem_name}} - SLASH_ENSMEM_SUBDIR_OR_NULL{{ slash_ensmem_subdir }} - {%- if do_ensemble %} - ENSMEM_INDX#{{ ensmem_indx_name }}# - {%- endif %} - - - -{#- Redundant dependency to simplify jinja code. 
#} - TRUETRUE - {%- if run_task_get_obs_ccpa %} - - {%- endif %} - {%- if write_dopost %} - - {%- elif run_task_run_post %} - - {%- endif %} - - - - - - {%- if fcst_len_hrs >= 3 %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_VX" - {{ nnodes_run_met_gridstat_vx_apcp03h }}:ppn={{ ppn_run_met_gridstat_vx_apcp03h }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_gridstat_vx_apcp03h }} - {%- endif %} - {{ wtime_run_met_gridstat_vx_apcp03h }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_GRIDSTAT_VX_APCP03H;{{ uscore_ensmem_name }} - &LOGDIR;/&TN_RUN_MET_GRIDSTAT_VX_APCP03H;{{ uscore_ensmem_name }}_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&CCPA_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(3, fcst_len_hrs+1, 3) %}{{ " %02d" % h }}{% endfor %} - VARAPCP - ACCUM_HH03 - USCORE_ENSMEM_NAME_OR_NULL{{uscore_ensmem_name}} - SLASH_ENSMEM_SUBDIR_OR_NULL{{ slash_ensmem_subdir }} - {%- if do_ensemble %} - ENSMEM_INDX#{{ ensmem_indx_name }}# - {%- endif %} - - - - - - - - - - {%- endif %} - - {%- if fcst_len_hrs >= 6 %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_VX" - {{ nnodes_run_met_gridstat_vx_apcp06h }}:ppn={{ ppn_run_met_gridstat_vx_apcp06h }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_gridstat_vx_apcp06h }} - {%- endif %} - {{ wtime_run_met_gridstat_vx_apcp06h }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_GRIDSTAT_VX_APCP06H;{{ uscore_ensmem_name }} - &LOGDIR;/&TN_RUN_MET_GRIDSTAT_VX_APCP06H;{{ uscore_ensmem_name }}_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&CCPA_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(6, fcst_len_hrs+1, 6) %}{{ " %02d" % h }}{% endfor %} - VARAPCP - ACCUM_HH06 - 
USCORE_ENSMEM_NAME_OR_NULL{{uscore_ensmem_name}} - SLASH_ENSMEM_SUBDIR_OR_NULL{{ slash_ensmem_subdir }} - {%- if do_ensemble %} - ENSMEM_INDX#{{ ensmem_indx_name }}# - {%- endif %} - - - - - - - - - - {%- endif %} - - {%- if fcst_len_hrs >= 24 %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_VX" - {{ nnodes_run_met_gridstat_vx_apcp24h }}:ppn={{ ppn_run_met_gridstat_vx_apcp24h }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_gridstat_vx_apcp24h }} - {%- endif %} - {{ wtime_run_met_gridstat_vx_apcp24h }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_GRIDSTAT_VX_APCP24H;{{ uscore_ensmem_name }} - &LOGDIR;/&TN_RUN_MET_GRIDSTAT_VX_APCP24H;{{ uscore_ensmem_name }}_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&CCPA_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(24, fcst_len_hrs+1, 24) %}{{ " %02d" % h }}{% endfor %} - VARAPCP - ACCUM_HH24 - USCORE_ENSMEM_NAME_OR_NULL{{uscore_ensmem_name}} - SLASH_ENSMEM_SUBDIR_OR_NULL{{ slash_ensmem_subdir }} - {%- if do_ensemble %} - ENSMEM_INDX#{{ ensmem_indx_name }}# - {%- endif %} - - - - - - - - - - {%- endif %} - - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_VX" - {{ nnodes_run_met_gridstat_vx_refc }}:ppn={{ ppn_run_met_gridstat_vx_refc }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_gridstat_vx_refc }} - {%- endif %} - {{ wtime_run_met_gridstat_vx_refc }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_GRIDSTAT_VX_REFC;{{ uscore_ensmem_name }} - &LOGDIR;/&TN_RUN_MET_GRIDSTAT_VX_REFC;{{ uscore_ensmem_name }}_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&MRMS_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(1, fcst_len_hrs+1) %}{{ " %02d" % h }}{% endfor %} - VARREFC - 
USCORE_ENSMEM_NAME_OR_NULL{{uscore_ensmem_name}} - SLASH_ENSMEM_SUBDIR_OR_NULL{{ slash_ensmem_subdir }} - {%- if do_ensemble %} - ENSMEM_INDX#{{ ensmem_indx_name }}# - {%- endif %} - - - -{#- Redundant dependency to simplify jinja code. #} - TRUETRUE - {%- if run_task_get_obs_mrms %} - - {%- endif %} - {%- if write_dopost %} - - {%- elif run_task_run_post %} - - {%- endif %} - - - - - - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_VX" - {{ nnodes_run_met_gridstat_vx_retop }}:ppn={{ ppn_run_met_gridstat_vx_retop }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_gridstat_vx_retop }} - {%- endif %} - {{ wtime_run_met_gridstat_vx_retop }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_GRIDSTAT_VX_RETOP;{{ uscore_ensmem_name }} - &LOGDIR;/&TN_RUN_MET_GRIDSTAT_VX_RETOP;{{ uscore_ensmem_name }}_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&MRMS_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(1, fcst_len_hrs+1) %}{{ " %02d" % h }}{% endfor %} - VARRETOP - USCORE_ENSMEM_NAME_OR_NULL{{uscore_ensmem_name}} - SLASH_ENSMEM_SUBDIR_OR_NULL{{ slash_ensmem_subdir }} - {%- if do_ensemble %} - ENSMEM_INDX#{{ ensmem_indx_name }}# - {%- endif %} - - - -{#- Redundant dependency to simplify jinja code. 
#} - TRUETRUE - {%- if run_task_get_obs_mrms %} - - {%- endif %} - {%- if write_dopost %} - - {%- elif run_task_run_post %} - - {%- endif %} - - - - -{%- endif %} - -{%- if run_task_vx_pointstat %} - - - &RSRV_DEFAULT; - - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_POINTSTAT_VX" - {{ nnodes_run_met_pointstat_vx_sfc }}:ppn={{ ppn_run_met_pointstat_vx_sfc }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_pointstat_vx_sfc }} - {%- endif %} - {{ wtime_run_met_pointstat_vx_sfc }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_POINTSTAT_VX_SFC;{{ uscore_ensmem_name }} - &LOGDIR;/&TN_RUN_MET_POINTSTAT_VX_SFC;{{ uscore_ensmem_name }}_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&NDAS_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - VARSFC - ACCUM_HH - USCORE_ENSMEM_NAME_OR_NULL{{uscore_ensmem_name}} - SLASH_ENSMEM_SUBDIR_OR_NULL{{ slash_ensmem_subdir }} - {%- if do_ensemble %} - ENSMEM_INDX#{{ ensmem_indx_name }}# - {%- endif %} - - - -{#- Redundant dependency to simplify jinja code. 
#} - TRUETRUE - {%- if run_task_get_obs_ndas %} - - {%- endif %} - {%- if write_dopost %} - - {%- elif run_task_run_post %} - - {%- endif %} - - - - - - - - &RSRV_DEFAULT; - - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_POINTSTAT_VX" - {{ nnodes_run_met_pointstat_vx_upa }}:ppn={{ ppn_run_met_pointstat_vx_upa }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_pointstat_vx_upa }} - {%- endif %} - {{ wtime_run_met_pointstat_vx_upa }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_POINTSTAT_VX_UPA;{{ uscore_ensmem_name }} - &LOGDIR;/&TN_RUN_MET_POINTSTAT_VX_UPA;{{ uscore_ensmem_name }}_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&NDAS_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - VARUPA - ACCUM_HH - USCORE_ENSMEM_NAME_OR_NULL{{uscore_ensmem_name}} - SLASH_ENSMEM_SUBDIR_OR_NULL{{ slash_ensmem_subdir }} - {%- if do_ensemble %} - ENSMEM_INDX#{{ ensmem_indx_name }}# - {%- endif %} - - - -{#- Redundant dependency to simplify jinja code. 
#} - TRUETRUE - {%- if run_task_get_obs_ndas %} - - {%- endif %} - {%- if write_dopost %} - - {%- elif run_task_run_post %} - - {%- endif %} - - - - - -{%- endif %} - -{%- if do_ensemble %} - -{%- endif %} - -{%- if run_task_vx_ensgrid %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_ENSEMBLESTAT_VX_GRID" - {{ nnodes_run_met_ensemblestat_vx_apcp01h }}:ppn={{ ppn_run_met_ensemblestat_vx_apcp01h }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_ensemblestat_vx_apcp01h }} - {%- endif %} - {{ wtime_run_met_ensemblestat_vx_apcp01h }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_ENSEMBLESTAT_VX_APCP01H; - &LOGDIR;/&TN_RUN_MET_ENSEMBLESTAT_VX_APCP01H;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&CCPA_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(1, fcst_len_hrs+1) %}{{ " %02d" % h }}{% endfor %} - VARAPCP - ACCUM_HH01 - - - - - - - - {%- if fcst_len_hrs >= 3 %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_ENSEMBLESTAT_VX_GRID" - {{ nnodes_run_met_ensemblestat_vx_apcp03h }}:ppn={{ ppn_run_met_ensemblestat_vx_apcp03h }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_ensemblestat_vx_apcp03h }} - {%- endif %} - {{ wtime_run_met_ensemblestat_vx_apcp03h }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_ENSEMBLESTAT_VX_APCP03H; - &LOGDIR;/&TN_RUN_MET_ENSEMBLESTAT_VX_APCP03H;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&CCPA_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(3, fcst_len_hrs+1, 3) %}{{ " %02d" % h }}{% endfor %} - VARAPCP - ACCUM_HH03 - - - - - - - {%- endif %} - - {%- if fcst_len_hrs >= 6 %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_ENSEMBLESTAT_VX_GRID" - {{ 
nnodes_run_met_ensemblestat_vx_apcp06h }}:ppn={{ ppn_run_met_ensemblestat_vx_apcp06h }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_ensemblestat_vx_apcp06h }} - {%- endif %} - {{ wtime_run_met_ensemblestat_vx_apcp06h }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_ENSEMBLESTAT_VX_APCP06H; - &LOGDIR;/&TN_RUN_MET_ENSEMBLESTAT_VX_APCP06H;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&CCPA_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(6, fcst_len_hrs+1, 6) %}{{ " %02d" % h }}{% endfor %} - VARAPCP - ACCUM_HH06 - - - - - - - {%- endif %} - - {%- if fcst_len_hrs >= 24 %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_ENSEMBLESTAT_VX_GRID" - {{ nnodes_run_met_ensemblestat_vx_apcp24h }}:ppn={{ ppn_run_met_ensemblestat_vx_apcp24h }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_ensemblestat_vx_apcp24h }} - {%- endif %} - {{ wtime_run_met_ensemblestat_vx_apcp24h }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_ENSEMBLESTAT_VX_APCP24H; - &LOGDIR;/&TN_RUN_MET_ENSEMBLESTAT_VX_APCP24H;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&CCPA_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(24, fcst_len_hrs+1, 24) %}{{ " %02d" % h }}{% endfor %} - VARAPCP - ACCUM_HH24 - - - - - - - {%- endif %} - - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_ENSEMBLESTAT_VX_GRID" - {{ nnodes_run_met_ensemblestat_vx_refc }}:ppn={{ ppn_run_met_ensemblestat_vx_refc }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_ensemblestat_vx_refc }} - {%- endif %} - {{ wtime_run_met_ensemblestat_vx_refc }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_ENSEMBLESTAT_VX_REFC; - &LOGDIR;/&TN_RUN_MET_ENSEMBLESTAT_VX_REFC;_@Y@m@d@H&LOGEXT; - - 
GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&MRMS_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(1, fcst_len_hrs+1) %}{{ " %02d" % h }}{% endfor %} - VARREFC - - - - - - - - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_ENSEMBLESTAT_VX_GRID" - {{ nnodes_run_met_ensemblestat_vx_retop }}:ppn={{ ppn_run_met_ensemblestat_vx_retop }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_ensemblestat_vx_retop }} - {%- endif %} - {{ wtime_run_met_ensemblestat_vx_retop }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_ENSEMBLESTAT_VX_RETOP; - &LOGDIR;/&TN_RUN_MET_ENSEMBLESTAT_VX_RETOP;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&MRMS_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(1, fcst_len_hrs+1) %}{{ " %02d" % h }}{% endfor %} - VARRETOP - - - - - - -{%- endif %} - -{%- if run_task_vx_enspoint %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_ENSEMBLESTAT_VX_POINT" - {{ nnodes_run_met_ensemblestat_vx_sfc }}:ppn={{ ppn_run_met_ensemblestat_vx_sfc }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_ensemblestat_vx_sfc }} - {%- endif %} - {{ wtime_run_met_ensemblestat_vx_sfc }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_ENSEMBLESTAT_VX_SFC; - &LOGDIR;/&TN_RUN_MET_ENSEMBLESTAT_VX_SFC;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&NDAS_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(0, fcst_len_hrs+1) %}{{ " %02d" % h }}{% endfor %} - VARSFC - - - - - - - - - - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_ENSEMBLESTAT_VX_POINT" - {{ nnodes_run_met_ensemblestat_vx_upa }}:ppn={{ ppn_run_met_ensemblestat_vx_upa }} - {%- if machine not in ["GAEA", 
"NOAACLOUD"] %} - {{ mem_run_met_ensemblestat_vx_upa }} - {%- endif %} - {{ wtime_run_met_ensemblestat_vx_upa }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_ENSEMBLESTAT_VX_UPA; - &LOGDIR;/&TN_RUN_MET_ENSEMBLESTAT_VX_UPA;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&NDAS_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(0, fcst_len_hrs+1) %}{{ " %02d" % h }}{% endfor %} - VARUPA - - - - - - - - - -{%- endif %} - -{%- if run_task_vx_ensgrid %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_VX_ENSMEAN" - {{ nnodes_run_met_gridstat_vx_ensmean_apcp01h }}:ppn={{ ppn_run_met_gridstat_vx_ensmean_apcp01h }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_gridstat_vx_ensmean_apcp01h }} - {%- endif %} - {{ wtime_run_met_gridstat_vx_ensmean_apcp01h }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP01H; - &LOGDIR;/&TN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP01H;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&CCPA_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(1, fcst_len_hrs+1) %}{{ " %02d" % h }}{% endfor %} - VARAPCP - ACCUM_HH01 - - - - - - - - {%- if fcst_len_hrs >= 3 %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_VX_ENSMEAN" - {{ nnodes_run_met_gridstat_vx_ensmean_apcp03h }}:ppn={{ ppn_run_met_gridstat_vx_ensmean_apcp03h }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_gridstat_vx_ensmean_apcp03h }} - {%- endif %} - {{ wtime_run_met_gridstat_vx_ensmean_apcp03h }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP03H; - &LOGDIR;/&TN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP03H;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&CCPA_OBS_DIR; - PDY@Y@m@d - cyc@H - 
subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(3, fcst_len_hrs+1, 3) %}{{ " %02d" % h }}{% endfor %} - VARAPCP - ACCUM_HH03 - - - - - - - {%- endif %} - - {%- if fcst_len_hrs >= 6 %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_VX_ENSMEAN" - {{ nnodes_run_met_gridstat_vx_ensmean_apcp06h }}:ppn={{ ppn_run_met_gridstat_vx_ensmean_apcp06h }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_gridstat_vx_ensmean_apcp06h }} - {%- endif %} - {{ wtime_run_met_gridstat_vx_ensmean_apcp06h }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP06H; - &LOGDIR;/&TN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP06H;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&CCPA_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(6, fcst_len_hrs+1, 6) %}{{ " %02d" % h }}{% endfor %} - VARAPCP - ACCUM_HH06 - - - - - - - {%- endif %} - - {%- if fcst_len_hrs >= 24 %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_VX_ENSMEAN" - {{ nnodes_run_met_gridstat_vx_ensmean_apcp24h }}:ppn={{ ppn_run_met_gridstat_vx_ensmean_apcp24h }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_gridstat_vx_ensmean_apcp24h }} - {%- endif %} - {{ wtime_run_met_gridstat_vx_ensmean_apcp24h }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP24H; - &LOGDIR;/&TN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP24H;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&CCPA_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(24, fcst_len_hrs+1, 24) %}{{ " %02d" % h }}{% endfor %} - VARAPCP - ACCUM_HH24 - - - - - - - {%- endif %} -{%- endif %} - -{%- if run_task_vx_enspoint %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" 
"&JOBSdir;/JREGIONAL_RUN_MET_POINTSTAT_VX_ENSMEAN" - {{ nnodes_run_met_pointstat_vx_ensmean_sfc }}:ppn={{ ppn_run_met_pointstat_vx_ensmean_sfc }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_pointstat_vx_ensmean_sfc }} - {%- endif %} - {{ wtime_run_met_pointstat_vx_ensmean_sfc }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_POINTSTAT_VX_ENSMEAN_SFC; - &LOGDIR;/&TN_RUN_MET_POINTSTAT_VX_ENSMEAN_SFC;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&NDAS_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - VARSFC - ACCUM_HH - - - - - - - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_POINTSTAT_VX_ENSMEAN" - {{ nnodes_run_met_pointstat_vx_ensmean_upa }}:ppn={{ ppn_run_met_pointstat_vx_ensmean_upa }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_pointstat_vx_ensmean_upa }} - {%- endif %} - {{ wtime_run_met_pointstat_vx_ensmean_upa }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_POINTSTAT_VX_ENSMEAN_UPA; - &LOGDIR;/&TN_RUN_MET_POINTSTAT_VX_ENSMEAN_UPA;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&NDAS_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - VARUPA - ACCUM_HH - - - - - - -{%- endif %} - -{%- if run_task_vx_ensgrid %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_VX_ENSPROB" - {{ nnodes_run_met_gridstat_vx_ensprob_apcp01h }}:ppn={{ ppn_run_met_gridstat_vx_ensprob_apcp01h }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_gridstat_vx_ensprob_apcp01h }} - {%- endif %} - {{ wtime_run_met_gridstat_vx_ensprob_apcp01h }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP01H; - &LOGDIR;/&TN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP01H;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&CCPA_OBS_DIR; - PDY@Y@m@d - cyc@H 
- subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(1, fcst_len_hrs+1) %}{{ " %02d" % h }}{% endfor %} - VARAPCP - ACCUM_HH01 - - - - - - - - {%- if fcst_len_hrs >= 3 %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_VX_ENSPROB" - {{ nnodes_run_met_gridstat_vx_ensprob_apcp03h }}:ppn={{ ppn_run_met_gridstat_vx_ensprob_apcp03h }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_gridstat_vx_ensprob_apcp03h }} - {%- endif %} - {{ wtime_run_met_gridstat_vx_ensprob_apcp03h }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP03H; - &LOGDIR;/&TN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP03H;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&CCPA_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(3, fcst_len_hrs+1, 3) %}{{ " %02d" % h }}{% endfor %} - VARAPCP - ACCUM_HH03 - - - - - - - {%- endif %} - - {%- if fcst_len_hrs >= 6 %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_VX_ENSPROB" - {{ nnodes_run_met_gridstat_vx_ensprob_apcp06h }}:ppn={{ ppn_run_met_gridstat_vx_ensprob_apcp06h }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_gridstat_vx_ensprob_apcp06h }} - {%- endif %} - {{ wtime_run_met_gridstat_vx_ensprob_apcp06h }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP06H; - &LOGDIR;/&TN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP06H;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&CCPA_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(6, fcst_len_hrs+1, 6) %}{{ " %02d" % h }}{% endfor %} - VARAPCP - ACCUM_HH06 - - - - - - - {%- endif %} - - {%- if fcst_len_hrs >= 24 %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_VX_ENSPROB" - {{ 
nnodes_run_met_gridstat_vx_ensprob_apcp24h }}:ppn={{ ppn_run_met_gridstat_vx_ensprob_apcp24h }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_gridstat_vx_ensprob_apcp24h }} - {%- endif %} - {{ wtime_run_met_gridstat_vx_ensprob_apcp24h }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP24H; - &LOGDIR;/&TN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP24H;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&CCPA_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(24, fcst_len_hrs+1, 24) %}{{ " %02d" % h }}{% endfor %} - VARAPCP - ACCUM_HH24 - - - - - - - {%- endif %} - - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_VX_ENSPROB" - {{ nnodes_run_met_gridstat_vx_ensprob_refc }}:ppn={{ ppn_run_met_gridstat_vx_ensprob_refc }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_gridstat_vx_ensprob_refc }} - {%- endif %} - {{ wtime_run_met_gridstat_vx_ensprob_refc }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_GRIDSTAT_VX_ENSPROB_REFC; - &LOGDIR;/&TN_RUN_MET_GRIDSTAT_VX_ENSPROB_REFC;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&MRMS_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(1, fcst_len_hrs+1) %}{{ " %02d" % h }}{% endfor %} - VARREFC - - - - - - - - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_VX_ENSPROB" - {{ nnodes_run_met_gridstat_vx_ensprob_retop }}:ppn={{ ppn_run_met_gridstat_vx_ensprob_retop }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_gridstat_vx_ensprob_retop }} - {%- endif %} - {{ wtime_run_met_gridstat_vx_ensprob_retop }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_GRIDSTAT_VX_ENSPROB_RETOP; - &LOGDIR;/&TN_RUN_MET_GRIDSTAT_VX_ENSPROB_RETOP;_@Y@m@d@H&LOGEXT; - - 
GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&MRMS_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - FHR {% for h in range(1, fcst_len_hrs+1) %}{{ " %02d" % h }}{% endfor %} - VARRETOP - - - - - - -{%- endif %} - -{%- if run_task_vx_enspoint %} - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_POINTSTAT_VX_ENSPROB" - {{ nnodes_run_met_pointstat_vx_ensprob_sfc }}:ppn={{ ppn_run_met_pointstat_vx_ensprob_sfc }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_pointstat_vx_ensprob_sfc }} - {%- endif %} - {{ wtime_run_met_pointstat_vx_ensprob_sfc }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_POINTSTAT_VX_ENSPROB_SFC; - &LOGDIR;/&TN_RUN_MET_POINTSTAT_VX_ENSPROB_SFC;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&NDAS_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - VARSFC - ACCUM_HH - - - - - - - - - - &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&VX_LOCAL_MODULE_FN;" "&JOBSdir;/JREGIONAL_RUN_MET_POINTSTAT_VX_ENSPROB" - {{ nnodes_run_met_pointstat_vx_ensprob_upa }}:ppn={{ ppn_run_met_pointstat_vx_ensprob_upa }} - {%- if machine not in ["GAEA", "NOAACLOUD"] %} - {{ mem_run_met_pointstat_vx_ensprob_upa }} - {%- endif %} - {{ wtime_run_met_pointstat_vx_ensprob_upa }} - &NCORES_PER_NODE; - &SCHED_NATIVE_CMD; - &TN_RUN_MET_POINTSTAT_VX_ENSPROB_UPA; - &LOGDIR;/&TN_RUN_MET_POINTSTAT_VX_ENSPROB_UPA;_@Y@m@d@H&LOGEXT; - - GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; - USHdir&USHdir; - OBS_DIR&NDAS_OBS_DIR; - PDY@Y@m@d - cyc@H - subcyc@M - LOGDIR&LOGDIR; - VARUPA - ACCUM_HH - - - - - - -{%- endif %} +{%- endfor %} diff --git a/parm/metplus/EnsembleStat_APCP01h.conf b/parm/metplus/EnsembleStat_APCP01h.conf index f0de205047..a61677674a 100644 --- a/parm/metplus/EnsembleStat_APCP01h.conf +++ b/parm/metplus/EnsembleStat_APCP01h.conf @@ -221,7 +221,7 @@ STAGING_DIR = {OUTPUT_BASE}/stage/APCP_01 # 
FCST_ENSEMBLE_STAT_INPUT_TEMPLATE - comma separated list of ensemble members # or a single line, - filename wildcard characters may be used, ? or *. -FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {ENV[MEM_STAR]}{ENV[POSTPRD]}{ENV[NET]}.t{init?fmt=%H}z*.prslev.f{lead?fmt=%HHH}.{ENV[POST_OUTPUT_DOMAIN_NAME]}.grib2 +FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {ENV[NET]}.t{init?fmt=%H}z*.prslev.f{lead?fmt=%HHH}.{ENV[POST_OUTPUT_DOMAIN_NAME]}.grib2 # Template to look for point observations. # Example precip24_2010010112.nc diff --git a/parm/metplus/EnsembleStat_REFC.conf b/parm/metplus/EnsembleStat_REFC.conf index 4ccab0c980..71b1945199 100644 --- a/parm/metplus/EnsembleStat_REFC.conf +++ b/parm/metplus/EnsembleStat_REFC.conf @@ -220,7 +220,7 @@ STAGING_DIR = {OUTPUT_BASE}/stage/REFC # FCST_ENSEMBLE_STAT_INPUT_TEMPLATE - comma separated list of ensemble members # or a single line, - filename wildcard characters may be used, ? or *. -FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {ENV[MEM_STAR]}{ENV[POSTPRD]}{ENV[NET]}.t{init?fmt=%H}z*.prslev.f{lead?fmt=%HHH}.{ENV[POST_OUTPUT_DOMAIN_NAME]}.grib2 +FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {ENV[NET]}.t{init?fmt=%H}z*.prslev.f{lead?fmt=%HHH}.{ENV[POST_OUTPUT_DOMAIN_NAME]}.grib2 # Template to look for point observations. # Example precip24_2010010112.nc diff --git a/parm/metplus/EnsembleStat_RETOP.conf b/parm/metplus/EnsembleStat_RETOP.conf index e05fbeffc7..506f95456b 100644 --- a/parm/metplus/EnsembleStat_RETOP.conf +++ b/parm/metplus/EnsembleStat_RETOP.conf @@ -223,7 +223,7 @@ STAGING_DIR = {OUTPUT_BASE}/stage/RETOP # FCST_ENSEMBLE_STAT_INPUT_TEMPLATE - comma separated list of ensemble members # or a single line, - filename wildcard characters may be used, ? or *. 
-FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {ENV[MEM_STAR]}{ENV[POSTPRD]}{ENV[NET]}.t{init?fmt=%H}z*.prslev.f{lead?fmt=%HHH}.{ENV[POST_OUTPUT_DOMAIN_NAME]}.grib2 +FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {ENV[NET]}.t{init?fmt=%H}z*.prslev.f{lead?fmt=%HHH}.{ENV[POST_OUTPUT_DOMAIN_NAME]}.grib2 # Template to look for point observations. # Example precip24_2010010112.nc diff --git a/parm/metplus/EnsembleStat_SFC.conf b/parm/metplus/EnsembleStat_SFC.conf index 78ded851a3..125fae1781 100644 --- a/parm/metplus/EnsembleStat_SFC.conf +++ b/parm/metplus/EnsembleStat_SFC.conf @@ -273,7 +273,7 @@ STAGING_DIR = {OUTPUT_BASE}/stage/SFC # FCST_ENSEMBLE_STAT_INPUT_DIR. # FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = - {ENV[MEM_STAR]}{ENV[POSTPRD]}{ENV[NET]}.t{init?fmt=%H}z*.prslev.f{lead?fmt=%HHH}.{ENV[POST_OUTPUT_DOMAIN_NAME]}.grib2 + {ENV[NET]}.t{init?fmt=%H}z*.prslev.f{lead?fmt=%HHH}.{ENV[POST_OUTPUT_DOMAIN_NAME]}.grib2 # Template to look for point observations. # Example precip24_2010010112.nc diff --git a/parm/metplus/EnsembleStat_UPA.conf b/parm/metplus/EnsembleStat_UPA.conf index 41823a1984..a825834030 100644 --- a/parm/metplus/EnsembleStat_UPA.conf +++ b/parm/metplus/EnsembleStat_UPA.conf @@ -343,7 +343,7 @@ STAGING_DIR = {OUTPUT_BASE}/stage/UPA # FCST_ENSEMBLE_STAT_INPUT_DIR. # FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = - {ENV[MEM_STAR]}{ENV[POSTPRD]}{ENV[NET]}.t{init?fmt=%H}z*.prslev.f{lead?fmt=%HHH}.{ENV[POST_OUTPUT_DOMAIN_NAME]}.grib2 + {ENV[NET]}.t{init?fmt=%H}z*.prslev.f{lead?fmt=%HHH}.{ENV[POST_OUTPUT_DOMAIN_NAME]}.grib2 # Template to look for point observations. 
# Example precip24_2010010112.nc diff --git a/parm/metplus/GridStat_APCP01h.conf b/parm/metplus/GridStat_APCP01h.conf index 7a5b61ed5e..08c60fe915 100644 --- a/parm/metplus/GridStat_APCP01h.conf +++ b/parm/metplus/GridStat_APCP01h.conf @@ -76,7 +76,7 @@ GRID_STAT_REGRID_SHAPE = SQUARE #GRID_STAT_GRID_WEIGHT_FLAG = # Name to identify model (forecast) data in output -MODEL = {ENV[VX_FCST_MODEL_NAME]}{ENV[USCORE_ENSMEM_NAME_OR_NULL]} +MODEL = {ENV[VX_FCST_MODEL_NAME]}_mem{ENV[ENSMEM_INDX]} FCST_NATIVE_DATA_TYPE = GRIB # Name to identify observation data in output diff --git a/parm/metplus/GridStat_APCP03h.conf b/parm/metplus/GridStat_APCP03h.conf index f861ea584c..c16d875e8b 100644 --- a/parm/metplus/GridStat_APCP03h.conf +++ b/parm/metplus/GridStat_APCP03h.conf @@ -85,7 +85,7 @@ GRID_STAT_REGRID_SHAPE = SQUARE # suffix to MODEL that identifies the forecast ensemble member. This # makes it easier to identify each curve. # -MODEL = {ENV[VX_FCST_MODEL_NAME]}{ENV[USCORE_ENSMEM_NAME_OR_NULL]} +MODEL = {ENV[VX_FCST_MODEL_NAME]}_mem{ENV[ENSMEM_INDX]} FCST_NATIVE_DATA_TYPE = GRIB # diff --git a/parm/metplus/GridStat_APCP06h.conf b/parm/metplus/GridStat_APCP06h.conf index 6c02a0dfdf..eb33a5ab8c 100644 --- a/parm/metplus/GridStat_APCP06h.conf +++ b/parm/metplus/GridStat_APCP06h.conf @@ -85,7 +85,7 @@ GRID_STAT_REGRID_SHAPE = SQUARE # suffix to MODEL that identifies the forecast ensemble member. This # makes it easier to identify each curve. # -MODEL = {ENV[VX_FCST_MODEL_NAME]}{ENV[USCORE_ENSMEM_NAME_OR_NULL]} +MODEL = {ENV[VX_FCST_MODEL_NAME]}_mem{ENV[ENSMEM_INDX]} FCST_NATIVE_DATA_TYPE = GRIB # diff --git a/parm/metplus/GridStat_APCP24h.conf b/parm/metplus/GridStat_APCP24h.conf index f45ec4037a..2d284f123f 100644 --- a/parm/metplus/GridStat_APCP24h.conf +++ b/parm/metplus/GridStat_APCP24h.conf @@ -85,7 +85,7 @@ GRID_STAT_REGRID_SHAPE = SQUARE # suffix to MODEL that identifies the forecast ensemble member. This # makes it easier to identify each curve. 
# -MODEL = {ENV[VX_FCST_MODEL_NAME]}{ENV[USCORE_ENSMEM_NAME_OR_NULL]} +MODEL = {ENV[VX_FCST_MODEL_NAME]}_mem{ENV[ENSMEM_INDX]} FCST_NATIVE_DATA_TYPE = GRIB # diff --git a/parm/metplus/GridStat_REFC.conf b/parm/metplus/GridStat_REFC.conf index 5d0c20693d..156b450e30 100644 --- a/parm/metplus/GridStat_REFC.conf +++ b/parm/metplus/GridStat_REFC.conf @@ -76,7 +76,7 @@ GRID_STAT_INTERP_TYPE_WIDTH = 1 GRID_STAT_GRID_WEIGHT_FLAG = NONE # Name to identify model (forecast) data in output -MODEL = {ENV[VX_FCST_MODEL_NAME]}{ENV[USCORE_ENSMEM_NAME_OR_NULL]} +MODEL = {ENV[VX_FCST_MODEL_NAME]}_mem{ENV[ENSMEM_INDX]} FCST_NATIVE_DATA_TYPE = GRIB # Name to identify observation data in output diff --git a/parm/metplus/GridStat_RETOP.conf b/parm/metplus/GridStat_RETOP.conf index 1851623361..77ec2fb76a 100644 --- a/parm/metplus/GridStat_RETOP.conf +++ b/parm/metplus/GridStat_RETOP.conf @@ -76,7 +76,7 @@ GRID_STAT_INTERP_TYPE_WIDTH = 1 GRID_STAT_GRID_WEIGHT_FLAG = NONE # Name to identify model (forecast) data in output -MODEL = {ENV[VX_FCST_MODEL_NAME]}{ENV[USCORE_ENSMEM_NAME_OR_NULL]} +MODEL = {ENV[VX_FCST_MODEL_NAME]}_mem{ENV[ENSMEM_INDX]} FCST_NATIVE_DATA_TYPE = GRIB # Name to identify observation data in output diff --git a/parm/metplus/PointStat_SFC.conf b/parm/metplus/PointStat_SFC.conf index 07f96a1558..6517bb4740 100644 --- a/parm/metplus/PointStat_SFC.conf +++ b/parm/metplus/PointStat_SFC.conf @@ -49,7 +49,7 @@ LOG_POINT_STAT_VERBOSITY = 2 # # Specify the name of the METplus log file. # -LOG_METPLUS = {LOG_DIR}/metplus.log.{ENV[LOG_SUFFIX]}_SFC{ENV[USCORE_ENSMEM_NAME_OR_NULL]}_{ENV[CDATE]} +LOG_METPLUS = {LOG_DIR}/metplus.log.{ENV[LOG_SUFFIX]}_SFC_mem{ENV[ENSMEM_INDX]}_{ENV[CDATE]} # # Specify the location and name of the final METplus conf file. # @@ -121,7 +121,7 @@ POINT_STAT_OFFSETS = 0 # suffix to MODEL that identifies the forecast ensemble member. This # makes it easier to identify each curve. 
# -MODEL = {ENV[VX_FCST_MODEL_NAME]}{ENV[USCORE_ENSMEM_NAME_OR_NULL]} +MODEL = {ENV[VX_FCST_MODEL_NAME]}_mem{ENV[ENSMEM_INDX]} POINT_STAT_DESC = NA # diff --git a/parm/metplus/PointStat_UPA.conf b/parm/metplus/PointStat_UPA.conf index 8479d102f8..693e74c1a9 100644 --- a/parm/metplus/PointStat_UPA.conf +++ b/parm/metplus/PointStat_UPA.conf @@ -48,7 +48,7 @@ LOG_POINT_STAT_VERBOSITY = 2 # # Specify the name of the METplus log file. # -LOG_METPLUS = {LOG_DIR}/metplus.log.{ENV[LOG_SUFFIX]}_UPA{ENV[USCORE_ENSMEM_NAME_OR_NULL]}_{ENV[CDATE]} +LOG_METPLUS = {LOG_DIR}/metplus.log.{ENV[LOG_SUFFIX]}_UPA_mem{ENV[ENSMEM_INDX]}_{ENV[CDATE]} # # Specify the location and name of the final METplus conf file. # @@ -120,7 +120,7 @@ POINT_STAT_OFFSETS = 0 # suffix to MODEL that identifies the forecast ensemble member. This # makes it easier to identify each curve. # -MODEL = {ENV[VX_FCST_MODEL_NAME]}{ENV[USCORE_ENSMEM_NAME_OR_NULL]} +MODEL = {ENV[VX_FCST_MODEL_NAME]}_mem{ENV[ENSMEM_INDX]} POINT_STAT_DESC = NA # diff --git a/parm/wflow/aqm_all.yaml b/parm/wflow/aqm_all.yaml new file mode 100644 index 0000000000..b41d7e39be --- /dev/null +++ b/parm/wflow/aqm_all.yaml @@ -0,0 +1,209 @@ +default_aqm_task: &default_aqm + account: '&ACCOUNT;' + attrs: + cycledefs: forecast + maxtries: '2' + envars: &default_vars + GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' + USHdir: '&USHdir;' + PDY: !cycstr "@Y@m@d" + cyc: !cycstr "@H" + nprocs: '{{ parent.nnodes * parent.ppn // 1 }}' + subcyc: !cycstr "@M" + LOGDIR: !cycstr "&LOGDIR;" + SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' + memory: 2G + native: '{{ platform.SCHED_NATIVE_CMD }}' + nnodes: 1 + nodes: '{{ nnodes }}:ppn={{ ppn }}' + nodesize: "&NCORES_PER_NODE;" + partition: '{% if platform.get("PARTITION_DEFAULT") %}&PARTITION_DEFAULT;{% else %}None{% endif %}' + ppn: 1 + queue: '&QUEUE_DEFAULT;' + walltime: 00:30:00 + +task_nexus_gfs_sfc: + <<: *default_aqm + command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_gfs_sfc" 
"&JOBSdir;/JREGIONAL_NEXUS_GFS_SFC"' + partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + dependency: + or: + and: + streq: + left: do_real_time + right: '{% if workflow.DO_REAL_TIME %}do_real_time{% endif %}' + datadep_gfs: + attrs: + age: 00:00:00:05 + text: '&COMINgfs;/gfs.@Y@m@d/@H/atmos' + streq: + left: retro + right: '{% if not workflow.DO_REAL_TIME %}retro{% endif %}' + +metatask_nexus_emission: + var: + nspt: '{% for h in range(0, cpl_aqm_parm.NUM_SPLIT_NEXUS) %}{{ " %02d" % h }}{% endfor %}' + task_nexus_emission_#nspt#: + <<: *default_aqm + command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_emission" "&JOBSdir;/JREGIONAL_NEXUS_EMISSION"' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + nnodes: 4 + ppn: '{{ task_nexus_emission.PPN_NEXUS_EMISSION // 1 }}' + walltime: 01:00:00 + envars: + <<: *default_vars + nspt: "#nspt#" + dependency: + taskdep: + attrs: + task: nexus_gfs_sfc + +task_nexus_post_split: + <<: *default_aqm + command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_post_split" "&JOBSdir;/JREGIONAL_NEXUS_POST_SPLIT"' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + dependency: + metataskdep: + attrs: + metatask: nexus_emission + +task_fire_emission: + <<: *default_aqm + command: '&LOAD_MODULES_RUN_TASK_FP; "fire_emission" "&JOBSdir;/JREGIONAL_FIRE_EMISSION"' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + +task_point_source: + <<: *default_aqm + command: '&LOAD_MODULES_RUN_TASK_FP; "point_source" "&JOBSdir;/JREGIONAL_POINT_SOURCE"' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + walltime: 01:00:00 + dependency: + or_make_grid: + datadep_grid: + attrs: + age: 00:00:00:05 + text: '{{ task_make_grid.GRID_DIR }}/make_grid_task_complete.txt' + streq: + left: staged_grid + right: '{% if not rocoto.get("tasks", {}).get("task_make_grid") %}staged_grid{% endif %}' + +task_aqm_ics_ext: + <<: *default_aqm + attrs: + cycledefs: 
at_start + maxtries: '2' + command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_ics" "&JOBSdir;/JREGIONAL_AQM_ICS"' + envars: + <<: *default_vars + PREV_CYCLE_DIR: '&WARMSTART_CYCLE_DIR;' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + dependency: + and: + taskdep: + attrs: + task: make_ics_mem000 + or: + datadep_date_tag_tracer: + attrs: + age: 00:00:00:05 + text: !cycstr '&WARMSTART_CYCLE_DIR;/RESTART/@Y@m@d.@H@M@S.fv_tracer.res.tile1.nc' + datadep_tracer: + attrs: + age: 00:00:00:05 + text: !cycstr '&WARMSTART_CYCLE_DIR;/RESTART/fv_tracer.res.tile1.nc' + +task_aqm_ics: + <<: *default_aqm + attrs: + cycledefs: cycled + maxtries: '2' + command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_ics" "&JOBSdir;/JREGIONAL_AQM_ICS"' + envars: + <<: *default_vars + PREV_CYCLE_DIR: '&COMIN_DIR;' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + dependency: + and: + taskdep: + attrs: + task: make_ics_mem000 + or: + datadep_date_tag_tracer: + attrs: + age: 00:00:00:05 + text: '&COMIN_DIR;/RESTART/@Y@m@d.@H@M@S.fv_tracer.res.tile1.nc' + datadep_tracer: + attrs: + age: 00:00:00:05 + text: &COMIN_DIR;/RESTART/fv_tracer.res.tile1.nc + +task_aqm_lbcs: + <<: *default_aqm + command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_lbcs" "&JOBSdir;/JREGIONAL_AQM_LBCS"' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + ppn: 24 + dependency: + taskdep: + attrs: + task: make_lbcs + +task_pre_post_stat: + <<: *default_aqm + command: '&LOAD_MODULES_RUN_TASK_FP; "pre_post_stat" "&JOBSdir;/JREGIONAL_PRE_POST_STAT"' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + dependency: + taskdep: + attrs: + task: run_fcst_mem000 + #or_do_post: + # and_inline_post: # If inline post ran, wait on the forecast task to complete + # not: + # taskvalid: + # attrs: + # task: run_post_mem000_f000 + # and_run_post: # If post was meant to run, wait on the whole post metatask + # taskvalid: + # attrs: + # task: run_post_mem000_f000 + # metataskdep: + # attrs: + # metatask: run_post_mem000 + +task_post_stat_o3: 
+ <<: *default_aqm + command: '&LOAD_MODULES_RUN_TASK_FP; "post_stat_o3" "&JOBSdir;/JREGIONAL_POST_STAT_O3"' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + memory: 120G + dependency: + taskdep: + attrs: + task: pre_post_stat + +task_post_stat_pm25: + <<: *default_aqm + command: '&LOAD_MODULES_RUN_TASK_FP; "post_stat_pm25" "&JOBSdir;/JREGIONAL_POST_STAT_PM25"' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + memory: 120G + dependency: + attrs: + task: pre_post_stat + +task_bias_correction_o3: + <<: *default_aqm + command: '&LOAD_MODULES_RUN_TASK_FP; "bias_correction_o3" "&JOBSdir;/JREGIONAL_BIAS_CORRECTION_O3"' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + memory: 120G + dependency: + attrs: + task: pre_post_stat + +task_bias_correction_pm25: + <<: *default_aqm + command: '&LOAD_MODULES_RUN_TASK_FP; "bias_correction_pm25" "&JOBSdir;/JREGIONAL_BIAS_CORRECTION_PM25"' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + memory: 120G + dependency: + attrs: + task: pre_post_stat + diff --git a/parm/wflow/coldstart.yaml b/parm/wflow/coldstart.yaml new file mode 100644 index 0000000000..aae31a07a7 --- /dev/null +++ b/parm/wflow/coldstart.yaml @@ -0,0 +1,212 @@ +default_task: &default_task + account: '&ACCOUNT;' + attrs: + cycledefs: forecast + maxtries: '1' + envars: &default_vars + GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' + USHdir: '&USHdir;' + PDY: !cycstr "@Y@m@d" + cyc: !cycstr "@H" + nprocs: '{{ parent.nnodes * parent.ppn }}' + subcyc: !cycstr "@M" + LOGDIR: !cycstr "&LOGDIR;" + ENSMEM_INDX: '#mem#' + native: '{{ platform.SCHED_NATIVE_CMD }}' + nodes: '{{ nnodes }}:ppn={{ ppn }}' + nodesize: "&NCORES_PER_NODE;" + partition: '{% if platform.get("PARTITION_DEFAULT") %}&PARTITION_DEFAULT;{% else %}None{% endif %}' + queue: '&QUEUE_DEFAULT;' + walltime: 00:30:00 + +task_get_extrn_ics: + <<: *default_task + command: '&LOAD_MODULES_RUN_TASK_FP; "get_extrn_ics" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"' + envars: + <<: 
*default_vars + ICS_OR_LBCS: ICS + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + memory: 2G + nnodes: 1 + partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}' + ppn: 1 + queue: '&QUEUE_HPSS;' + walltime: 00:45:00 + dependency: + or: + and: + streq: + left: do_real_time + right: '{% if workflow.DO_REAL_TIME %}do_real_time{% endif %}' + datadep_gfs: + attrs: + age: 00:00:00:05 + text: '&COMINgfs;/gfs.@Y@m@d/@H/atmos' + streq: + left: retro + right: '{% if not workflow.DO_REAL_TIME %}retro{% endif %}' + +task_get_extrn_lbcs: + <<: *default_task + command: '&LOAD_MODULES_RUN_TASK_FP; "get_extrn_lbcs" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"' + envars: + <<: *default_vars + ICS_OR_LBCS: LBCS + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + memory: 2G + nnodes: 1 + partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}' + ppn: 1 + queue: '&QUEUE_HPSS;' + walltime: 00:45:00 + dependency: + or: + and: + streq: + left: do_real_time + right: '{% if workflow.DO_REAL_TIME %}do_real_time{% endif %}' + datadep_gfs: + attrs: + age: 00:00:00:05 + text: '&COMINgfs;/gfs.@Y@m@d/@H/atmos' + streq: + left: retro + right: '{% if not workflow.DO_REAL_TIME %}retro{% endif %}' + +metatask_run_ensemble: + var: + mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' + task_make_ics_mem#mem#: + <<: *default_task + command: '&LOAD_MODULES_RUN_TASK_FP; "make_ics" "&JOBSdir;/JREGIONAL_MAKE_ICS"' + envars: + <<: *default_vars + SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + nnodes: 4 + ppn: 12 + dependency: + and: &make_ics_deps + taskdep_get_extrn: + attrs: + task: get_extrn_ics + or_grid: + datadep_grid: + attrs: + age: 00:00:00:05 + text: '{{ task_make_grid.GRID_DIR }}/make_grid_task_complete.txt' + streq: + left: staged_grid + right: 
'{% if not rocoto.get("tasks", {}).get("task_make_grid") %}staged_grid{% endif %}' + or_orog: + datadep_orog: + attrs: + age: 00:00:00:05 + text: '{{ task_make_orog.OROG_DIR }}/make_orog_task_complete.txt' + streq: + left: staged_orog + right: '{% if not rocoto.get("tasks", {}).get("task_make_orog") %}staged_orog{% endif %}' + or_sfc_climo: + datadep_sfc_climo: + attrs: + age: 00:00:00:05 + text: '{{ task_make_sfc_climo.SFC_CLIMO_DIR }}/make_sfc_climo_task_complete.txt' + streq: + left: staged_sfc_climo + right: '{% if not rocoto.get("tasks", {}).get("task_make_sfc_climo") %}staged_sfc_climo{% endif %}' + + task_make_lbcs_mem#mem#: + <<: *default_task + command: '&LOAD_MODULES_RUN_TASK_FP; "make_lbcs" "&JOBSdir;/JREGIONAL_MAKE_LBCS"' + envars: + <<: *default_vars + SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + nnodes: 4 + ppn: 12 + dependency: + and: + <<: *make_ics_deps + taskdep_get_extrn: + attrs: + task: get_extrn_lbcs + + task_run_fcst_mem#mem#: + <<: *default_task + command: '&LOAD_MODULES_RUN_TASK_FP; "run_fcst" "&JOBSdir;/JREGIONAL_RUN_FCST"' + envars: + <<: *default_vars + SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' + nprocs: + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + nodesize: '&NCORES_PER_NODE;' + nnodes: '{{ task_run_fcst.NNODES_RUN_FCST // 1 }}' + partition: '{% if platform.get("PARTITION_FCST") %}&PARTITION_FCST;{% else %}None{% endif %}' + ppn: '{{ task_run_fcst.PPN_RUN_FCST // 1 }}' + queue: '&QUEUE_FCST;' + walltime: 04:30:00 + dependency: + and: + taskdep_make_ics: + attrs: + task: make_ics_mem#mem# + taskdep_make_lbcs: + attrs: + task: make_lbcs_mem#mem# + or_nexus_emission: + not: + taskvalid: + attrs: + task: nexus_post_split + taskdep: + attrs: + task: nexus_post_split + or_fire_emission: + not: + taskvalid: + attrs: + task: fire_emission + taskdep: + attrs: + task: fire_emission + or_point_source: + not: + taskvalid: + attrs: + task: point_source + taskdep: + attrs: 
+ task: point_source + or_aqm_ics: + and_no_aqm_ics: + not: + taskvalid: + attrs: + task: aqm_ics_ext + not: + taskvalid: + attrs: + task: aqm_ics + and_aqm_atstart: + taskvalid: + attrs: + task: aqm_ics_ext + taskdep: + attrs: + task: aqm_ics_ext + and_aqm_cycled: + taskvalid: + attrs: + task: aqm_ics + taskdep: + attrs: + task: aqm_ics + or_aqm_lbcs: + not: + taskvalid: + attrs: + task: aqm_lbcs + taskdep: + attrs: + task: aqm_lbcs + diff --git a/parm/wflow/default_workflow.yaml b/parm/wflow/default_workflow.yaml new file mode 100644 index 0000000000..fa64250320 --- /dev/null +++ b/parm/wflow/default_workflow.yaml @@ -0,0 +1,44 @@ +# +# The defaults that cover a majority of our workflow test experiments. +# +rocoto: + entities: + ACCOUNT: '{{ user.ACCOUNT }}' + CCPA_OBS_DIR: '{{ platform.CCPA_OBS_DIR }}' + COMIN_DIR: '{% if user.RUN_ENVIR == "nco" %}{{ nco.COMIN_BASEDIR }}/{{ nco.RUN }}.@Y@m@d/@H {% else %}{{ nco.COMIN_BASEDIR }}/@Y@m@d@H{% endif %}' + COMINgfs: '{{ platform.get("COMINgfs") }}' + FCST_DIR: '{{ nco.DATAROOT }}/run_fcst.{{ workflow.WORKFLOW_ID }}_@Y@m@d@H' + GLOBAL_VAR_DEFNS_FP: '{{ workflow.GLOBAL_VAR_DEFNS_FP }}' + JOBSdir: '{{ user.JOBSdir }}' + LOAD_MODULES_RUN_TASK_FP: '{{ workflow.LOAD_MODULES_RUN_TASK_FP }}' + LOGDIR: '{% if user.RUN_ENVIR == "nco" %}{{"{}/@Y@m@d".format(nco.LOGBASEDIR)}}{% else %}{{nco.LOGBASEDIR }}{% endif %}' + LOGEXT: '{% if user.RUN_ENVIR == "nco" %}{{".{}.log".format(workflow.WORKFLOW_ID)}}{% else %}{{".log"}}{% endif %}' + MRMS_OBS_DIR: '{{ platform.MRMS_OBS_DIR }}' + NCORES_PER_NODE: '{{ platform.NCORES_PER_NODE }}' + NDAS_OBS_DIR: '{{ platform.NDAS_OBS_DIR }}' + NET: '{{ nco.NET }}' + PARTITION_DEFAULT: '{{ platform.get("PARTITION_DEFAULT") }}' + PARTITION_FCST: '{{ platform.get("PARTITION_FCST") }}' + PARTITION_HPSS: '{{ platform.get("PARTITION_HPSS") }}' + QUEUE_DEFAULT: '{{ platform.get("QUEUE_DEFAULT") }}' + QUEUE_FCST: '{{ platform.get("QUEUE_FCST") }}' + QUEUE_HPSS: '{{ platform.get("QUEUE_HPSS") }}' + RUN: 
'{{ nco.RUN }}' + SCRIPTSdir: '{{ user.SCRIPTSdir }}' + SLASH_ENSMEM_SUBDIR: '{% if global.DO_ENSEMBLE %}{{ "/mem#mem#" }}{% else %}{{ "/" }}{% endif %}' + USHdir: '{{ user.USHdir }}' + WARMSTART_CYCLE_DIR: '{{ workflow.WARMSTART_CYCLE_DIR }}' + WORKFLOW_ID: '{{ workflow.WORKFLOW_ID }}' + attrs: + cyclethrottle: "20" + realtime: "F" + scheduler: '{{ platform.SCHED }}' + taskthrottle: "1000" + cycledefs: + at_start: + - !startstopfreq ['{{workflow.DATE_FIRST_CYCL}}', '{{workflow.DATE_FIRST_CYCL}}', '{{workflow.INCR_CYCL_FREQ}}'] + forecast: + - !startstopfreq ['{{workflow.DATE_FIRST_CYCL}}', '{{workflow.DATE_LAST_CYCL}}', '{{workflow.INCR_CYCL_FREQ}}'] + log: !cycstr '&LOGDIR;/FV3LAM_wflow.{% if user.RUN_ENVIR == "nco" %}{{ workflow.WORKFLOW_ID + "." }}{% endif %}log' + tasks: + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}' diff --git a/parm/wflow/plot.yaml b/parm/wflow/plot.yaml new file mode 100644 index 0000000000..7433d49362 --- /dev/null +++ b/parm/wflow/plot.yaml @@ -0,0 +1,49 @@ +# Settings that will run tasks needed per-experiment to create "fix +# files" for the stationary grid. 
+ +default_task_plot: &default_task + account: '&ACCOUNT;' + attrs: + cycledefs: forecast + maxtries: '1' + envars: &default_envars + GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' + USHdir: '&USHdir;' + PDY: !cycstr "@Y@m@d" + cyc: !cycstr "@H" + subcyc: !cycstr "@M" + LOGDIR: !cycstr "&LOGDIR;" + SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' + ENSMEM_INDX: '#mem#' + nprocs: '{{ nnodes * ppn }}' + native: '{{ platform.SCHED_NATIVE_CMD }}' + nnodes: 1 + nodes: '{{ nnodes }}:ppn={{ ppn }}' + nodesize: "&NCORES_PER_NODE;" + partition: '{% if platform.get("PARTITION_DEFAULT") %}&PARTITION_DEFAULT;{% else %}None{% endif %}' + ppn: 24 + queue: '&QUEUE_DEFAULT;' + walltime: 01:00:00 + +task_plot_allvars: + <<: *default_task + command: '&LOAD_MODULES_RUN_TASK_FP; "plot_allvars" "&JOBSdir;/JREGIONAL_MAKE_GRID"' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + dependency: + or_do_post: &post_files_exist + and_run_post: # If post was meant to run, wait on the whole post metatask + taskvalid: + attrs: + task: run_post_mem000_f000 + metataskdep: + attrs: + metatask: run_ens_post + and_inline_post: # If inline post ran, wait on the forecast task to complete + not: + taskvalid: + attrs: + task: run_post_mem000_f000 + taskdep: + attrs: + task: run_fcst_mem000 + diff --git a/parm/wflow/post.yaml b/parm/wflow/post.yaml new file mode 100644 index 0000000000..0f80981a8c --- /dev/null +++ b/parm/wflow/post.yaml @@ -0,0 +1,81 @@ +default_task_post: &default_task + account: '&ACCOUNT;' + attrs: + cycledefs: '#cycledef#' + maxtries: '2' + command: '&LOAD_MODULES_RUN_TASK_FP; "run_post" "&JOBSdir;/JREGIONAL_RUN_POST"' + envars: &default_vars + GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' + USHdir: '&USHdir;' + PDY: !cycstr '@Y@m@d' + cyc: !cycstr '@H' + fhr: '#fhr#' + SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' + ENSMEM_INDX: '#mem#' + nprocs: '{{ parent.nnodes * parent.ppn }}' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + native: '{{ platform.SCHED_NATIVE_CMD }}' + 
nodes: '{{ nnodes }}:ppn={{ ppn }}' + nnodes: 2 + ppn: 24 + nodesize: "&NCORES_PER_NODE;" + partition: '{% if platform.get("PARTITION_DEFAULT") %}&PARTITION_DEFAULT;{% else %}None{% endif %}' + queue: '&QUEUE_DEFAULT;' + walltime: 00:15:00 + +metatask_run_ens_post: + var: + mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' + metatask_run_post: + var: + fhr: '{% for h in range(0, workflow.LONG_FCST_LEN+1) %}{{ " %03d" % h }}{% endfor %}' + cycledef: '{% for h in range(0, workflow.LONG_FCST_LEN+1) %}{% if h <= workflow.FCST_LEN_HRS %}forecast {% else %}long_forecast {% endif %}{% endfor %}' + task_run_post_mem#mem#_f#fhr#: + <<: *default_task + dependency: + or: + taskdep: + attrs: + task: run_fcst_mem#mem# + and: + datadep_dyn: + text: !cycstr '&FCST_DIR;&SLASH_ENSMEM_SUBDIR;/dynf#fhr#.nc' + attrs: + age: '05:00' + datadep_phy: + text: !cycstr '&FCST_DIR;&SLASH_ENSMEM_SUBDIR;/phyf#fhr#.nc' + attrs: + age: '05:00' + + metatask_run_sub_hourly_post: + var: + fhr: '{% for h in range(workflow.FCST_LEN_HRS) %}{{ " %03d" % h }}{% endfor %}' + metatask_sub_hourly_post: + var: + fmn: '{% for min in range(0, 60, task_run_post.DT_SUBHOURLY_POST_MNTS) %}{{ " %02d" % min }}{% endfor %}' + task_run_post_mem#mem#_f#fhr##fmn#: &subhourly_run_post_task + <<: *default_task + envars: + <<: *default_vars + fmn: '#fmn#' + dependency: + or: + taskdep: + attrs: + task: run_fcst_mem#mem# + and: + datadep_dyn: + text: !cycstr '&FCST_DIR;&SLASH_ENSMEM_SUBDIR;/dynf#fhr#:#fmn#:00.nc' + attrs: + age: '05:00' + datadep_phy: + text: !cycstr '&FCST_DIR;&SLASH_ENSMEM_SUBDIR;/phyf#fhr#:#fmn#:00.nc' + attrs: + age: '05:00' + + metatask_sub_hourly_last_hour_post: + var: + fhr: '{{ " %03d" % workflow.FCST_LEN_HRS }}' + fmn: '00' + task_run_post_mem#mem#_f#fhr##fmn#: + <<: *subhourly_run_post_task diff --git a/parm/wflow/prdgen.yaml b/parm/wflow/prdgen.yaml new file mode 100644 index 
0000000000..c88455ad4c --- /dev/null +++ b/parm/wflow/prdgen.yaml @@ -0,0 +1,35 @@ +metatask_run_prdgen: + var: + mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' + metatask_run_prdgen_hrs_mem#mem#: + var: + fhr: '{% for h in range(0, workflow.LONG_FCST_LEN+1) %}{{ " %03d" % h }}{% endfor %}' + cycledef: '{% for h in range(0, workflow.LONG_FCST_LEN+1) %}{% if h <= workflow.FCST_LEN_HRS %}forecast {% else %}long_forecast {% endif %}{% endfor %}' + task_run_prdgen_mem#mem#_f#fhr#: + account: '&ACCOUNT;' + attrs: + cycledefs: '#cycledef#' + maxtries: 1 + command: '&LOAD_MODULES_RUN_TASK_FP; "run_prdgen" "&JOBSdir;/JREGIONAL_RUN_PRDGEN"' + envars: + GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' + USHdir: '&USHdir;' + PDY: !cycstr '@Y@m@d' + cyc: !cycstr '@H' + fhr: '#fhr#' + SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' + ENSMEM_INDX: '#mem#' + nprocs: '{{ parent.nnodes * parent.ppn }}' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + native: '{{ platform.SCHED_NATIVE_CMD }}' + nodes: '{{ nnodes }}:ppn={{ ppn }}' + nodesize: "&NCORES_PER_NODE;" + nnodes: 1 + partition: '{% if platform.get("PARTITION_DEFAULT") %}&PARTITION_DEFAULT;{% else %}None{% endif %}' + ppn: 22 + queue: '&QUEUE_DEFAULT;' + walltime: 00:30:00 + dependency: + taskdep: + attrs: + task: run_post_mem#mem#_f#fhr# diff --git a/parm/wflow/prep.yaml b/parm/wflow/prep.yaml new file mode 100644 index 0000000000..e22fa2dd52 --- /dev/null +++ b/parm/wflow/prep.yaml @@ -0,0 +1,70 @@ +# Settings that will run tasks needed per-experiment to create "fix +# files" for the stationary grid. 
+ +default_task_prep: &default_task + account: '&ACCOUNT;' + attrs: + cycledefs: at_start + maxtries: '2' + envars: &default_envars + GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' + USHdir: '&USHdir;' + PDY: !cycstr "@Y@m@d" + cyc: !cycstr "@H" + subcyc: !cycstr "@M" + LOGDIR: !cycstr "&LOGDIR;" + nprocs: '{{ parent.nnodes * parent.ppn }}' + native: '{{ platform.SCHED_NATIVE_CMD }}' + nodes: '{{ nnodes }}:ppn={{ ppn }}' + nnodes: 1 + nodesize: "&NCORES_PER_NODE;" + ppn: 24 + partition: '{% if platform.get("PARTITION_DEFAULT") %}&PARTITION_DEFAULT;{% else %}None{% endif %}' + queue: '&QUEUE_DEFAULT;' + walltime: 00:20:00 + +task_make_grid: + <<: *default_task + command: '&LOAD_MODULES_RUN_TASK_FP; "make_grid" "&JOBSdir;/JREGIONAL_MAKE_GRID"' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + +task_make_orog: + <<: *default_task + command: '&LOAD_MODULES_RUN_TASK_FP; "make_orog" "&JOBSdir;/JREGIONAL_MAKE_OROG"' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + dependency: + or: &make_grid_satisfied + and: + taskvalid: + attrs: + task: make_grid + taskdep_make_grid: + attrs: + task: make_grid + datadep: + attrs: + age: 00:00:00:05 + text: '{{ task_make_grid.GRID_DIR }}/make_grid_task_complete.txt' + +task_make_sfc_climo: + <<: *default_task + command: '&LOAD_MODULES_RUN_TASK_FP; "make_sfc_climo" "&JOBSdir;/JREGIONAL_MAKE_SFC_CLIMO"' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + nnodes: 2 + dependency: + and: + or_make_grid: + <<: *make_grid_satisfied + or_make_orog: + and: + taskvalid: + attrs: + task: make_orog + taskdep_make_orog: + attrs: + task: make_orog + datadep: + attrs: + age: 00:00:00:05 + text: '{{ task_make_orog.OROG_DIR }}/make_orog_task_complete.txt' + diff --git a/parm/wflow/verify.yaml b/parm/wflow/verify.yaml new file mode 100644 index 0000000000..325c5ab2ab --- /dev/null +++ b/parm/wflow/verify.yaml @@ -0,0 +1,235 @@ +default_task_verify: &default_task + account: '&ACCOUNT;' + attrs: + cycledefs: forecast + maxtries: 
'1' + envars: &default_vars + GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' + USHdir: '&USHdir;' + PDY: !cycstr '@Y@m@d' + cyc: !cycstr "@H" + subcyc: !cycstr "@M" + LOGDIR: !cycstr "&LOGDIR;" + nprocs: '{{ parent.nnodes * parent.ppn }}' + FHR: '{% for h in range(0, workflow.FCST_LEN_HRS+1) %}{{ " %02d" % h }}{% endfor %}' + SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' + ENSMEM_INDX: "#mem#" + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + memory: '{% if user.MACHINE not in ["WCOSS2", "NOAACLOUD"] %}{{ "2G" }}{% endif %}' + native: '{{ platform.SCHED_NATIVE_CMD }}' + nnodes: 1 + nodes: '{{ nnodes }}:ppn={{ ppn }}' + nodesize: '&NCORES_PER_NODE;' + partition: '{% if platform.get("PARTITION_DEFAULT") %}&PARTITION_DEFAULT;{% else %}None{% endif %}' + ppn: 1 + queue: '&QUEUE_DEFAULT;' + walltime: 00:30:00 + +task_get_obs_ccpa: + <<: *default_task + command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_OBS_CCPA"' + envars: + <<: *default_vars + ACCUM_HH: '01' + OBS_DIR: '&CCPA_OBS_DIR;' + partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}' + queue: "&QUEUE_HPSS;" + walltime: 00:45:00 + +task_get_obs_mrms: + <<: *default_task + command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_OBS_MRMS"' + envars: + <<: *default_vars + OBS_DIR: '&MRMS_OBS_DIR;' + VAR: 'REFC RETOP' + partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}' + queue: "&QUEUE_HPSS;" + walltime: 00:45:00 + +task_get_obs_ndas: + <<: *default_task + envars: + <<: *default_vars + OBS_DIR: '&NDAS_OBS_DIR;' + command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_OBS_NDAS"' + queue: "&QUEUE_HPSS;" + partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}' + walltime: 02:00:00 + +task_run_MET_Pb2nc_obs: + <<: *default_task + attrs: + cycledefs: forecast + maxtries: '2' + command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" 
"&JOBSdir;/JREGIONAL_RUN_MET_PB2NC_OBS"' + envars: + <<: *default_vars + VAR: SFC + ACCUM_HH: '01' + obs_or_fcst: obs + OBTYPE: NDAS + OBS_DIR: '&NDAS_OBS_DIR;' + dependency: + and: + datadep: + text: "&NDAS_OBS_DIR;" + or: + not: + taskvalid: + attrs: + task: get_obs_ndas + taskdep: + attrs: + task: get_obs_ndas + +metatask_vx_ens_member: + attrs: + mode: serial + var: + mem: '{% if global.DO_ENSEMBLE %}{% for m in range(1, global.NUM_ENS_MEMBERS+1) %}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' + metatask_GridStat_MRMS_mem#mem#: + var: + VAR: REFC RETOP + task_run_MET_GridStat_vx_#VAR#_mem#mem#: + <<: *default_task + command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_VX"' + envars: + <<: *default_vars + OBS_DIR: '&MRMS_OBS_DIR;' + VAR: '#VAR#' + ACCUM_HH: '01' + walltime: 02:00:00 + dependency: + and: + or_get_obs: # Ensure get_obs task is complete if it's turned on + not: + taskvalid: + attrs: + task: get_obs_mrms + and: + taskvalid: + attrs: + task: get_obs_mrms + taskdep: + attrs: + task: get_obs_mrms + or_do_post: &post_files_exist + and_run_post: # If post was meant to run, wait on the whole post metatask + taskvalid: + attrs: + task: run_post_mem#mem#_f000 + metataskdep: + attrs: + metatask: run_ens_post + and_inline_post: # If inline post ran, wait on the forecast task to complete + not: + taskvalid: + attrs: + task: run_post_mem#mem#_f000 + taskdep: + attrs: + task: run_fcst_mem#mem# + + metatask_PointStat_mem#mem#: + var: + VAR: SFC UPA + task_run_MET_PointStat_vx_#VAR#_mem#mem#: + <<: *default_task + command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_POINTSTAT_VX"' + envars: + <<: *default_vars + FHR: '#FCST_HR_LIST#' + OBS_DIR: '&NDAS_OBS_DIR;' + VAR: '#VAR#' + walltime: 01:00:00 + dependency: + and: + or_do_post: + <<: *post_files_exist + or_get_obs: # Get obs complete if turned on + not: + taskvalid: + attrs: + task: get_obs_ndas + and: + taskvalid: + attrs: + task: 
get_obs_ndas + taskdep: + attrs: + task: get_obs_ndas + +metatask_GridStat_APCP_acc: + attrs: + mode: serial + var: + ACCUM_HR: '{% for ah in [1, 3, 6, 24] %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' + FCST_HR_LIST: '{% for ah in [1, 3, 6, 24] %}{% if workflow.FCST_LEN_HRS >= ah %}{% for h in range(ah, workflow.FCST_LEN_HRS+1, ah) %}{{ "%02d," % h }}{% endfor %}{% endif %} {% endfor %}' + + + task_run_MET_PcpCombine_obs_APCP#ACCUM_HR#h: + <<: *default_task + attrs: + cycledefs: forecast + maxtries: '2' + command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' + envars: + <<: *default_vars + VAR: APCP + ACCUM_HH: '#ACCUM_HR#' + obs_or_fcst: obs + OBTYPE: CCPA + OBS_DIR: '&CCPA_OBS_DIR;' + dependency: + or: + not: + taskvalid: + attrs: + task: get_obs_ccpa + and: + taskdep: + attrs: + task: get_obs_ccpa + datadep: + text: "&CCPA_OBS_DIR;" + + metatask_vx_ens_member_acc: + var: + mem: '{% if global.DO_ENSEMBLE %}{% for m in range(1, global.NUM_ENS_MEMBERS+1) %}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' + + task_run_MET_PcpCombine_fcst_APCP#ACCUM_HR#h_mem#mem#: + <<: *default_task + attrs: + cycledefs: forecast + maxtries: '2' + command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' + envars: + <<: *default_vars + VAR: APCP + ACCUM_HH: '#ACCUM_HR#' + obs_or_fcst: fcst + OBTYPE: CCPA + OBS_DIR: '&CCPA_OBS_DIR;' + dependency: + or_do_post: + <<: *post_files_exist + + task_run_MET_GridStat_vx_APCP#ACCUM_HR#h_mem#mem#: + <<: *default_task + command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_VX"' + envars: + <<: *default_vars + FHR: '#FCST_HR_LIST#' + OBS_DIR: '&CCPA_OBS_DIR;' + VAR: APCP + ACCUM_HH: '#ACCUM_HR#' + walltime: 02:00:00 + dependency: + and: + taskdep_obs: + attrs: + task: run_MET_PcpCombine_obs_APCP#ACCUM_HR#h + taskdep_fcst: + attrs: + task: run_MET_PcpCombine_fcst_APCP#ACCUM_HR#h_mem#mem# diff 
--git a/parm/wflow/verify_ensgrid.yaml b/parm/wflow/verify_ensgrid.yaml new file mode 100644 index 0000000000..bb26d85367 --- /dev/null +++ b/parm/wflow/verify_ensgrid.yaml @@ -0,0 +1,143 @@ +default_task_verify_ens: &default_task_verify_ens + account: '&ACCOUNT;' + attrs: + cycledefs: forecast + maxtries: '1' + envars: &default_vars + GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' + USHdir: '&USHdir;' + PDY: !cycstr '@Y@m@d' + cyc: !cycstr "@H" + subcyc: !cycstr "@M" + LOGDIR: !cycstr "&LOGDIR;" + FHR: '{% for h in range(0, workflow.FCST_LEN_HRS+1) %}{{ " %02d" % h }}{% endfor %}' + SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' + nprocs: '{{ parent.nnodes * parent.ppn }}' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + memory: '{% if user.MACHINE not in ["WCOSS2", "NOAACLOUD"] %}{{ "2G" }}{% endif %}' + native: '{{ platform.SCHED_NATIVE_CMD }}' + nnodes: 1 + nodes: '{{ nnodes }}:ppn={{ ppn }}' + nodesize: '&NCORES_PER_NODE;' + partition: '{% if platform.get("PARTITION_DEFAULT") %}&PARTITION_DEFAULT;{% else %}None{% endif %}' + ppn: 1 + queue: '&QUEUE_DEFAULT;' + walltime: 01:00:00 + +metatask_EnsembleStat: + var: + VAR: REFC RETOP + task_run_MET_EnsembleStat_vx_#VAR#: + <<: *default_task_verify_ens + command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_ENSEMBLESTAT_VX_GRID"' + envars: + <<: *default_vars + ACCUM_HH: '01' + OBS_DIR: '&MRMS_OBS_DIR;' + VAR: '#VAR#' + dependency: + metataskdep: + attrs: + metatask: run_ens_post + +metatask_GridStat_ensprob: + var: + VAR: REFC RETOP + task_run_MET_GridStat_vx_ensprob_#VAR#: + <<: *default_task_verify_ens + command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_VX_ENSPROB"' + envars: + <<: *default_vars + ACCUM_HH: '01' + OBS_DIR: '&MRMS_OBS_DIR;' + VAR: '#VAR#' + dependency: + taskdep: + attrs: + task: run_MET_EnsembleStat_vx_#VAR# + +metatask_EnsembleStat_acc: + var: + ACCUM_HR: '{% for ah in [1, 3, 6, 24] %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % 
ah }}{% endif %}{% endfor %}' + FCST_HR_LIST: '{% for ah in [1, 3, 6, 24] %}{% if workflow.FCST_LEN_HRS >= ah %}{% for h in range(ah, workflow.FCST_LEN_HRS+1, ah) %}{{ "%02d," % h }}{% endfor %}{% endif %} {% endfor %}' + task_run_MET_EnsembleStat_vx_APCP#ACCUM_HR#h: + <<: *default_task_verify_ens + command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_ENSEMBLESTAT_VX_GRID"' + envars: + <<: *default_vars + ACCUM_HH: '#ACCUM_HR#' + FHR: '#FCST_HR_LIST#' + OBS_DIR: '&CCPA_OBS_DIR;' + VAR: APCP + dependency: + and: + or_do_post: &post_files_exist + and_run_post: # If post was meant to run, wait on the whole post metatask + taskvalid: + attrs: + task: run_post_mem001_f000 + metataskdep: + attrs: + metatask: run_ens_post + and_inline_post: # If inline post ran, wait on the forecast ensemble to complete + not: + taskvalid: + attrs: + task: run_post_mem001_f000 + metataskdep: + attrs: + metatask: run_ensemble + metataskdep: + attrs: + metatask: GridStat_APCP_acc + +metatask_ensemble_vx_stats: + var: + stat: MEAN PROB + statlc: mean prob + metatask_GridStat_ens_acc: + var: + ACCUM_HR: '{% for ah in [1, 3, 6, 24] %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' + FCST_HR_LIST: '{% for ah in [1, 3, 6, 24] %}{% if workflow.FCST_LEN_HRS >= ah %}{% for h in range(ah, workflow.FCST_LEN_HRS+1, ah) %}{{ "%02d," % h }}{% endfor %}{% endif %} {% endfor %}' + task_run_MET_GridStat_vx_ens#statlc#_APCP#ACCUM_HR#h: + <<: *default_task_verify_ens + command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_VX_ENS#stat#"' + envars: + <<: *default_vars + ACCUM_HH: '#ACCUM_HR#' + FHR: '#FCST_HR_LIST#' + OBS_DIR: '&CCPA_OBS_DIR;' + VAR: APCP + dependency: + taskdep: + attrs: + task: run_MET_EnsembleStat_vx_APCP#ACCUM_HR#h + metatask_PointStat_ens: + var: + VAR: SFC UPA + task_run_MET_PointStat_vx_ens#statlc#_#VAR#: + <<: *default_task_verify_ens + command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" 
"&JOBSdir;/JREGIONAL_RUN_MET_POINTSTAT_VX_ENS#stat#"' + envars: + <<: *default_vars + OBS_DIR: '&NDAS_OBS_DIR;' + VAR: '#VAR#' + dependency: + taskdep: + attrs: + task: run_MET_EnsembleStat_vx_#VAR# + +metatask_EnsembleStat_point: + var: + VAR: SFC UPA + task_run_MET_EnsembleStat_vx_#VAR#: + <<: *default_task_verify_ens + command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_ENSEMBLESTAT_VX_POINT"' + envars: + <<: *default_vars + OBS_DIR: '&NDAS_OBS_DIR;' + VAR: '#VAR#' + dependency: + metataskdep: + attrs: + metatask: run_ens_post diff --git a/scripts/exregional_aqm_lbcs.sh b/scripts/exregional_aqm_lbcs.sh index 79dbd1663f..71f72d2045 100755 --- a/scripts/exregional_aqm_lbcs.sh +++ b/scripts/exregional_aqm_lbcs.sh @@ -94,13 +94,10 @@ cd_vrfy $DATA yyyymmdd="${PDY}" mm="${PDY:4:2}" + if [ "${FCST_LEN_HRS}" = "-1" ]; then - for i_cdate in "${!ALL_CDATES[@]}"; do - if [ "${ALL_CDATES[$i_cdate]}" = "${PDY}${cyc}" ]; then - FCST_LEN_HRS="${FCST_LEN_CYCL_ALL[$i_cdate]}" - break - fi - done + CYCLE_IDX=$(( ${cyc} / ${INCR_CYCL_FREQ} )) + FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} fi LBC_SPEC_FCST_HRS=() for i_lbc in $(seq ${LBC_SPEC_INTVL_HRS} ${LBC_SPEC_INTVL_HRS} ${FCST_LEN_HRS} ); do diff --git a/scripts/exregional_bias_correction_o3.sh b/scripts/exregional_bias_correction_o3.sh index 7a40658700..e265851c49 100755 --- a/scripts/exregional_bias_correction_o3.sh +++ b/scripts/exregional_bias_correction_o3.sh @@ -107,12 +107,8 @@ if [ "${PREDEF_GRID_NAME}" = "AQM_NA_13km" ]; then fi if [ "${FCST_LEN_HRS}" = "-1" ]; then - for i_cdate in "${!ALL_CDATES[@]}"; do - if [ "${ALL_CDATES[$i_cdate]}" = "${PDY}${cyc}" ]; then - FCST_LEN_HRS="${FCST_LEN_CYCL_ALL[$i_cdate]}" - break - fi - done + CYCLE_IDX=$(( ${cyc} / ${INCR_CYCL_FREQ} )) + FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} fi #----------------------------------------------------------------------------- diff --git a/scripts/exregional_bias_correction_pm25.sh b/scripts/exregional_bias_correction_pm25.sh 
index ca3b130154..57fc639952 100755 --- a/scripts/exregional_bias_correction_pm25.sh +++ b/scripts/exregional_bias_correction_pm25.sh @@ -105,16 +105,12 @@ yyyymm_m3=${PDYm3:0:6} if [ "${PREDEF_GRID_NAME}" = "AQM_NA_13km" ]; then id_domain=793 fi - if [ "${FCST_LEN_HRS}" = "-1" ]; then - for i_cdate in "${!ALL_CDATES[@]}"; do - if [ "${ALL_CDATES[$i_cdate]}" = "${PDY}${cyc}" ]; then - FCST_LEN_HRS="${FCST_LEN_CYCL_ALL[$i_cdate]}" - break - fi - done + CYCLE_IDX=$(( ${cyc} / ${INCR_CYCL_FREQ} )) + FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} fi + #----------------------------------------------------------------------------- # STEP 1: Retrieve AIRNOW observation data #----------------------------------------------------------------------------- diff --git a/scripts/exregional_get_extrn_mdl_files.sh b/scripts/exregional_get_extrn_mdl_files.sh index ed0984b6d1..9b43bb34fe 100755 --- a/scripts/exregional_get_extrn_mdl_files.sh +++ b/scripts/exregional_get_extrn_mdl_files.sh @@ -72,12 +72,8 @@ elif [ "${ICS_OR_LBCS}" = "LBCS" ]; then file_set="fcst" first_time=$((TIME_OFFSET_HRS + LBC_SPEC_INTVL_HRS)) if [ "${FCST_LEN_HRS}" = "-1" ]; then - for i_cdate in "${!ALL_CDATES[@]}"; do - if [ "${ALL_CDATES[$i_cdate]}" = "${PDY}${cyc}" ]; then - FCST_LEN_HRS="${FCST_LEN_CYCL_ALL[$i_cdate]}" - break - fi - done + CYCLE_IDX=$(( ${cyc} / ${INCR_CYCL_FREQ} )) + FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} fi last_time=$((TIME_OFFSET_HRS + FCST_LEN_HRS)) fcst_hrs="${first_time} ${last_time} ${LBC_SPEC_INTVL_HRS}" diff --git a/scripts/exregional_get_obs_ccpa.sh b/scripts/exregional_get_obs_ccpa.sh index ccb7e78297..d3f53c8d5a 100755 --- a/scripts/exregional_get_obs_ccpa.sh +++ b/scripts/exregional_get_obs_ccpa.sh @@ -8,7 +8,7 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_get_obs_ccpa" ${GLOBAL_VAR_DEFNS_FP} +source_config_for_task " " ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_get_obs_mrms.sh b/scripts/exregional_get_obs_mrms.sh index 89ea7a7a19..2f2d7afb4f 100755 --- a/scripts/exregional_get_obs_mrms.sh +++ b/scripts/exregional_get_obs_mrms.sh @@ -8,7 +8,7 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_get_obs_mrms" ${GLOBAL_VAR_DEFNS_FP} +source_config_for_task " " ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # @@ -61,12 +61,12 @@ s_dd=`echo ${start_valid} | cut -c7-8` # day (DD) of start time s_hh=`echo ${start_valid} | cut -c9-10` # hour (HH) of start time start_valid_ut=`$DATE_UTIL -ud ''${s_yyyy}-${s_mm}-${s_dd}' UTC '${s_hh}':00:00' +%s` # convert start time to universal time -end_fcst_sec=`expr ${fcst_length} \* 3600` # convert last forecast lead hour to seconds -end_valid_ut=`expr ${start_valid_ut} + ${end_fcst_sec}` # calculate current forecast time in universal time +end_fcst_sec=$(( ${fcst_length} * 3600 )) # convert last forecast lead hour to seconds +end_valid_ut=$(( ${start_valid_ut} + ${end_fcst_sec} )) # calculate current forecast time in universal time cur_ut=${start_valid_ut} current_fcst=0 -fcst_sec=`expr ${current_fcst} \* 3600` # convert forecast lead hour to seconds +fcst_sec=$(( ${current_fcst} * 3600 )) # convert forecast lead hour to seconds while [[ ${cur_ut} -le ${end_valid_ut} ]]; do cur_time=`$DATE_UTIL -ud '1970-01-01 UTC '${cur_ut}' seconds' +%Y%m%d%H` # convert universal time to standard time @@ -155,8 +155,8 @@ Did you forget to run \"module load hpss\"?\ # Increment current_fcst=$((${current_fcst} + 1)) # hourly increment - fcst_sec=`expr ${current_fcst} \* 3600` # convert forecast lead hour to seconds - 
cur_ut=`expr ${start_valid_ut} + ${fcst_sec}` + fcst_sec=$(( ${current_fcst} * 3600 )) # convert forecast lead hour to seconds + cur_ut=$(( ${start_valid_ut} + ${fcst_sec} )) done # diff --git a/scripts/exregional_get_obs_ndas.sh b/scripts/exregional_get_obs_ndas.sh index 6c5f4ed31c..401013a9ce 100755 --- a/scripts/exregional_get_obs_ndas.sh +++ b/scripts/exregional_get_obs_ndas.sh @@ -8,7 +8,7 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_get_obs_ndas" ${GLOBAL_VAR_DEFNS_FP} +source_config_for_task " " ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # @@ -26,7 +26,6 @@ source_config_for_task "task_get_obs_ndas" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # - # Top-level NDAS directory ndas_dir=${OBS_DIR}/.. if [[ ! -d "$ndas_dir" ]]; then @@ -58,68 +57,27 @@ fcst_length=${fhr_last} current_fcst=00 while [[ ${current_fcst} -le ${fcst_length} ]]; do - fcst_sec=`expr ${current_fcst} \* 3600` # convert forecast lead hour to seconds - yyyy=`echo ${init} | cut -c1-4` # year (YYYY) of initialization time - mm=`echo ${init} | cut -c5-6` # month (MM) of initialization time - dd=`echo ${init} | cut -c7-8` # day (DD) of initialization time - hh=`echo ${init} | cut -c9-10` # hour (HH) of initialization time + fcst_sec=$(( ${current_fcst} * 3600 )) # convert forecast lead hour to seconds + yyyy=${init:0:4} # year (YYYY) of initialization time + mm=${init:4:2} # month (MM) of initialization time + dd=${init:6:2} # day (DD) of initialization time + hh=${init:8:2} # hour (HH) of initialization time init_ut=`$DATE_UTIL -ud ''${yyyy}-${mm}-${dd}' UTC '${hh}':00:00' +%s` # convert initialization time to universal time - vdate_ut=`expr ${init_ut} + ${fcst_sec}` # calculate current forecast time in universal time + vdate_ut=$(( ${init_ut} + ${fcst_sec} )) # calculate current forecast time in 
universal time vdate=`$DATE_UTIL -ud '1970-01-01 UTC '${vdate_ut}' seconds' +%Y%m%d%H` # convert universal time to standard time - vyyyymmdd=`echo ${vdate} | cut -c1-8` # forecast time (YYYYMMDD) - vyyyy=`echo ${vdate} | cut -c1-4` # year (YYYY) of valid time - vmm=`echo ${vdate} | cut -c5-6` # month (MM) of valid time - vdd=`echo ${vdate} | cut -c7-8` # day (DD) of valid time - vhh=`echo ${vdate} | cut -c9-10` # forecast hour (HH) + vyyyymmdd=${vdate:0:8} # forecast time (YYYYMMDD) + vyyyy=${vdate:0:4} # year (YYYY) of valid time + vmm=${vdate:4:2} # month (MM) of valid time + vdd=${vdate:6:2} # day (DD) of valid time + vhh=${vdate:8:2} # forecast hour (HH) + vhh_noZero=$(( ${vhh} + 0 )) echo "yyyy mm dd hh= $yyyy $mm $dd $hh" echo "vyyyy vmm vdd vhh= $vyyyy $vmm $vdd $vhh" - - vdate_ut_m1h=`expr ${vdate_ut} - 3600` # calculate current forecast time in universal time - vdate_m1h=`$DATE_UTIL -ud '1970-01-01 UTC '${vdate_ut_m1h}' seconds' +%Y%m%d%H` # convert universal time to standard time - vyyyymmdd_m1h=`echo ${vdate_m1h} | cut -c1-8` # forecast time (YYYYMMDD) - vyyyy_m1h=`echo ${vdate_m1h} | cut -c1-4` # year (YYYY) of valid time - vmm_m1h=`echo ${vdate_m1h} | cut -c5-6` # month (MM) of valid time - vdd_m1h=`echo ${vdate_m1h} | cut -c7-8` # day (DD) of valid time - vhh_m1h=`echo ${vdate_m1h} | cut -c9-10` # forecast hour (HH) - - vdate_ut_m2h=`expr ${vdate_ut} - 7200` # calculate current forecast time in universal time - vdate_m2h=`$DATE_UTIL -ud '1970-01-01 UTC '${vdate_ut_m2h}' seconds' +%Y%m%d%H` # convert universal time to standard time - vyyyymmdd_m2h=`echo ${vdate_m2h} | cut -c1-8` # forecast time (YYYYMMDD) - vyyyy_m2h=`echo ${vdate_m2h} | cut -c1-4` # year (YYYY) of valid time - vmm_m2h=`echo ${vdate_m2h} | cut -c5-6` # month (MM) of valid time - vdd_m2h=`echo ${vdate_m2h} | cut -c7-8` # day (DD) of valid time - vhh_m2h=`echo ${vdate_m2h} | cut -c9-10` # forecast hour (HH) - - vdate_ut_m3h=`expr ${vdate_ut} - 10800` # calculate current forecast time in 
universal time - vdate_m3h=`$DATE_UTIL -ud '1970-01-01 UTC '${vdate_ut_m3h}' seconds' +%Y%m%d%H` # convert universal time to standard time - vyyyymmdd_m3h=`echo ${vdate_m3h} | cut -c1-8` # forecast time (YYYYMMDD) - vyyyy_m3h=`echo ${vdate_m3h} | cut -c1-4` # year (YYYY) of valid time - vmm_m3h=`echo ${vdate_m3h} | cut -c5-6` # month (MM) of valid time - vdd_m3h=`echo ${vdate_m3h} | cut -c7-8` # day (DD) of valid time - vhh_m3h=`echo ${vdate_m3h} | cut -c9-10` # forecast hour (HH) - - vdate_ut_m4h=`expr ${vdate_ut} - 14400` # calculate current forecast time in universal time - vdate_m4h=`$DATE_UTIL -ud '1970-01-01 UTC '${vdate_ut_m4h}' seconds' +%Y%m%d%H` # convert universal time to standard time - vyyyymmdd_m4h=`echo ${vdate_m4h} | cut -c1-8` # forecast time (YYYYMMDD) - vyyyy_m4h=`echo ${vdate_m4h} | cut -c1-4` # year (YYYY) of valid time - vmm_m4h=`echo ${vdate_m4h} | cut -c5-6` # month (MM) of valid time - vdd_m4h=`echo ${vdate_m4h} | cut -c7-8` # day (DD) of valid time - vhh_m4h=`echo ${vdate_m4h} | cut -c9-10` # forecast hour (HH) - - vdate_ut_m5h=`expr ${vdate_ut} - 18000` # calculate current forecast time in universal time - vdate_m5h=`$DATE_UTIL -ud '1970-01-01 UTC '${vdate_ut_m5h}' seconds' +%Y%m%d%H` # convert universal time to standard time - vyyyymmdd_m5h=`echo ${vdate_m5h} | cut -c1-8` # forecast time (YYYYMMDD) - vyyyy_m5h=`echo ${vdate_m5h} | cut -c1-4` # year (YYYY) of valid time - vmm_m5h=`echo ${vdate_m5h} | cut -c5-6` # month (MM) of valid time - vdd_m5h=`echo ${vdate_m5h} | cut -c7-8` # day (DD) of valid time - vhh_m5h=`echo ${vdate_m5h} | cut -c9-10` # forecast hour (HH) - - vhh_noZero=$(expr ${vhh} + 0) - -echo "vyyyymmdd_m1h vhh_m1h=$vyyyymmdd_m1h $vhh_m1h" echo "vhh_noZero=$vhh_noZero" + + # Check if file exists on disk ndas_file="$ndas_proc/prepbufr.ndas.${vyyyymmdd}${vhh}" echo "NDAS PB FILE:${ndas_file}" @@ -127,7 +85,7 @@ echo "vhh_noZero=$vhh_noZero" if [[ ! -f "${ndas_file}" ]]; then if [[ ! 
-d "$ndas_raw/${vyyyymmdd}${vhh}" ]]; then mkdir_vrfy -p $ndas_raw/${vyyyymmdd}${vhh} - fi + fi cd_vrfy $ndas_raw/${vyyyymmdd}${vhh} # Name of NDAS tar file on HPSS is dependent on date. Logic accounts for files from 2019 until July 2020. @@ -158,13 +116,15 @@ echo "vhh_noZero=$vhh_noZero" fi if [[ ${vhh_noZero} -eq 0 || ${vhh} -eq 6 || ${vhh} -eq 12 || ${vhh} -eq 18 ]]; then - #echo "$ndas_raw/${vyyyymmdd}${vhh}/nam.t${vhh}z.prepbufr.tm00.nr $ndas_proc/prepbufr.ndas.${vyyyymmdd}${vhh}" - cp_vrfy $ndas_raw/${vyyyymmdd}${vhh}/nam.t${vhh}z.prepbufr.tm00.nr $ndas_proc/prepbufr.ndas.${vyyyymmdd}${vhh} - cp_vrfy $ndas_raw/${vyyyymmdd}${vhh}/nam.t${vhh}z.prepbufr.tm01.nr $ndas_proc/prepbufr.ndas.${vyyyymmdd_m1h}${vhh_m1h} - cp_vrfy $ndas_raw/${vyyyymmdd}${vhh}/nam.t${vhh}z.prepbufr.tm02.nr $ndas_proc/prepbufr.ndas.${vyyyymmdd_m2h}${vhh_m2h} - cp_vrfy $ndas_raw/${vyyyymmdd}${vhh}/nam.t${vhh}z.prepbufr.tm03.nr $ndas_proc/prepbufr.ndas.${vyyyymmdd_m3h}${vhh_m3h} - cp_vrfy $ndas_raw/${vyyyymmdd}${vhh}/nam.t${vhh}z.prepbufr.tm04.nr $ndas_proc/prepbufr.ndas.${vyyyymmdd_m4h}${vhh_m4h} - cp_vrfy $ndas_raw/${vyyyymmdd}${vhh}/nam.t${vhh}z.prepbufr.tm05.nr $ndas_proc/prepbufr.ndas.${vyyyymmdd_m5h}${vhh_m5h} + # copy files from the previous 6 hours + for tm in $(seq 0 5); do + vdate_ut_tm=$(( ${vdate_ut} - $tm * 3600 )) + vdate_tm=$($DATE_UTIL -ud '1970-01-01 UTC '${vdate_ut_tm}' seconds' +%Y%m%d%H) + vyyyymmddhh_tm=${vdate_tm:0:10} + tm2=$(echo $tm | awk '{printf "%02d\n", $0;}') + + cp_vrfy $ndas_raw/${vyyyymmdd}${vhh}/nam.t${vhh}z.prepbufr.tm${tm2}.nr $ndas_proc/prepbufr.ndas.${vyyyymmddhh_tm} + done fi fi current_fcst=$((${current_fcst} + 6)) diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index eef3518056..88472a8343 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -394,11 +394,11 @@ mv_vrfy "${grid_fp_orig}" "${grid_fp}" #----------------------------------------------------------------------- # # If there are 
pre-existing orography or climatology files that we will -# be using (i.e. if RUN_TASK_MAKE_OROG or RUN_TASK_MAKE_SURF_CLIMO is set -# to "FALSE", in which case RES_IN_FIXLAM_FILENAMES will not be set to a -# null string), check that the grid resolution contained in the variable -# CRES set above matches the resolution appearing in the names of the -# preexisting orography and/or surface climatology files. +# be using (i.e. if task_make_grid or task_make_sfc_climo ran in the +# experiment, RES_IN_FIXLAM_FILENAMES will not be set to a null string), +# check that the grid resolution contained in the variable CRES set +# above matches the resolution appearing in the names of the preexisting +# orography and/or surface climatology files. # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh index 3fa42a8f93..25c4a03f95 100755 --- a/scripts/exregional_make_ics.sh +++ b/scripts/exregional_make_ics.sh @@ -65,8 +65,6 @@ export OMP_STACKSIZE=${OMP_STACKSIZE_MAKE_ICS} # eval ${PRE_TASK_CMDS} -nprocs=$(( NNODES_MAKE_ICS*PPN_MAKE_ICS )) - if [ -z "${RUN_CMD_UTILS:-}" ] ; then print_err_msg_exit "\ Run command was not set in machine file. \ diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh index cf32af3f0b..bb43a22714 100755 --- a/scripts/exregional_make_lbcs.sh +++ b/scripts/exregional_make_lbcs.sh @@ -65,8 +65,6 @@ export OMP_STACKSIZE=${OMP_STACKSIZE_MAKE_LBCS} # eval ${PRE_TASK_CMDS} -nprocs=$(( NNODES_MAKE_LBCS*PPN_MAKE_LBCS )) - if [ -z "${RUN_CMD_UTILS:-}" ] ; then print_err_msg_exit "\ Run command was not set in machine file. 
\ @@ -103,12 +101,8 @@ mkdir_vrfy -p "$DATA" cd_vrfy $DATA if [ "${FCST_LEN_HRS}" = "-1" ]; then - for i_cdate in "${!ALL_CDATES[@]}"; do - if [ "${ALL_CDATES[$i_cdate]}" = "${PDY}${cyc}" ]; then - FCST_LEN_HRS="${FCST_LEN_CYCL_ALL[$i_cdate]}" - break - fi - done + CYCLE_IDX=$(( ${cyc} / ${INCR_CYCL_FREQ} )) + FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} fi LBC_SPEC_FCST_HRS=() for i_lbc in $(seq ${LBC_SPEC_INTVL_HRS} ${LBC_SPEC_INTVL_HRS} $(( FCST_LEN_HRS+LBC_SPEC_INTVL_HRS )) ); do diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh index ad00e307ef..d641fd6267 100755 --- a/scripts/exregional_make_sfc_climo.sh +++ b/scripts/exregional_make_sfc_climo.sh @@ -108,8 +108,6 @@ EOF # eval ${PRE_TASK_CMDS} -nprocs=$(( NNODES_MAKE_SFC_CLIMO*PPN_MAKE_SFC_CLIMO )) - if [ -z "${RUN_CMD_UTILS:-}" ] ; then print_err_msg_exit "\ Run command was not set in machine file. \ diff --git a/scripts/exregional_nexus_emission.sh b/scripts/exregional_nexus_emission.sh index e0698650ce..9d3f04b86f 100755 --- a/scripts/exregional_nexus_emission.sh +++ b/scripts/exregional_nexus_emission.sh @@ -64,8 +64,6 @@ export OMP_STACKSIZE=${OMP_STACKSIZE_NEXUS_EMISSION} set -x eval ${PRE_TASK_CMDS} -nprocs=$(( NNODES_NEXUS_EMISSION*PPN_NEXUS_EMISSION )) -ppn_run_aqm="${PPN_NEXUS_EMISSION}" omp_num_threads_run_aqm="${OMP_NUM_THREADS_NEXUS_EMISSION}" if [ -z "${RUN_CMD_AQM:-}" ] ; then @@ -136,13 +134,10 @@ hh="${cyc}" yyyymmdd="${PDY}" NUM_SPLIT_NEXUS=$( printf "%02d" ${NUM_SPLIT_NEXUS} ) + if [ "${FCST_LEN_HRS}" = "-1" ]; then - for i_cdate in "${!ALL_CDATES[@]}"; do - if [ "${ALL_CDATES[$i_cdate]}" = "${PDY}${cyc}" ]; then - FCST_LEN_HRS="${FCST_LEN_CYCL_ALL[$i_cdate]}" - break - fi - done + CYCLE_IDX=$(( ${cyc} / ${INCR_CYCL_FREQ} )) + FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} fi if [ "${NUM_SPLIT_NEXUS}" = "01" ]; then diff --git a/scripts/exregional_nexus_gfs_sfc.sh b/scripts/exregional_nexus_gfs_sfc.sh index 98329ca018..adf3ef5f2d 100755 --- 
a/scripts/exregional_nexus_gfs_sfc.sh +++ b/scripts/exregional_nexus_gfs_sfc.sh @@ -63,13 +63,10 @@ yyyymmdd=${GFS_SFC_CDATE:0:8} yyyymm=${GFS_SFC_CDATE:0:6} yyyy=${GFS_SFC_CDATE:0:4} hh=${GFS_SFC_CDATE:8:2} + if [ "${FCST_LEN_HRS}" = "-1" ]; then - for i_cdate in "${!ALL_CDATES[@]}"; do - if [ "${ALL_CDATES[$i_cdate]}" = "${PDY}${cyc}" ]; then - FCST_LEN_HRS="${FCST_LEN_CYCL_ALL[$i_cdate]}" - break - fi - done + CYCLE_IDX=$(( ${cyc} / ${INCR_CYCL_FREQ} )) + FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} fi # #----------------------------------------------------------------------- diff --git a/scripts/exregional_nexus_post_split.sh b/scripts/exregional_nexus_post_split.sh index 207169a730..29fc57e314 100755 --- a/scripts/exregional_nexus_post_split.sh +++ b/scripts/exregional_nexus_post_split.sh @@ -70,13 +70,10 @@ hh="${cyc}" yyyymmdd="${PDY}" NUM_SPLIT_NEXUS=$( printf "%02d" ${NUM_SPLIT_NEXUS} ) + if [ "${FCST_LEN_HRS}" = "-1" ]; then - for i_cdate in "${!ALL_CDATES[@]}"; do - if [ "${ALL_CDATES[$i_cdate]}" = "${PDY}${cyc}" ]; then - FCST_LEN_HRS="${FCST_LEN_CYCL_ALL[$i_cdate]}" - break - fi - done + CYCLE_IDX=$(( ${cyc} / ${INCR_CYCL_FREQ} )) + FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} fi start_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC" "+%Y%m%d%H" ) end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${FCST_LEN_HRS} hours" "+%Y%m%d%H" ) diff --git a/scripts/exregional_point_source.sh b/scripts/exregional_point_source.sh index 1b2bd54265..a5191a1647 100755 --- a/scripts/exregional_point_source.sh +++ b/scripts/exregional_point_source.sh @@ -54,12 +54,8 @@ This is the ex-script for the task that runs PT_SOURCE. 
eval ${PRE_TASK_CMDS} if [ "${FCST_LEN_HRS}" = "-1" ]; then - for i_cdate in "${!ALL_CDATES[@]}"; do - if [ "${ALL_CDATES[$i_cdate]}" = "${PDY}${cyc}" ]; then - FCST_LEN_HRS="${FCST_LEN_CYCL_ALL[$i_cdate]}" - break - fi - done + CYCLE_IDX=$(( ${cyc} / ${INCR_CYCL_FREQ} )) + FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} fi nstep=$(( FCST_LEN_HRS+1 )) yyyymmddhh="${PDY}${cyc}" diff --git a/scripts/exregional_post_stat_o3.sh b/scripts/exregional_post_stat_o3.sh index dd1e02473b..402f2a064e 100755 --- a/scripts/exregional_post_stat_o3.sh +++ b/scripts/exregional_post_stat_o3.sh @@ -115,12 +115,8 @@ Call to executable to run AQM_POST_GRIB2 returned with nonzero exit code." POST_STEP if [ "${FCST_LEN_HRS}" = "-1" ]; then - for i_cdate in "${!ALL_CDATES[@]}"; do - if [ "${ALL_CDATES[$i_cdate]}" = "${PDY}${cyc}" ]; then - FCST_LEN_HRS="${FCST_LEN_CYCL_ALL[$i_cdate]}" - break - fi - done + CYCLE_IDX=$(( ${cyc} / ${INCR_CYCL_FREQ} )) + FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} fi fhr=01 diff --git a/scripts/exregional_pre_post_stat.sh b/scripts/exregional_pre_post_stat.sh index a651884576..12bebfd345 100755 --- a/scripts/exregional_pre_post_stat.sh +++ b/scripts/exregional_pre_post_stat.sh @@ -65,14 +65,12 @@ mkdir_vrfy -p "$DATA" cd_vrfy $DATA if [ "${FCST_LEN_HRS}" = "-1" ]; then - for i_cdate in "${!ALL_CDATES[@]}"; do - if [ "${ALL_CDATES[$i_cdate]}" = "${PDY}${cyc}" ]; then - FCST_LEN_HRS="${FCST_LEN_CYCL_ALL[$i_cdate]}" - break - fi - done - if [ "${RUN_TASK_RUN_POST}" = "TRUE" ]; then - rm_vrfy -f "${COMIN}/${TN_RUN_POST}_${PDY}${cyc}_task_complete.txt" + CYCLE_IDX=$(( ${cyc} / ${INCR_CYCL_FREQ} )) + FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} + + post_complete_file=${COMIN}/${TN_RUN_POST}_${PDY}${cyc}_task_complete.txt + if [ -f ${post_complete_file} ] ; then + rm_vrfy -f ${post_complete_file} fi fi diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index 6ae2447e87..7708c5d217 100755 --- a/scripts/exregional_run_fcst.sh +++ 
b/scripts/exregional_run_fcst.sh @@ -64,8 +64,6 @@ export OMP_STACKSIZE=${OMP_STACKSIZE_RUN_FCST} # eval ${PRE_TASK_CMDS} -nprocs=$(( NNODES_RUN_FCST*PPN_RUN_FCST )) - if [ -z "${RUN_CMD_FCST:-}" ] ; then print_err_msg_exit "\ Run command was not set in machine file. \ @@ -76,12 +74,8 @@ else fi if [ "${FCST_LEN_HRS}" = "-1" ]; then - for i_cdate in "${!ALL_CDATES[@]}"; do - if [ "${ALL_CDATES[$i_cdate]}" = "${PDY}${cyc}" ]; then - FCST_LEN_HRS="${FCST_LEN_CYCL_ALL[$i_cdate]}" - break - fi - done + CYCLE_IDX=$(( ${cyc} / ${INCR_CYCL_FREQ} )) + FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} fi # @@ -113,7 +107,7 @@ cd_vrfy ${DATA}/INPUT # in this case, there isn't really an advantage to using relative symlinks, # so we use symlinks with absolute paths. # -if [ "${RUN_TASK_MAKE_GRID}" = "TRUE" ]; then +if [[ -d "${EXPTDIR}/grid" ]]; then relative_link_flag="TRUE" else relative_link_flag="FALSE" @@ -126,16 +120,6 @@ symlink="grid_spec.nc" create_symlink_to_file target="$target" symlink="$symlink" \ relative="${relative_link_flag}" -## Symlink to halo-3 grid file with "halo3" stripped from name. -#target="${FIXlam}/${CRES}${DOT_OR_USCORE}grid.tile${TILE_RGNL}.halo${NH3}.nc" -#if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "TRUE" ] && \ -# [ "${GRID_GEN_METHOD}" = "GFDLgrid" ] && \ -# [ "${GFDLgrid_USE_NUM_CELLS_IN_FILENAMES}" = "FALSE" ]; then -# symlink="C${GFDLgrid_NUM_CELLS}${DOT_OR_USCORE}grid.tile${TILE_RGNL}.nc" -#else -# symlink="${CRES}${DOT_OR_USCORE}grid.tile${TILE_RGNL}.nc" -#fi - # Symlink to halo-3 grid file with "halo3" stripped from name. mosaic_fn="grid_spec.nc" grid_fn=$( get_charvar_from_netcdf "${mosaic_fn}" "gridfiles" ) @@ -168,7 +152,7 @@ create_symlink_to_file target="$target" symlink="$symlink" \ # the orography files, use relative paths if running the TN_MAKE_OROG # task and absolute paths otherwise. 
# -if [ "${RUN_TASK_MAKE_OROG}" = "TRUE" ]; then +if [ -d "${EXPTDIR}/orog" ]; then relative_link_flag="TRUE" else relative_link_flag="FALSE" @@ -277,8 +261,8 @@ if [ "${CPL_AQM}" = "TRUE" ]; then relative="${relative_link_flag}" # create symlink to PT for point source in Online-CMAQ - if [ "${RUN_TASK_POINT_SOURCE}" = "TRUE" ]; then - target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.PT.nc" + target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.PT.nc" + if [ -f ${target} ]; then symlink="PT.nc" create_symlink_to_file target="$target" symlink="$symlink" \ relative="${relative_link_flag}" @@ -597,12 +581,6 @@ if [ "${CPL_AQM}" = "TRUE" ]; then mv_vrfy ${DATA}/${AQM_RC_PRODUCT_FN} ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${AQM_RC_PRODUCT_FN} - if [ "${RUN_TASK_RUN_POST}" = "FALSE" ] && [ "${WRITE_DOPOST}" = "FALSE" ]; then - for fhr in $(seq -f "%03g" 0 ${FCST_LEN_HRS}); do - mv_vrfy ${DATA}/dynf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}.nc - mv_vrfy ${DATA}/phyf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}.nc - done - fi fi # #----------------------------------------------------------------------- @@ -664,13 +642,16 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then fi done - if [ "${CPL_AQM}" = "TRUE" ]; then - mv_vrfy ${DATA}/dynf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}.nc - mv_vrfy ${DATA}/phyf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}.nc - fi done fi +if [ "${CPL_AQM}" = "TRUE" ]; then + for fhr in $(seq -f "%03g" 0 ${FCST_LEN_HRS}); do + mv_vrfy ${DATA}/dynf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}.nc + mv_vrfy ${DATA}/phyf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}.nc + done +fi + # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_ensemblestat_vx_grid.sh b/scripts/exregional_run_met_ensemblestat_vx_grid.sh index 75b52676af..9191bb7a0b 100755 --- a/scripts/exregional_run_met_ensemblestat_vx_grid.sh +++ 
b/scripts/exregional_run_met_ensemblestat_vx_grid.sh @@ -54,7 +54,7 @@ gridded data. #----------------------------------------------------------------------- # print_info_msg "$VERBOSE" "Starting ensemble-stat verification" - + # #----------------------------------------------------------------------- # @@ -96,7 +96,9 @@ if [ $RUN_ENVIR = "nco" ]; then export MEM_CUSTOM= export DOT_MEM_CUSTOM=".{custom?fmt=%s}" else - export INPUT_BASE=${VX_FCST_INPUT_BASEDIR}/$CDATE + ENSMEM_INDX='ZZZ' + INPUT_BASE=$( eval echo ${VX_FCST_INPUT_DIR} ) + export INPUT_BASE=${INPUT_BASE/ZZZ/'*'} export OUTPUT_BASE=$EXPTDIR export MEM_BASE=$EXPTDIR/$CDATE export LOG_DIR=${EXPTDIR}/log diff --git a/scripts/exregional_run_met_ensemblestat_vx_point.sh b/scripts/exregional_run_met_ensemblestat_vx_point.sh index 6476848608..b1866dc621 100755 --- a/scripts/exregional_run_met_ensemblestat_vx_point.sh +++ b/scripts/exregional_run_met_ensemblestat_vx_point.sh @@ -92,7 +92,9 @@ if [ $RUN_ENVIR = "nco" ]; then export MEM_CUSTOM= export DOT_MEM_CUSTOM=".{custom?fmt=%s}" else - export INPUT_BASE=${VX_FCST_INPUT_BASEDIR}/$CDATE + ENSMEM_INDX='ZZZ' + INPUT_BASE=$( eval echo ${VX_FCST_INPUT_DIR} ) + export INPUT_BASE=${INPUT_BASE/ZZZ/'*'} export OUTPUT_BASE=$EXPTDIR export MEM_BASE=$EXPTDIR/$CDATE export LOG_DIR=${EXPTDIR}/log diff --git a/scripts/exregional_run_met_gridstat_vx.sh b/scripts/exregional_run_met_gridstat_vx.sh index 2b0216d41a..5ff8c3031f 100755 --- a/scripts/exregional_run_met_gridstat_vx.sh +++ b/scripts/exregional_run_met_gridstat_vx.sh @@ -55,6 +55,7 @@ the UPP output files by initialization time for all forecast hours. 
# #----------------------------------------------------------------------- # + yyyymmdd=${PDY} hh=${cyc} export CDATE @@ -84,8 +85,8 @@ if [ $RUN_ENVIR = "nco" ]; then export MEM_CUSTOM= export DOT_MEM_CUSTOM=".{custom?fmt=%s}" else - export INPUT_BASE=${VX_FCST_INPUT_BASEDIR}/${CDATE}${SLASH_ENSMEM_SUBDIR_OR_NULL}/postprd - export OUTPUT_BASE=${EXPTDIR}/${CDATE}${SLASH_ENSMEM_SUBDIR_OR_NULL} + export INPUT_BASE=$( eval echo ${VX_FCST_INPUT_DIR} ) + export OUTPUT_BASE=${VX_OUTPUT_BASEDIR}/${CDATE}/mem${ENSMEM_INDX} export MEM_BASE=$EXPTDIR/$CDATE export LOG_DIR=${EXPTDIR}/log @@ -104,16 +105,11 @@ export DOT_ENSMEM=${dot_ensmem} #----------------------------------------------------------------------- # if [ ${VAR} == "APCP" ]; then - LOG_SUFFIX=GridStat_${VAR}${ACCUM_HH}h${USCORE_ENSMEM_NAME_OR_NULL}_${CDATE} + LOG_SUFFIX=GridStat_${VAR}${ACCUM_HH}h_mem${ENSMEM_INDX}_${CDATE} else - LOG_SUFFIX=GridStat_${VAR}${USCORE_ENSMEM_NAME_OR_NULL}_${CDATE} + LOG_SUFFIX=GridStat_${VAR}_mem${ENSMEM_INDX}_${CDATE} fi -#if [[ ${DO_ENSEMBLE} == "TRUE" ]]; then -# ENSMEM=`echo ${SLASH_ENSMEM_SUBDIR_OR_NULL} | cut -d"/" -f2` -# VX_FCST_MODEL_NAME=${VX_FCST_MODEL_NAME}_${ENSMEM} -#fi - # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_pb2nc_obs.sh b/scripts/exregional_run_met_pb2nc_obs.sh index 9c6eaf4709..19e2072879 100755 --- a/scripts/exregional_run_met_pb2nc_obs.sh +++ b/scripts/exregional_run_met_pb2nc_obs.sh @@ -243,7 +243,6 @@ settings="\ # Ensemble and member-specific information. # 'num_ens_members': '${NUM_ENS_MEMBERS}' - 'uscore_ensmem_name_or_null': '${USCORE_ENSMEM_NAME_OR_NULL:-}' 'time_lag': '${time_lag:-}' # # Field information. 
diff --git a/scripts/exregional_run_met_pcpcombine.sh b/scripts/exregional_run_met_pcpcombine.sh index b9d12abb66..cb2a7b28f3 100755 --- a/scripts/exregional_run_met_pcpcombine.sh +++ b/scripts/exregional_run_met_pcpcombine.sh @@ -150,10 +150,10 @@ if [ "${obs_or_fcst}" = "obs" ]; then elif [ "${obs_or_fcst}" = "fcst" ]; then - FCST_INPUT_DIR="${VX_FCST_INPUT_BASEDIR}" - FCST_INPUT_FN_TEMPLATE=$( eval echo ${FCST_SUBDIR_TEMPLATE}/${FCST_FN_TEMPLATE} ) + FCST_INPUT_DIR="$( eval echo ${VX_FCST_INPUT_DIR} )" + FCST_INPUT_FN_TEMPLATE=$( eval echo ${FCST_FN_TEMPLATE} ) - OUTPUT_BASE="${VX_OUTPUT_BASEDIR}/${CDATE}${SLASH_ENSMEM_SUBDIR_OR_NULL}" + OUTPUT_BASE="${VX_OUTPUT_BASEDIR}/${CDATE}/mem${ENSMEM_INDX}" OUTPUT_DIR="${OUTPUT_BASE}/metprd/${met_tool_pc}_fcst" OUTPUT_FN_TEMPLATE=$( eval echo ${FCST_FN_METPROC_TEMPLATE} ) STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}" @@ -242,7 +242,7 @@ fi # metplus_config_tmpl_fn="${met_tool_pc}_${obs_or_fcst}" metplus_config_fn="${metplus_config_tmpl_fn}_${FIELDNAME_IN_MET_FILEDIR_NAMES}" -metplus_log_fn="${metplus_config_fn}${USCORE_ENSMEM_NAME_OR_NULL}_$CDATE" +metplus_log_fn="${metplus_config_fn}_mem${ENSMEM_INDX}_$CDATE" # # If operating on observation files, append the cycle date to the name # of the configuration file because in this case, the output files from @@ -299,7 +299,6 @@ settings="\ # Ensemble and member-specific information. # 'num_ens_members': '${NUM_ENS_MEMBERS}' - 'uscore_ensmem_name_or_null': '${USCORE_ENSMEM_NAME_OR_NULL:-}' 'time_lag': '${time_lag:-}' # # Field information. diff --git a/scripts/exregional_run_met_pointstat_vx.sh b/scripts/exregional_run_met_pointstat_vx.sh index d58e37a5bf..c852248553 100755 --- a/scripts/exregional_run_met_pointstat_vx.sh +++ b/scripts/exregional_run_met_pointstat_vx.sh @@ -64,6 +64,7 @@ the UPP output files by initialization time for all forecast hours. 
# #----------------------------------------------------------------------- # + yyyymmdd=${PDY} hh=${cyc} export CDATE @@ -86,8 +87,8 @@ if [ $RUN_ENVIR = "nco" ]; then export MEM_CUSTOM= export DOT_MEM_CUSTOM=".{custom?fmt=%s}" else - export INPUT_BASE=${VX_FCST_INPUT_BASEDIR}/${CDATE}${SLASH_ENSMEM_SUBDIR_OR_NULL}/postprd - export OUTPUT_BASE=${VX_OUTPUT_BASEDIR}/${CDATE}${SLASH_ENSMEM_SUBDIR_OR_NULL} + export INPUT_BASE=$( eval echo ${VX_FCST_INPUT_DIR} ) + export OUTPUT_BASE=${VX_OUTPUT_BASEDIR}/${CDATE}/mem${ENSMEM_INDX} export MEM_BASE=$EXPTDIR/$CDATE export LOG_DIR=${EXPTDIR}/log @@ -106,10 +107,6 @@ export DOT_ENSMEM=${dot_ensmem} #----------------------------------------------------------------------- # LOG_SUFFIX="PointStat" -#if [[ ${DO_ENSEMBLE} == "TRUE" ]]; then -# ENSMEM=`echo ${SLASH_ENSMEM_SUBDIR_OR_NULL} | cut -d"/" -f2` -# VX_FCST_MODEL_NAME=${VX_FCST_MODEL_NAME}_${ENSMEM} -#fi # #----------------------------------------------------------------------- @@ -125,7 +122,7 @@ set_vx_fhr_list \ cdate="${CDATE}" \ fcst_len_hrs="${FCST_LEN_HRS}" \ field="$VAR" \ - accum_hh="${ACCUM_HH}" \ + accum_hh="${ACCUM_HH:-}" \ base_dir="${OBS_INPUT_DIR}" \ fn_template="${OBS_INPUT_FN_TEMPLATE}" \ check_hourly_files="FALSE" \ diff --git a/scripts/exregional_run_met_pointstat_vx_ensmean.sh b/scripts/exregional_run_met_pointstat_vx_ensmean.sh index ef814b480c..07e8965d16 100755 --- a/scripts/exregional_run_met_pointstat_vx_ensmean.sh +++ b/scripts/exregional_run_met_pointstat_vx_ensmean.sh @@ -130,7 +130,7 @@ set_vx_fhr_list \ cdate="${CDATE}" \ fcst_len_hrs="${FCST_LEN_HRS}" \ field="$VAR" \ - accum_hh="${ACCUM_HH}" \ + accum_hh="${ACCUM_HH:-}" \ base_dir="${OBS_INPUT_DIR}" \ fn_template="${OBS_INPUT_FN_TEMPLATE}" \ check_hourly_files="FALSE" \ diff --git a/scripts/exregional_run_met_pointstat_vx_ensprob.sh b/scripts/exregional_run_met_pointstat_vx_ensprob.sh index 649051ab96..b618cfe8f8 100755 --- 
b/scripts/exregional_run_met_pointstat_vx_ensprob.sh @@ -130,7 +130,7 @@ set_vx_fhr_list \ cdate="${CDATE}" \ fcst_len_hrs="${FCST_LEN_HRS}" \ field="$VAR" \ - accum_hh="${ACCUM_HH}" \ + accum_hh="${ACCUM_HH:-}" \ base_dir="${OBS_INPUT_DIR}" \ fn_template="${OBS_INPUT_FN_TEMPLATE}" \ check_hourly_files="FALSE" \ diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index 673ac861d7..de2f3edef4 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -64,7 +64,6 @@ export OMP_STACKSIZE=${OMP_STACKSIZE_RUN_POST} # eval ${PRE_TASK_CMDS} -nprocs=$(( NNODES_RUN_POST*PPN_RUN_POST )) if [ -z "${RUN_CMD_POST:-}" ] ; then print_err_msg_exit "\ Run command was not set in machine file. \ diff --git a/scripts/exregional_run_prdgen.sh b/scripts/exregional_run_prdgen.sh index 980a9c4b94..2632b659fd 100755 --- a/scripts/exregional_run_prdgen.sh +++ b/scripts/exregional_run_prdgen.sh @@ -63,7 +63,6 @@ export OMP_STACKSIZE=${OMP_STACKSIZE_RUN_PRDGEN} # eval ${PRE_TASK_CMDS} -nprocs=$(( NNODES_RUN_PRDGEN*PPN_RUN_PRDGEN )) if [ -z "${RUN_CMD_PRDGEN:-}" ] ; then print_err_msg_exit "\ Run command was not set in machine file. 
\ diff --git a/tests/WE2E/machine_suites/comprehensive b/tests/WE2E/machine_suites/comprehensive index 5a46e770f6..4fbe1e392a 100644 --- a/tests/WE2E/machine_suites/comprehensive +++ b/tests/WE2E/machine_suites/comprehensive @@ -6,7 +6,6 @@ grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_RRFS_v1beta grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta -grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0 grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_HRRR_suite_HRRR grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_RAP_suite_WoFS_v0 grid_SUBCONUS_Ind_3km_ics_NAM_lbcs_NAM_suite_GFS_v16 @@ -44,3 +43,5 @@ grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_RRFS_NA_13km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km +grid_RRFS_NA_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta +nco_ensemble diff --git a/tests/WE2E/run_WE2E_tests.py b/tests/WE2E/run_WE2E_tests.py index 6bb4de5c91..5eb0cbedf9 100755 --- a/tests/WE2E/run_WE2E_tests.py +++ b/tests/WE2E/run_WE2E_tests.py @@ -186,8 +186,7 @@ def run_we2e_tests(homedir, args) -> None: config_defaults,"lbcs") if 'verification' in test_cfg: - test_cfg['verification'] = check_task_verification(test_cfg,machine_defaults, - config_defaults) + logging.debug(test_cfg['verification']) logging.debug(f"Writing updated config.yaml for test {test_name}\n"\ "based on specified command-line arguments:\n") @@ -324,15 +323,22 @@ def check_task_get_extrn_bcs(cfg: dict, mach: dict, dflt: dict, ics_or_lbcs: str if ics_or_lbcs not in ["lbcs", "ics"]: raise ValueError("ics_or_lbcs must be set to 'lbcs' or 'ics'") - I_OR_L = ics_or_lbcs.upper() - #Make our lives easier by shortening some dictionary calls cfg_bcs = cfg[f'task_get_extrn_{ics_or_lbcs}'] - # If RUN_TASK_GET_EXTRN_* is 
explicitly set to false, do nothing and return - if cfg.get('workflow_switches', {}).get(f'RUN_TASK_GET_EXTRN_{I_OR_L}', True) is False: + # If the task is turned off explicitly, do nothing and return + # To turn off that task, taskgroups is included without the + # coldstart group, or task_get_extrn_{ics_or_lbcs} is included + # without a value + taskgroups = cfg.get('rocoto', {}).get('taskgroups') + if taskgroups is not None and "coldstart.yaml" not in taskgroups: + return cfg_bcs + rocoto_tasks = cfg.get('rocoto', {}).get('tasks',{}) + if rocoto_tasks.get(f"task_get_extrn_{ics_or_lbcs}", "NA") is None: return cfg_bcs + I_OR_L = ics_or_lbcs.upper() + # If USE_USER_STAGED_EXTRN_FILES not specified or false, do nothing and return if not cfg_bcs.get('USE_USER_STAGED_EXTRN_FILES'): logging.debug('USE_USER_STAGED_EXTRN_FILES not specified or False in '\ @@ -383,54 +389,6 @@ def check_task_get_extrn_bcs(cfg: dict, mach: dict, dflt: dict, ics_or_lbcs: str return cfg_bcs -def check_task_verification(cfg: dict, mach: dict, dflt: dict) -> dict: - """ - Function for checking and updating various settings in verification section of test config yaml - - Args: - cfg : Dictionary loaded from test config file - mach : Dictionary loaded from machine settings file - dflt : Dictionary loaded from default config file - Returns: - cfg_vx : Updated dictionary for verification section of test config - """ - - # Make our lives easier by shortening some dictionary calls - if 'verification' in cfg: - cfg_vx = cfg['verification'] - else: - cfg_vx = dict() - - # If VX_FCST_INPUT_BASEDIR is already explicitly set in the test configuration - # dictionary, keep that value and just return. - if 'VX_FCST_INPUT_BASEDIR' in cfg_vx: - return cfg_vx - - # Attempt to obtain the values of RUN_TASK_RUN_FCST, WRITE_DO_POST, and RUN_TASK_RUN_POST - # from the test configuration dictionary. If not available there, get them from the default - # configuration dictionary. 
- flags = {'RUN_TASK_RUN_FCST': False, 'WRITE_DOPOST': False, 'RUN_TASK_RUN_POST': False} - for section in ['workflow_switches', 'task_run_fcst']: - for flag in flags: - if (section in cfg) and (flag in cfg[section]): - flags[flag] = cfg[section][flag] - elif flag in dflt[section]: - flags[flag] = dflt[section][flag] - - # If UPP is going to be run (either in-line or as a separate set of tasks), set the - # VX_FCST_INPUT_BASEDIR to the default directory for the experiment. Otherwise, set - # it to the value of TEST_VX_FCST_INPUT_BASEDIR in the machine file. - if (flags['RUN_TASK_RUN_FCST'] and flags['WRITE_DOPOST']) or flags['RUN_TASK_RUN_POST']: - cfg_vx['VX_FCST_INPUT_BASEDIR'] = dflt['workflow']['EXPTDIR'] - else: - if 'TEST_VX_FCST_INPUT_BASEDIR' in mach['platform']: - cfg_vx['VX_FCST_INPUT_BASEDIR'] = mach['platform']['TEST_VX_FCST_INPUT_BASEDIR'] - else: - cfg_vx['VX_FCST_INPUT_BASEDIR'] = '' - - return cfg_vx - - def setup_logging(logfile: str = "log.run_WE2E_tests", debug: bool = False) -> None: """ Sets up logging, printing high-priority (INFO and higher) messages to screen, and printing all diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_2017_gfdlmp_regional_plot.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_2017_gfdlmp_regional_plot.yaml index 1c837100e5..927fc442f4 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_2017_gfdlmp_regional_plot.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_2017_gfdlmp_regional_plot.yaml @@ -12,8 +12,9 @@ workflow: DATE_LAST_CYCL: '2019070100' FCST_LEN_HRS: 6 PREEXISTING_DIR_METHOD: rename -workflow_switches: - RUN_TASK_PLOT_ALLVARS: true +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/prep.yaml", 
"parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml"]|include }}' task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS USE_USER_STAGED_EXTRN_FILES: true diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_HRRR.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_HRRR.yaml index 53e1ff1763..2d2ee706a1 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_HRRR.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_HRRR.yaml @@ -15,9 +15,15 @@ workflow: DATE_LAST_CYCL: '2019061518' FCST_LEN_HRS: 6 PREEXISTING_DIR_METHOD: rename -workflow_switches: - RUN_TASK_VX_GRIDSTAT: true - RUN_TASK_VX_POINTSTAT: true +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify.yaml"]|include }}' + metatask_run_ensemble: + task_run_fcst_mem#mem#: + walltime: 01:00:00 + task_get_obs_ccpa: + task_get_obs_mrms: + task_get_obs_ndas: task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS FV3GFS_FILE_FMT_ICS: grib2 @@ -28,5 +34,4 @@ task_get_extrn_lbcs: EXTRN_MDL_LBCS_OFFSET_HRS: 0 USE_USER_STAGED_EXTRN_FILES: true verification: - RUN_TASKS_METVX_DET: true VX_FCST_MODEL_NAME: FV3_GFS_v15p2_CONUS_25km diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml index af33066fa0..9ec3925c59 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml +++ 
b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml @@ -14,11 +14,12 @@ workflow: DATE_LAST_CYCL: '2021051212' FCST_LEN_HRS: 6 PREEXISTING_DIR_METHOD: rename -workflow_switches: - RUN_TASK_VX_GRIDSTAT: true - RUN_TASK_VX_POINTSTAT: true - RUN_TASK_VX_ENSGRID: true - RUN_TASK_VX_ENSPOINT: true +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify.yaml", "parm/wflow/verify_ensgrid.yaml"]|include }}' + metatask_run_ensemble: + task_run_fcst_mem#mem#: + walltime: 01:00:00 task_get_extrn_ics: EXTRN_MDL_NAME_ICS: NAM USE_USER_STAGED_EXTRN_FILES: true @@ -30,7 +31,5 @@ global: DO_ENSEMBLE: true NUM_ENS_MEMBERS: 2 verification: - RUN_TASKS_METVX_DET: true - RUN_TASKS_METVX_ENS: true VX_FCST_MODEL_NAME: FV3_GFS_v15p2_CONUS_25km diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_NA_13km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_NA_13km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml index 0d0d6a847b..fa3c5ff504 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_NA_13km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_NA_13km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml @@ -23,15 +23,21 @@ task_get_extrn_lbcs: LBC_SPEC_INTVL_HRS: 6 USE_USER_STAGED_EXTRN_FILES: true task_make_ics: - NNODES_MAKE_ICS: 12 - PPN_MAKE_ICS: 4 OMP_STACKSIZE_MAKE_ICS: 2048m -task_make_lbcs: - NNODES_MAKE_LBCS: 12 - PPN_MAKE_LBCS: 4 - WTIME_MAKE_LBCS: 01:00:00 task_run_fcst: OMP_STACKSIZE_RUN_FCST: 2048m -task_run_post: - NNODES_RUN_POST: 6 - PPN_RUN_POST: 12 +rocoto: + tasks: + metatask_run_ensemble: + task_make_ics_mem#mem#: + nnodes: 12 + ppn: 4 + task_make_lbcs_mem#mem#: + nnodes: 12 + ppn: 4 + walltime: 01:00:00 + 
metatask_run_ens_post: + metatask_run_post: + task_run_post_mem#mem#_f#fhr#: + nnodes: 6 + ppn: 12 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_NA_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_NA_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml index 56d1587595..459535b0c6 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_NA_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_NA_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml @@ -15,8 +15,6 @@ workflow: DATE_LAST_CYCL: '2019070100' FCST_LEN_HRS: 3 PREEXISTING_DIR_METHOD: rename -workflow_switches: - RUN_TASK_RUN_PRDGEN: true task_make_orog: OMP_NUM_THREADS_MAKE_OROG: 24 task_get_extrn_ics: @@ -27,20 +25,30 @@ task_get_extrn_lbcs: LBC_SPEC_INTVL_HRS: 3 USE_USER_STAGED_EXTRN_FILES: true task_make_ics: - NNODES_MAKE_ICS: 16 - PPN_MAKE_ICS: 4 OMP_STACKSIZE_MAKE_ICS: 2048m -task_make_lbcs: - NNODES_MAKE_LBCS: 12 - PPN_MAKE_LBCS: 4 - WTIME_MAKE_LBCS: 01:00:00 task_run_fcst: OMP_STACKSIZE_RUN_FCST: 2048m DT_ATMOS: 40 - WTIME_RUN_FCST: 01:00:00 -task_run_post: - NNODES_RUN_POST: 8 - PPN_RUN_POST: 12 task_run_prdgen: DO_PARALLEL_PRDGEN: true - PPN_RUN_PRDGEN: 22 +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/prdgen.yaml"]|include }}' + task_make_orog: + walltime: 01:00:00 + metatask_run_ensemble: + task_make_ics_mem#mem#: + nnodes: 16 + ppn: 4 + task_make_lbcs_mem#mem#: + nnodes: 12 + ppn: 4 + walltime: 01:00:00 + task_run_fcst_mem#mem#: + walltime: 01:00:00 + metatask_run_ens_post: + metatask_run_post: + task_run_post_mem#mem#_f#fhr#: + ppn: 12 + nnodes: 8 + diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16.yaml 
b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16.yaml index 4ef3f80834..ea5d319ede 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16.yaml @@ -12,10 +12,9 @@ workflow: DATE_LAST_CYCL: '2019061500' FCST_LEN_HRS: 6 PREEXISTING_DIR_METHOD: rename -workflow_switches: - RUN_TASK_MAKE_GRID: false - RUN_TASK_MAKE_OROG: false - RUN_TASK_MAKE_SFC_CLIMO: false +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}' task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS FV3GFS_FILE_FMT_ICS: grib2 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16.yaml index 9709986da7..79af5461e3 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16.yaml @@ -12,10 +12,9 @@ workflow: DATE_LAST_CYCL: '2022081012' FCST_LEN_HRS: 6 PREEXISTING_DIR_METHOD: rename -workflow_switches: - RUN_TASK_MAKE_GRID: false - RUN_TASK_MAKE_OROG: false - RUN_TASK_MAKE_SFC_CLIMO: false +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}' task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS EXTRN_MDL_ICS_OFFSET_HRS: 6 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.yaml 
b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.yaml index 350bac9fd2..00a36c70bd 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.yaml @@ -12,10 +12,9 @@ workflow: DATE_LAST_CYCL: '2019061500' FCST_LEN_HRS: 6 PREEXISTING_DIR_METHOD: rename -workflow_switches: - RUN_TASK_MAKE_GRID: false - RUN_TASK_MAKE_OROG: false - RUN_TASK_MAKE_SFC_CLIMO: false +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}' task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS FV3GFS_FILE_FMT_ICS: grib2 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml index 287465aeea..be68e9d45e 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml @@ -12,10 +12,9 @@ workflow: DATE_LAST_CYCL: '2020081000' FCST_LEN_HRS: 6 PREEXISTING_DIR_METHOD: rename -workflow_switches: - RUN_TASK_MAKE_GRID: false - RUN_TASK_MAKE_OROG: false - RUN_TASK_MAKE_SFC_CLIMO: false +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}' task_get_extrn_ics: EXTRN_MDL_NAME_ICS: HRRR USE_USER_STAGED_EXTRN_FILES: true diff --git a/tests/WE2E/test_configs/release_SRW_v1/config.GST_release_public_v1.yaml b/tests/WE2E/test_configs/release_SRW_v1/config.GST_release_public_v1.yaml 
index 72dc3e169e..80715593d4 100644 --- a/tests/WE2E/test_configs/release_SRW_v1/config.GST_release_public_v1.yaml +++ b/tests/WE2E/test_configs/release_SRW_v1/config.GST_release_public_v1.yaml @@ -20,5 +20,8 @@ task_get_extrn_lbcs: LBC_SPEC_INTVL_HRS: 6 FV3GFS_FILE_FMT_LBCS: grib2 USE_USER_STAGED_EXTRN_FILES: true -task_run_fcst: - WTIME_RUN_FCST: 01:00:00 +rocoto: + tasks: + metatask_run_ensemble: + task_run_fcst_mem#mem#: + walltime: 01:00:00 diff --git a/tests/WE2E/test_configs/verification/config.MET_ensemble_verification.yaml b/tests/WE2E/test_configs/verification/config.MET_ensemble_verification.yaml index e5004911fe..b2b8d75dda 120000 --- a/tests/WE2E/test_configs/verification/config.MET_ensemble_verification.yaml +++ b/tests/WE2E/test_configs/verification/config.MET_ensemble_verification.yaml @@ -1 +1 @@ -../grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_HRRR.yaml \ No newline at end of file +../grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml \ No newline at end of file diff --git a/tests/WE2E/test_configs/verification/config.MET_verification_only_vx.yaml b/tests/WE2E/test_configs/verification/config.MET_verification_only_vx.yaml index 87c68c8bad..44fea42c25 100644 --- a/tests/WE2E/test_configs/verification/config.MET_verification_only_vx.yaml +++ b/tests/WE2E/test_configs/verification/config.MET_verification_only_vx.yaml @@ -15,45 +15,29 @@ workflow: FCST_LEN_HRS: 6 PREEXISTING_DIR_METHOD: rename -workflow_switches: # # This test assumes that the post-processed forecast files are staged -# (i.e., not generated by running the forecast model). Thus, turn off -# pre-processing, forecast, post-processing, and other tasks ordinarily -# needed for generation of post-processed forecast files. 
-# - RUN_TASK_MAKE_GRID: false - RUN_TASK_MAKE_OROG: false - RUN_TASK_MAKE_SFC_CLIMO: false - RUN_TASK_GET_EXTRN_ICS: false - RUN_TASK_GET_EXTRN_LBCS: false - RUN_TASK_MAKE_ICS: false - RUN_TASK_MAKE_LBCS: false - RUN_TASK_RUN_FCST: false - RUN_TASK_RUN_POST: false -# -# This test assumes the observation files are staged. Thus, deactivate -# the GET_OBS_... tasks. Note we do not specify the obs staging directories -# (CCPA_OBS_DIR, MRMS_OBS_DIR, and NDAS_OBS_DIR) because those will be -# automatically set (in a platform-dependent way using the machine file) -# by the script that runs the WE2E tests. -# - RUN_TASK_GET_OBS_CCPA: false - RUN_TASK_GET_OBS_MRMS: false - RUN_TASK_GET_OBS_NDAS: false -# -# Turn on verification tasks. -# - RUN_TASK_VX_GRIDSTAT: true - RUN_TASK_VX_POINTSTAT: true +# (i.e., not generated by running the forecast model). +# It also assumes staged observations, so turn off those tasks. +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/verify.yaml"]|include }}' + task_get_obs_ccpa: + task_get_obs_mrms: + task_get_obs_ndas: + metatask_vx_ens_member: + metatask_GridStat_MRMS_mem#mem#: + task_run_MET_GridStat_vx_#VAR#_mem#mem#: + dependency: + metatask_PointStat_mem#mem#: + task_run_MET_PointStat_vx_#VAR#_mem#mem#: + dependency: + metatask_GridStat_APCP_acc: + metatask_vx_ens_member_acc: + task_run_MET_PcpCombine_fcst_APCP#ACCUM_HR#h_mem#mem#: + dependency: -# -# In the "verification" section below, we don't explicitly set the location -# of the staged forecast files (via VX_FCST_INPUT_BASEDIR) because this -# location gets set automatically by the script that runs the WE2E tests. -# The script sets the location in a platform-dependent way using the -# appropriate machine file. 
-# verification: - RUN_TASKS_METVX_DET: true VX_FCST_MODEL_NAME: FV3_GFS_v15p2_CONUS_25km + VX_FCST_INPUT_DIR: '{{ platform.get("TEST_VX_FCST_INPUT_DIR") }}' + diff --git a/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.yaml b/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.yaml index f17039df85..53d10f002a 100644 --- a/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.yaml +++ b/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.yaml @@ -25,10 +25,9 @@ workflow: DATE_LAST_CYCL: '2019070100' FCST_LEN_HRS: 6 PREEXISTING_DIR_METHOD: rename -workflow_switches: - RUN_TASK_GET_EXTRN_ICS: false - RUN_TASK_GET_EXTRN_LBCS: false - RUN_TASK_MAKE_ICS: false - RUN_TASK_MAKE_LBCS: false - RUN_TASK_RUN_FCST: false - RUN_TASK_RUN_POST: false +rocoto: + tasks: + task_get_extrn_ics: + task_get_extrn_lbcs: + metatask_run_ensemble: + metatask_run_ens_post: diff --git a/tests/WE2E/test_configs/wflow_features/config.nco_ensemble.yaml b/tests/WE2E/test_configs/wflow_features/config.nco_ensemble.yaml index d12aeb80f6..739b6bb3c5 100644 --- a/tests/WE2E/test_configs/wflow_features/config.nco_ensemble.yaml +++ b/tests/WE2E/test_configs/wflow_features/config.nco_ensemble.yaml @@ -19,10 +19,6 @@ workflow: INCR_CYCL_FREQ: 12 FCST_LEN_HRS: 6 PREEXISTING_DIR_METHOD: rename -workflow_switches: - RUN_TASK_MAKE_GRID: false - RUN_TASK_MAKE_OROG: false - RUN_TASK_MAKE_SFC_CLIMO: false task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS USE_USER_STAGED_EXTRN_FILES: true @@ -33,3 +29,6 @@ task_get_extrn_lbcs: global: DO_ENSEMBLE: true NUM_ENS_MEMBERS: 2 +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}' diff --git a/tests/WE2E/test_configs/wflow_features/config.pregen_grid_orog_sfc_climo.yaml b/tests/WE2E/test_configs/wflow_features/config.pregen_grid_orog_sfc_climo.yaml index 8ccf1a4bd6..e3e6a794e3 100644 --- a/tests/WE2E/test_configs/wflow_features/config.pregen_grid_orog_sfc_climo.yaml +++ 
b/tests/WE2E/test_configs/wflow_features/config.pregen_grid_orog_sfc_climo.yaml @@ -11,10 +11,6 @@ workflow: DATE_LAST_CYCL: '2019070100' FCST_LEN_HRS: 6 PREEXISTING_DIR_METHOD: rename -workflow_switches: - RUN_TASK_MAKE_GRID: false - RUN_TASK_MAKE_OROG: false - RUN_TASK_MAKE_SFC_CLIMO: false task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS USE_USER_STAGED_EXTRN_FILES: true @@ -22,3 +18,8 @@ task_get_extrn_lbcs: EXTRN_MDL_NAME_LBCS: FV3GFS LBC_SPEC_INTVL_HRS: 3 USE_USER_STAGED_EXTRN_FILES: true +rocoto: + tasks: + task_make_grid: + task_make_orog: + task_make_sfc_climo: diff --git a/ush/config.aqm.community.yaml b/ush/config.aqm.community.yaml index 173cee64f9..a6dd2f22be 100644 --- a/ush/config.aqm.community.yaml +++ b/ush/config.aqm.community.yaml @@ -5,7 +5,7 @@ user: MACHINE: [hera or wcoss2] ACCOUNT: [account name] workflow: - USE_CRON_TO_RELAUNCH: true + USE_CRON_TO_RELAUNCH: false CRON_RELAUNCH_INTVL_MNTS: 3 EXPT_SUBDIR: aqm_community_aqmna13 PREDEF_GRID_NAME: AQM_NA_13km @@ -25,22 +25,16 @@ workflow: DO_REAL_TIME: false nco: NET: aqm -workflow_switches: - RUN_TASK_MAKE_GRID: true - RUN_TASK_MAKE_OROG: true - RUN_TASK_MAKE_SFC_CLIMO: true - RUN_TASK_RUN_POST: true - RUN_TASK_AQM_ICS: true - RUN_TASK_AQM_LBCS: true - RUN_TASK_NEXUS_GFS_SFC: true - RUN_TASK_NEXUS_EMISSION: true - RUN_TASK_FIRE_EMISSION: true - RUN_TASK_POINT_SOURCE: true - RUN_TASK_PRE_POST_STAT: true - RUN_TASK_POST_STAT_O3: false - RUN_TASK_POST_STAT_PM25: false - RUN_TASK_BIAS_CORRECTION_O3: false - RUN_TASK_BIAS_CORRECTION_PM25: false +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/aqm_all.yaml"]|include }}' + task_post_stat_o3: + task_post_stat_pm25: + task_bias_correction_o3: + task_bias_correction_pm25: + metatask_run_ensemble: + task_run_fcst_mem#mem#: + walltime: 02:00:00 task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS FV3GFS_FILE_FMT_ICS: netcdf @@ -56,7 +50,6 @@ task_run_fcst: LAYOUT_Y: 34 BLOCKSIZE: 16 RESTART_INTERVAL: 6 - 
WTIME_RUN_FCST: 02:00:00 QUILTING: true PRINT_ESMF: false task_run_post: diff --git a/ush/config.aqm.nco.realtime.yaml b/ush/config.aqm.nco.realtime.yaml index 0d6cee3cb4..55043cad4a 100644 --- a/ush/config.aqm.nco.realtime.yaml +++ b/ush/config.aqm.nco.realtime.yaml @@ -5,7 +5,7 @@ user: MACHINE: wcoss2 ACCOUNT: [account name] workflow: - USE_CRON_TO_RELAUNCH: true + USE_CRON_TO_RELAUNCH: false CRON_RELAUNCH_INTVL_MNTS: 3 EXPT_SUBDIR: aqm_nco_aqmna13km PREDEF_GRID_NAME: AQM_NA_13km @@ -33,22 +33,16 @@ nco: model_ver: v7.0 RUN: aqm_nco_aqmna13km OPSROOT: /path/to/custom/opsroot -workflow_switches: - RUN_TASK_MAKE_GRID: false - RUN_TASK_MAKE_OROG: false - RUN_TASK_MAKE_SFC_CLIMO: false - RUN_TASK_RUN_POST: true - RUN_TASK_AQM_ICS: true - RUN_TASK_AQM_LBCS: true - RUN_TASK_NEXUS_GFS_SFC: true - RUN_TASK_NEXUS_EMISSION: true - RUN_TASK_FIRE_EMISSION: true - RUN_TASK_POINT_SOURCE: true - RUN_TASK_PRE_POST_STAT: true - RUN_TASK_POST_STAT_O3: true - RUN_TASK_POST_STAT_PM25: true - RUN_TASK_BIAS_CORRECTION_O3: true - RUN_TASK_BIAS_CORRECTION_PM25: true +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/aqm_all.yaml", "parm/wflow/coldstart.yaml"]|include }}' + task_get_extrn_lbcs: + walltime: 02:00:00 + metatask_run_ensemble: + task_run_fcst_mem#mem#: + walltime: 04:00:00 + task_aqm_lbcs: + walltime: 01:00:00 task_make_grid: GRID_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/DOMAIN_DATA/AQM_NA_13km task_make_orog: @@ -64,21 +58,17 @@ task_get_extrn_lbcs: LBC_SPEC_INTVL_HRS: 6 FV3GFS_FILE_FMT_LBCS: netcdf EXTRN_MDL_LBCS_OFFSET_HRS: 0 - WTIME_GET_EXTRN_LBCS: 02:00:00 task_run_fcst: DT_ATMOS: 180 LAYOUT_X: 50 LAYOUT_Y: 34 BLOCKSIZE: 16 RESTART_INTERVAL: 6 18 - WTIME_RUN_FCST: 04:00:00 OMP_NUM_THREADS_RUN_FCST: 1 QUILTING: true PRINT_ESMF: false task_run_post: POST_OUTPUT_DOMAIN_NAME: 793 -task_aqm_lbcs: - WTIME_AQM_LBCS: 01:00:00 global: DO_ENSEMBLE: false NUM_ENS_MEMBERS: 2 diff --git a/ush/config.community.yaml b/ush/config.community.yaml index 0ac4ec9d77..565f8ec613 100644 --- 
a/ush/config.community.yaml +++ b/ush/config.community.yaml @@ -22,19 +22,6 @@ workflow: PREEXISTING_DIR_METHOD: rename VERBOSE: true COMPILER: intel -workflow_switches: - RUN_TASK_MAKE_GRID: true - RUN_TASK_MAKE_OROG: true - RUN_TASK_MAKE_SFC_CLIMO: true - RUN_TASK_RUN_PRDGEN: false - RUN_TASK_GET_OBS_CCPA: false - RUN_TASK_GET_OBS_MRMS: false - RUN_TASK_GET_OBS_NDAS: false - RUN_TASK_VX_GRIDSTAT: false - RUN_TASK_VX_POINTSTAT: false - RUN_TASK_VX_ENSGRID: false - RUN_TASK_VX_ENSPOINT: false - RUN_TASK_PLOT_ALLVARS: false task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS FV3GFS_FILE_FMT_ICS: grib2 @@ -43,7 +30,6 @@ task_get_extrn_lbcs: LBC_SPEC_INTVL_HRS: 6 FV3GFS_FILE_FMT_LBCS: grib2 task_run_fcst: - WTIME_RUN_FCST: 02:00:00 QUILTING: true task_plot_allvars: COMOUT_REF: "" @@ -52,3 +38,8 @@ global: NUM_ENS_MEMBERS: 2 verification: VX_FCST_MODEL_NAME: FV3_GFS_v16_CONUS_25km +rocoto: + tasks: + metatask_run_ensemble: + task_run_fcst_mem#mem#: + walltime: 02:00:00 diff --git a/ush/config.nco.yaml b/ush/config.nco.yaml index 7955c00255..0019cf989c 100644 --- a/ush/config.nco.yaml +++ b/ush/config.nco.yaml @@ -16,12 +16,6 @@ workflow: PREEXISTING_DIR_METHOD: rename VERBOSE: true COMPILER: intel -workflow_switches: - RUN_TASK_MAKE_GRID: false - RUN_TASK_MAKE_OROG: false - RUN_TASK_MAKE_SFC_CLIMO: false - RUN_TASK_RUN_PRDGEN: false - RUN_TASK_PLOT_ALLVARS: false nco: NET: rrfs model_ver: v1.0 @@ -34,10 +28,15 @@ task_get_extrn_lbcs: LBC_SPEC_INTVL_HRS: 3 FV3GFS_FILE_FMT_LBCS: grib2 task_run_fcst: - WTIME_RUN_FCST: 01:00:00 WRITE_DOPOST: true QUILTING: true task_plot_allvars: COMOUT_REF: "" task_run_post: POST_OUTPUT_DOMAIN_NAME: conus_25km +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}' + metatask_run_ensemble: + task_run_fcst_mem#mem#: + walltime: 01:00:00 diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index de52981d03..559cf64625 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ 
-159,6 +159,10 @@ platform: # If this is not set or set to an empty string, it will be (re)set to a # machine-dependent value. # + # REMOVE_MEMORY: + # Boolean flag determining whether to remove the memory flag for the + # Rocoto XML. Some platforms are not configured to accept the memory + # flag, so must not be included at all in the XML. #----------------------------------------------------------------------- # WORKFLOW_MANAGER: "" @@ -175,6 +179,7 @@ platform: QUEUE_HPSS: "" PARTITION_FCST: "" QUEUE_FCST: "" + REMOVE_MEMORY: False # #----------------------------------------------------------------------- # @@ -242,78 +247,88 @@ platform: # # CCPA_OBS_DIR: # User-specified location of top-level directory where CCPA hourly - # precipitation files used by METplus are located. This parameter needs - # to be set for both user-provided observations and for observations - # that are retrieved from the NOAA HPSS (if the user has access) via - # the TN_GET_OBS_CCPA task (activated in workflow by setting - # RUN_TASK_GET_OBS_CCPA=true). In the case of pulling observations - # directly from NOAA HPSS, the data retrieved will be placed in this - # directory. Please note, this path must be defind as - # /full-path-to-obs/ccpa/proc. METplus is configured to verify 01-, - # 03-, 06-, and 24-h accumulated precipitation using hourly CCPA files. - # METplus configuration files require the use of predetermined directory - # structure and file names. Therefore, if the CCPA files are user - # provided, they need to follow the anticipated naming structure: - # {YYYYMMDD}/ccpa.t{HH}z.01h.hrap.conus.gb2, where YYYY is the 4-digit - # valid year, MM the 2-digit valid month, DD the 2-digit valid day of - # the month, and HH the 2-digit valid hour of the day. In addition, a - # caveat is noted for using hourly CCPA data. There is a problem with - # the valid time in the metadata for files valid from 19 - 00 UTC (or - # files under the '00' directory). 
The script to pull the CCPA data - from the NOAA HPSS has an example of how to account for this as well - as organizing the data into a more intuitive format: - scripts/exregional_get_ccpa_files.sh. When a fix is provided, it will - be accounted for in the exregional_get_ccpa_files.sh script. + # precipitation files used by METplus are located. This parameter + # needs to be set for both user-provided observations and for + # observations that are retrieved from the NOAA HPSS (if the user has + # access) via the get_obs_ccpa task (activated in workflow by + # including the parm/wflow/verify.yaml task set in the wflow config). + # In the case of pulling observations directly from NOAA HPSS, the + # data retrieved will be placed in this directory. Please note, this + # path must be defined as /full-path-to-obs/ccpa/proc. METplus is + # configured to verify 01-, 03-, 06-, and 24-h accumulated + # precipitation using hourly CCPA files. + # METplus configuration files require the use of predetermined + # directory structure and file names. Therefore, if the CCPA files are + # user provided, they need to follow the anticipated naming structure: + # + # {YYYYMMDD}/ccpa.t{HH}z.01h.hrap.conus.gb2, + # + # where YYYY is the 4-digit valid year, MM the 2-digit valid month, DD + # the 2-digit valid day of the month, and HH the 2-digit valid hour of + # the day. In addition, a caveat is noted for using hourly CCPA data. + # There is a problem with the valid time in the metadata for files + # valid from 19 - 00 UTC (or files under the '00' directory). The + # script to pull the CCPA data from the NOAA HPSS has an example of + # how to account for this as well as organizing the data into a more + # intuitive format: scripts/exregional_get_ccpa_files.sh. When a fix + # is provided, it will be accounted for in the + # exregional_get_ccpa_files.sh script. 
# # MRMS_OBS_DIR: # User-specified location of top-level directory where MRMS composite - # reflectivity files used by METplus are located. This parameter needs - # to be set for both user-provided observations and for observations - # that are retrieved from the NOAA HPSS (if the user has access) via the - # TN_GET_OBS_MRMS task (activated in workflow by setting - # RUN_TASK_GET_OBS_MRMS=true). In the case of pulling observations - # directly from NOAA HPSS, the data retrieved will be placed in this - # directory. Please note, this path must be defind as - # /full-path-to-obs/mrms/proc. METplus configuration files require the - # use of predetermined directory structure and file names. Therefore, if - # the MRMS files are user provided, they need to follow the anticipated - # naming structure: + # reflectivity files used by METplus are located. This parameter + # needs to be set for both user-provided observations and for + # observations that are retrieved from the NOAA HPSS (if the user has + # access) via the get_obs_mrms task (activated in workflow by + # including the parm/wflow/verify.yaml task set in the wflow config). + # In the case of pulling observations directly from NOAA HPSS, the + # data retrieved will be placed in this directory. Please note, this + # path must be defined as /full-path-to-obs/mrms/proc. + # METplus configuration files require the use of predetermined + # directory structure and file names. Therefore, if the MRMS files are + # user provided, they need to follow the anticipated naming structure: + # + # {YYYYMMDD}/MergedReflectivityQCComposite_00.50_{YYYYMMDD}-{HH}{mm}{SS}.grib2, - # where YYYY is the 4-digit valid year, MM the 2-digit valid month, DD - # the 2-digit valid day of the month, HH the 2-digit valid hour of the - # day, mm the 2-digit valid minutes of the hour, and SS is the two-digit - # valid seconds of the hour. 
In addition, METplus is configured to look - # for a MRMS composite reflectivity file for the valid time of the - # forecast being verified; since MRMS composite reflectivity files do - # not always exactly match the valid time, a script, within the main - # script to retrieve MRMS data from the NOAA HPSS, is used to identify - # and rename the MRMS composite reflectivity file to match the valid - # time of the forecast. The script to pull the MRMS data from the NOAA - # HPSS has an example of the expected file naming structure: - # scripts/exregional_get_mrms_files.sh. This script calls the script - # used to identify the MRMS file closest to the valid time: - # ush/mrms_pull_topofhour.py. + # + # where YYYY is the 4-digit valid year, MM the 2-digit valid month, DD + # the 2-digit valid day of the month, HH the 2-digit valid hour of the + # day, mm the 2-digit valid minutes of the hour, and SS is the + # two-digit valid seconds of the hour. In addition, METplus is + # configured to look for a MRMS composite reflectivity file for the + # valid time of the forecast being verified; since MRMS composite + # reflectivity files do not always exactly match the valid time, a + # script, within the main script to retrieve MRMS data from the NOAA + # HPSS, is used to identify and rename the MRMS composite reflectivity + # file to match the valid time of the forecast. The script to pull + # the MRMS data from the NOAA HPSS has an example of the expected file + # naming structure: scripts/exregional_get_mrms_files.sh. This script + # calls the script used to identify the MRMS file closest to the valid + # time: ush/mrms_pull_topofhour.py. # # NDAS_OBS_DIR: # User-specified location of top-level directory where NDAS prepbufr # files used by METplus are located. 
This parameter needs to be set for # both user-provided observations and for observations that are # retrieved from the NOAA HPSS (if the user has access) via the - # TN_GET_OBS_NDAS task (activated in workflow by setting  - # RUN_TASK_GET_OBS_NDAS=true). In the case of pulling observations - # directly from NOAA HPSS, the data retrieved will be placed in this - # directory. Please note, this path must be defind as - # /full-path-to-obs/ndas/proc. METplus is configured to verify - # near-surface variables hourly and upper-air variables at times valid - # at 00 and 12 UTC with NDAS prepbufr files. METplus configuration files - # require the use of predetermined file names. Therefore, if the NDAS - # files are user provided, they need to follow the anticipated naming - # structure: prepbufr.ndas.{YYYYMMDDHH}, where YYYY is the 4-digit valid - # year, MM the 2-digit valid month, DD the 2-digit valid day of the - # month, and HH the 2-digit valid hour of the day. The script to pull - # the NDAS data from the NOAA HPSS has an example of how to rename the - # NDAS data into a more intuitive format with the valid time listed in - # the file name: scripts/exregional_get_ndas_files.sh + # get_obs_ndas task (activated in workflow by including the + # parm/wflow/verify.yaml task set in the wflow config).  + # In the case of pulling observations directly from NOAA HPSS, the + # data retrieved will be placed in this directory. Please note, this + # path must be defined as /full-path-to-obs/ndas/proc. METplus is + # configured to verify near-surface variables hourly and upper-air + # variables at times valid at 00 and 12 UTC with NDAS prepbufr files. + # METplus configuration files require the use of predetermined file + # names. 
Therefore, if the NDAS files are user provided, they need to + # follow the anticipated naming structure: + # + # prepbufr.ndas.{YYYYMMDDHH}, + # + # where YYYY is the 4-digit valid year, MM the 2-digit valid month, DD + # the 2-digit valid day of the month, and HH the 2-digit valid hour of + # the day. The script to pull the NDAS data from the NOAA HPSS has an + # example of how to rename the NDAS data into a more intuitive format + # with the valid time listed in the file name: + # scripts/exregional_get_ndas_files.sh # #----------------------------------------------------------------------- # @@ -432,7 +447,7 @@ workflow: # #----------------------------------------------------------------------- # - WORKFLOW_ID: "" + WORKFLOW_ID: !nowtimestamp '' # #----------------------------------------------------------------------- # @@ -586,12 +601,16 @@ workflow: # to each workflow task) in order to make all the experiment variables # available in those scripts. # + # ROCOTO_YAML_FN: + # Name of the YAML file containing the YAML workflow definition from + # which the Rocoto XML file is created. + # # EXTRN_MDL_VAR_DEFNS_FN: # Name of file (a shell script) containing the defintions of variables # associated with the external model from which ICs or LBCs are generated. This - # file is created by the TN_GET_EXTRN_* task because the values of the variables + # file is created by the get_extrn_* task because the values of the variables # it contains are not known before this task runs. The file is then sourced by - # the TN_MAKE_ICS and TN_MAKE_LBCS tasks. + # the make_ics and make_lbcs tasks. 
# # WFLOW_LAUNCH_SCRIPT_FN: # Name of the script that can be used to (re)launch the experiment's rocoto @@ -643,11 +662,13 @@ workflow: FCST_MODEL: "ufs-weather-model" WFLOW_XML_FN: "FV3LAM_wflow.xml" GLOBAL_VAR_DEFNS_FN: "var_defns.sh" + ROCOTO_YAML_FN: "rocoto_defns.yaml" EXTRN_MDL_VAR_DEFNS_FN: "extrn_mdl_var_defns" WFLOW_LAUNCH_SCRIPT_FN: "launch_FV3LAM_wflow.sh" WFLOW_LAUNCH_LOG_FN: "log.launch_FV3LAM_wflow" GLOBAL_VAR_DEFNS_FP: '{{ [EXPTDIR, GLOBAL_VAR_DEFNS_FN] |path_join }}' + ROCOTO_YAML_FP: '{{ [EXPTDIR, ROCOTO_YAML_FN] |path_join }}' WFLOW_LAUNCH_SCRIPT_FP: '{{ [user.USHdir, WFLOW_LAUNCH_SCRIPT_FN] |path_join }}' WFLOW_LAUNCH_LOG_FP: '{{ [EXPTDIR, WFLOW_LAUNCH_LOG_FN] |path_join }}' # @@ -680,7 +701,7 @@ workflow: # #----------------------------------------------------------------------- # - FIXdir: '{{ EXPTDIR if workflow_switches.RUN_TASK_MAKE_GRID else [user.HOMEdir, "fix"]|path_join }}' + FIXdir: '{{ EXPTDIR if rocoto.tasks.get("task_make_grid") else [user.HOMEdir, "fix"]|path_join }}' FIXam: '{{ [FIXdir, "fix_am"]|path_join }}' FIXclim: '{{ [FIXdir, "fix_clim"]|path_join }}' FIXlam: '{{ [FIXdir, "fix_lam"]|path_join }}' @@ -810,12 +831,22 @@ workflow: # Default is 24, which means cycle_freq=24:00:00 # # FCST_LEN_HRS: - # The length of each forecast, in integer hours. + # The length of each forecast, in integer hours. The short forecast + # length when there are different lengths. + # + # LONG_FCST_LEN: + # The length of the longer forecast in integer hours in a system that + # varies the length of the forecast by time of day; other cycles run + # forecasts for a shorter period. There is no need for the user to + # update this value directly, as it is derived from FCST_LEN_CYCL when + # FCST_LEN_HRS=-1. # # FCST_LEN_CYCL: # The length of forecast for each cycle date in integer hours. # This is valid only when FCST_LEN_HRS = -1. # This pattern is recurred for all cycle dates. 
+ # Must have the same number of entries as cycles per day, or if less + # than one day the entries must include the length of each cycle to be + # run. # #----------------------------------------------------------------------- # @@ -828,6 +859,7 @@ workflow: - 12 - 12 - 6 + LONG_FCST_LEN: '{% if FCST_LEN_HRS < 0 %}{{ FCST_LEN_CYCL|min }}{% else %}{{ FCST_LEN_HRS }}{% endif %}' # #----------------------------------------------------------------------- @@ -963,7 +995,7 @@ nco: PACKAGEROOT: '{{ OPSROOT }}/packages' DATAROOT: '{{ OPSROOT }}/tmp' DCOMROOT: '{{ OPSROOT }}/dcom' - LOGBASEDIR: '{{ OPSROOT }}/output' + LOGBASEDIR: '{% if user.RUN_ENVIR == "nco" %}{{ [OPSROOT, "output"]|path_join }}{% else %}{{ [workflow.EXPTDIR, "log"]|path_join }}{% endif %}' EXTROOT: '{{ OPSROOT }}/ext' COMIN_BASEDIR: '{{ COMROOT }}/{{ NET }}/{{ model_ver }}' COMOUT_BASEDIR: '{{ COMROOT }}/{{ NET }}/{{ model_ver }}' @@ -985,176 +1017,20 @@ nco: MAILTO: "" MAILCC: "" - -#---------------------------- -# WORKFLOW SWITCHES config parameters -#----------------------------- -workflow_switches: - # - #----------------------------------------------------------------------- - # - # Set flags (and related directories) that determine whether various - # workflow tasks should be run. Note that the TN_MAKE_GRID, TN_MAKE_OROG, - # and TN_MAKE_SFC_CLIMO are all cycle-independent tasks, i.e. if they - # are to be run, they do so only once at the beginning of the workflow - # before any cycles are run. Definitions: - # - # RUN_TASK_MAKE_GRID: - # Flag that determines whether the TN_MAKE_GRID task is to be run. If - # this is set to true, the grid generation task is run and new grid - # files are generated. If it is set to false, then the scripts look - # for pregenerated grid files in the directory specified by GRID_DIR - # (see below). - # - # RUN_TASK_MAKE_OROG: - # Same as RUN_TASK_MAKE_GRID but for the TN_MAKE_OROG task. 
- # - # RUN_TASK_MAKE_SFC_CLIMO: - # Same as RUN_TASK_MAKE_GRID but for the TN_MAKE_SFC_CLIMO task. - # - # RUN_TASK_GET_EXTRN_ICS: - # Flag that determines whether the TN_GET_EXTRN_ICS task is to be run. - # - # RUN_TASK_GET_EXTRN_LBCS: - # Flag that determines whether the TN_GET_EXTRN_LBCS task is to be run. - # - # RUN_TASK_MAKE_ICS: - # Flag that determines whether the TN_MAKE_ICS task is to be run. - # - # RUN_TASK_MAKE_LBCS: - # Flag that determines whether the TN_MAKE_LBCS task is to be run. - # - # RUN_TASK_RUN_FCST: - # Flag that determines whether the TN_RUN_FCST task is to be run. - # - # RUN_TASK_RUN_POST: - # Flag that determines whether the TN_RUN_POST task is to be run. - # - # RUN_TASK_RUN_PRDGEN: - # Flag that determines whether the TN_RUN_PRDGEN task is to be run. - # - # RUN_TASK_GET_OBS_CCPA: - # Flag that determines whether to run the TN_GET_OBS_CCPA task, which - # retrieves the CCPA hourly precipitation files used by METplus from NOAA HPSS. - # - # RUN_TASK_GET_OBS_MRMS: - # Flag that determines whether to run the TN_GET_OBS_MRMS task, which - # retrieves the MRMS composite reflectivity files used by METplus from NOAA HPSS. - # - # RUN_TASK_GET_OBS_NDAS: - # Flag that determines whether to run the TN_GET_OBS_NDAS task, which - # retrieves the NDAS PrepBufr files used by METplus from NOAA HPSS. - # - # RUN_TASK_VX_GRIDSTAT: - # Flag that determines whether the grid-stat verification task is to be - # run. - # - # RUN_TASK_VX_POINTSTAT: - # Flag that determines whether the point-stat verification task is to be - # run. - # - # RUN_TASK_VX_ENSGRID: - # Flag that determines whether the ensemble-stat verification for gridded - # data task is to be run. - # - # RUN_TASK_VX_ENSPOINT: - # Flag that determines whether the ensemble point verification task is - # to be run. If this flag is set, both ensemble-stat point verification - # and point verification of ensemble-stat output is computed. 
- # - # RUN_TASK_PLOT_ALLVARS: - # Flag that determines whether to run python plotting scripts - # - # RUN_TASK_AQM_ICS: - # Flag that determines whether the TN_AQM_ICS task is to be run for air quality modeling. - # - # RUN_TASK_AQM_LBCS: - # Flag that determines whether the TN_AQM_LBCS task is to be run for air quality modeling. - # - # RUN_TASK_NEXUS_GFS_SFC: - # Flag that determines whether the TN_NEXUS_GFS_SFC task is to be run for air quality modeling. - # - # RUN_TASK_NEXUS_EMISSION: - # Flag that determines whether the TN_NEXUS_EMISSION task is to be run for air quality modeling. - # - # RUN_TASK_FIRE_EMISSION: - # Flag that determines whether the TN_FIRE_EMISSION task is to be run for air quality modeling. - # - # RUN_TASK_POINT_SOURCE: - # Flag that determines whether the TN_POINT_SOURCE task is to be run for air quality modeling. - # - # RUN_TASK_PRE_POST_STAT: - # Flag that determines whether the TN_PRE_POST_STAT task is to be run for air quality modeling. - # - # RUN_TASK_POST_STAT_O3: - # Flag that determines whether the TN_POST_STAT_O3 task is to be run for air quality modeling. - # - # RUN_TASK_POST_STAT_PM25: - # Flag that determines whether the TN_POST_STAT_PM25 task is to be run for air quality modeling. - # - # RUN_TASK_BIAS_CORRECTION_O3: - # Flag that determines whether the TN_BIAS_CORRECTION_O3 task is to be run for air quality modeling. - # - # RUN_TASK_BIAS_CORRECTION_PM25: - # Flag that determines whether the TN_BIAS_CORRECTION_PM25 task is to be run for air quality modeling. 
- # - #----------------------------------------------------------------------- - # - RUN_TASK_MAKE_GRID: true - RUN_TASK_MAKE_OROG: true - RUN_TASK_MAKE_SFC_CLIMO: true - - RUN_TASK_GET_EXTRN_ICS: true - RUN_TASK_GET_EXTRN_LBCS: true - RUN_TASK_MAKE_ICS: true - RUN_TASK_MAKE_LBCS: true - RUN_TASK_RUN_FCST: true - RUN_TASK_RUN_POST: true - - RUN_TASK_RUN_PRDGEN: false - - RUN_TASK_GET_OBS_CCPA: false - RUN_TASK_GET_OBS_MRMS: false - RUN_TASK_GET_OBS_NDAS: false - RUN_TASK_VX_GRIDSTAT: false - RUN_TASK_VX_POINTSTAT: false - RUN_TASK_VX_ENSGRID: false - RUN_TASK_VX_ENSPOINT: false - - RUN_TASK_PLOT_ALLVARS: false - - RUN_TASK_AQM_ICS: false - RUN_TASK_AQM_LBCS: false - RUN_TASK_NEXUS_GFS_SFC: false - RUN_TASK_NEXUS_EMISSION: false - RUN_TASK_FIRE_EMISSION: false - RUN_TASK_POINT_SOURCE: false - RUN_TASK_PRE_POST_STAT: false - RUN_TASK_POST_STAT_O3: false - RUN_TASK_POST_STAT_PM25: false - RUN_TASK_BIAS_CORRECTION_O3: false - RUN_TASK_BIAS_CORRECTION_PM25: false - - #---------------------------- # MAKE GRID config parameters #----------------------------- task_make_grid: - TN_MAKE_GRID: "make_grid" - NNODES_MAKE_GRID: 1 - PPN_MAKE_GRID: 24 - WTIME_MAKE_GRID: 00:20:00 - MAXTRIES_MAKE_GRID: 2 # #----------------------------------------------------------------------- # # GRID_DIR: - # The directory in which to look for pregenerated grid files if - # RUN_TASK_MAKE_GRID is set to false. + # The directory in which to look for pregenerated grid files if the + # make_grid task is not set to run. 
# #----------------------------------------------------------------------- # - GRID_DIR: '{{ [workflow.EXPTDIR, "grid"]|path_join if workflow_switches.RUN_TASK_MAKE_GRID else "" }}' + GRID_DIR: '{{ [workflow.EXPTDIR, "grid"]|path_join if rocoto.tasks.get("task_make_grid") else "" }}' # #----------------------------------------------------------------------- # @@ -1427,40 +1303,24 @@ task_make_grid: # MAKE OROG config parameters #----------------------------- task_make_orog: - TN_MAKE_OROG: "make_orog" - NNODES_MAKE_OROG: 1 - PPN_MAKE_OROG: 24 - WTIME_MAKE_OROG: 00:20:00 - MAXTRIES_MAKE_OROG: 2 KMP_AFFINITY_MAKE_OROG: "disabled" OMP_NUM_THREADS_MAKE_OROG: 6 OMP_STACKSIZE_MAKE_OROG: "2048m" - OROG_DIR: '{{ [workflow.EXPTDIR, "orog"]|path_join if workflow_switches.RUN_TASK_MAKE_OROG else "" }}' + OROG_DIR: '{{ [workflow.EXPTDIR, "orog"]|path_join if rocoto.tasks.get("task_make_orog") else "" }}' #---------------------------- # MAKE SFC CLIMO config parameters #----------------------------- task_make_sfc_climo: - TN_MAKE_SFC_CLIMO: "make_sfc_climo" - NNODES_MAKE_SFC_CLIMO: 2 - PPN_MAKE_SFC_CLIMO: 24 - WTIME_MAKE_SFC_CLIMO: 00:20:00 - MAXTRIES_MAKE_SFC_CLIMO: 2 KMP_AFFINITY_MAKE_SFC_CLIMO: "scatter" OMP_NUM_THREADS_MAKE_SFC_CLIMO: 1 OMP_STACKSIZE_MAKE_SFC_CLIMO: "1024m" - SFC_CLIMO_DIR: '{{ [workflow.EXPTDIR, "sfc_climo"]|path_join if workflow_switches.RUN_TASK_MAKE_SFC_CLIMO else "" }}' + SFC_CLIMO_DIR: '{{ [workflow.EXPTDIR, "sfc_climo"]|path_join if rocoto.tasks.get("task_make_sfc_climo") else "" }}' #---------------------------- # EXTRN ICS config parameters #----------------------------- task_get_extrn_ics: - TN_GET_EXTRN_ICS: "get_extrn_ics" - NNODES_GET_EXTRN_ICS: 1 - PPN_GET_EXTRN_ICS: 1 - MEM_GET_EXTRN_ICS: 2G - WTIME_GET_EXTRN_ICS: 00:45:00 - MAXTRIES_GET_EXTRN_ICS: 1 # #----------------------------------------------------------------------- # @@ -1554,12 +1414,6 @@ task_get_extrn_ics: # EXTRN LBCS config parameters #----------------------------- 
task_get_extrn_lbcs: - TN_GET_EXTRN_LBCS: "get_extrn_lbcs" - NNODES_GET_EXTRN_LBCS: 1 - PPN_GET_EXTRN_LBCS: 1 - MEM_GET_EXTRN_LBCS: 2G - WTIME_GET_EXTRN_LBCS: 00:45:00 - MAXTRIES_GET_EXTRN_LBCS: 1 # #----------------------------------------------------------------------- # @@ -1635,11 +1489,6 @@ task_get_extrn_lbcs: # MAKE ICS config parameters #----------------------------- task_make_ics: - TN_MAKE_ICS: "make_ics" - NNODES_MAKE_ICS: 4 - PPN_MAKE_ICS: 12 - WTIME_MAKE_ICS: 00:30:00 - MAXTRIES_MAKE_ICS: 1 KMP_AFFINITY_MAKE_ICS: "scatter" OMP_NUM_THREADS_MAKE_ICS: 1 OMP_STACKSIZE_MAKE_ICS: "1024m" @@ -1680,11 +1529,6 @@ task_make_ics: # MAKE LBCS config parameters #----------------------------- task_make_lbcs: - TN_MAKE_LBCS: "make_lbcs" - NNODES_MAKE_LBCS: 4 - PPN_MAKE_LBCS: 12 - WTIME_MAKE_LBCS: 00:30:00 - MAXTRIES_MAKE_LBCS: 1 KMP_AFFINITY_MAKE_LBCS: "scatter" OMP_NUM_THREADS_MAKE_LBCS: 1 OMP_STACKSIZE_MAKE_LBCS: "1024m" @@ -1693,11 +1537,8 @@ task_make_lbcs: # FORECAST config parameters #----------------------------- task_run_fcst: - TN_RUN_FCST: "run_fcst" NNODES_RUN_FCST: '{{ (PE_MEMBER01 + PPN_RUN_FCST - 1) // PPN_RUN_FCST }}' PPN_RUN_FCST: '{{ platform.NCORES_PER_NODE // OMP_NUM_THREADS_RUN_FCST }}' - WTIME_RUN_FCST: 04:30:00 - MAXTRIES_RUN_FCST: 1 FV3_EXEC_FP: '{{ [user.EXECdir, workflow.FV3_EXEC_FN]|path_join }}' # #----------------------------------------------------------------------- @@ -1755,11 +1596,10 @@ task_run_fcst: # fh = 1, 2, and 5. # # WRITE_DOPOST: - # Flag that determines whether or not to use the inline post feature - # [i.e. calling the Unified Post Processor (UPP) from within the weather - # model]. If this is set to true, the TN_RUN_POST task is deactivated - # (i.e. RUN_TASK_RUN_POST is set to false) to avoid unnecessary - # computations. + # Flag that determines whether or not to use the inline post feature + # [i.e. calling the Unified Post Processor (UPP) from within the + # weather model]. 
If this is set to true, the run_post task will + be deactivated. # #----------------------------------------------------------------------- # @@ -1945,11 +1785,6 @@ task_run_fcst: # POST config parameters #----------------------------- task_run_post: - TN_RUN_POST: "run_post" - NNODES_RUN_POST: 2 - PPN_RUN_POST: 24 - WTIME_RUN_POST: 00:15:00 - MAXTRIES_RUN_POST: 2 KMP_AFFINITY_RUN_POST: "scatter" OMP_NUM_THREADS_RUN_POST: 1 OMP_STACKSIZE_RUN_POST: "1024m" @@ -2003,7 +1838,7 @@ task_run_post: # # POST_OUTPUT_DOMAIN_NAME: # Domain name (in lowercase) used in constructing the names of the output - # files generated by UPP [which is called either by running the TN_RUN_POST + # files generated by UPP [which is called either by running the run_post # task or by activating the inline post feature (WRITE_DOPOST set to true)]. # The post output files are named as follows: # @@ -2021,15 +1856,20 @@ task_run_post: POST_OUTPUT_DOMAIN_NAME: '{{ workflow.PREDEF_GRID_NAME }}' TESTBED_FIELDS_FN: "" +#---------------------------- +# NEXUS_EMISSION config parameters +#----------------------------- +task_nexus_emission: + PPN_NEXUS_EMISSION: '{{ platform.NCORES_PER_NODE // OMP_NUM_THREADS_NEXUS_EMISSION }}' + KMP_AFFINITY_NEXUS_EMISSION: "scatter" + OMP_NUM_THREADS_NEXUS_EMISSION: 2 + OMP_STACKSIZE_NEXUS_EMISSION: "1024m" + + #---------------------------- # RUN PRDGEN config parameters #----------------------------- task_run_prdgen: - TN_RUN_PRDGEN: "run_prdgen" - NNODES_RUN_PRDGEN: 1 - PPN_RUN_PRDGEN: 22 - WTIME_RUN_PRDGEN: 00:30:00 - MAXTRIES_RUN_PRDGEN: 1 KMP_AFFINITY_RUN_PRDGEN: "scatter" OMP_NUM_THREADS_RUN_PRDGEN: 1 OMP_STACKSIZE_RUN_PRDGEN: "1024m" @@ -2068,11 +1908,6 @@ task_run_prdgen: # PLOT_ALLVARS config parameters #----------------------------- task_plot_allvars: - TN_PLOT_ALLVARS: "plot_allvars" - NNODES_PLOT_ALLVARS: 1 - PPN_PLOT_ALLVARS: 24 - WTIME_PLOT_ALLVARS: 01:00:00 - MAXTRIES_PLOT_ALLVARS: 1 
#------------------------------------------------------------------------- # Reference experiment's COMOUT directory. This is where the GRIB2 files # from postprocessing are located. Make this a template to compare @@ -2096,539 +1931,6 @@ task_plot_allvars: #------------------------------------------------------------------------------- PLOT_DOMAINS: ["conus"] -#---------------------------- -# GET OBS CCPA config parameters -#----------------------------- -task_get_obs_ccpa: - TN_GET_OBS_CCPA: "get_obs_ccpa" - NNODES_GET_OBS_CCPA: 1 - PPN_GET_OBS_CCPA: 1 - MEM_GET_OBS_CCPA: 2G - WTIME_GET_OBS_CCPA: 00:45:00 - MAXTRIES_GET_OBS_CCPA: 1 - -#---------------------------- -# GET OBS MRMS config parameters -#----------------------------- -task_get_obs_mrms: - TN_GET_OBS_MRMS: "get_obs_mrms" - NNODES_GET_OBS_MRMS: 1 - PPN_GET_OBS_MRMS: 1 - MEM_GET_OBS_MRMS: 2G - WTIME_GET_OBS_MRMS: 00:45:00 - MAXTRIES_GET_OBS_MRMS: 1 - -#---------------------------- -# GET OBS NDAS config parameters -#----------------------------- -task_get_obs_ndas: - TN_GET_OBS_NDAS: "get_obs_ndas" - NNODES_GET_OBS_NDAS: 1 - PPN_GET_OBS_NDAS: 1 - MEM_GET_OBS_NDAS: 2G - WTIME_GET_OBS_NDAS: 02:00:00 - MAXTRIES_GET_OBS_NDAS: 1 - -#---------------------------- -# tn_run_met_pb2nc_obs config parameters -#----------------------------- -task_tn_run_met_pb2nc_obs: - TN_RUN_MET_PB2NC_OBS: "run_MET_Pb2nc_obs" - NNODES_RUN_MET_PB2NC_OBS: 1 - PPN_RUN_MET_PB2NC_OBS: 1 - MEM_RUN_MET_PB2NC_OBS: 2G - WTIME_RUN_MET_PB2NC_OBS: 00:30:00 - MAXTRIES_RUN_MET_PB2NC_OBS: 2 - -#---------------------------- -# tn_run_met_pcpcombine config parameters -#----------------------------- -task_tn_run_met_pcpcombine: - TN_RUN_MET_PCPCOMBINE: "run_MET_PcpCombine" -# - NNODES_RUN_MET_PCPCOMBINE_OBS: 1 - PPN_RUN_MET_PCPCOMBINE_OBS: 1 - MEM_RUN_MET_PCPCOMBINE_OBS: 2G - WTIME_RUN_MET_PCPCOMBINE_OBS: 00:30:00 - MAXTRIES_RUN_MET_PCPCOMBINE_OBS: 2 -# - NNODES_RUN_MET_PCPCOMBINE_FCST: 1 - PPN_RUN_MET_PCPCOMBINE_FCST: 1 - 
MEM_RUN_MET_PCPCOMBINE_FCST: 2G - WTIME_RUN_MET_PCPCOMBINE_FCST: 00:30:00 - MAXTRIES_RUN_MET_PCPCOMBINE_FCST: 2 - -#---------------------------- -# run_met_gridstat_vx_apcp01h config parameters -#----------------------------- -task_run_met_gridstat_vx_apcp01h: - TN_RUN_MET_GRIDSTAT_VX_APCP01H: "run_MET_GridStat_vx_APCP01h" - NNODES_RUN_MET_GRIDSTAT_VX_APCP01H: 1 - PPN_RUN_MET_GRIDSTAT_VX_APCP01H: 1 - MEM_RUN_MET_GRIDSTAT_VX_APCP01H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_APCP01H: 02:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_APCP01H: 2 - -#---------------------------- -# run_met_gridstat_vx_apcp03h config parameters -#----------------------------- -task_run_met_gridstat_vx_apcp03h: - TN_RUN_MET_GRIDSTAT_VX_APCP03H: "run_MET_GridStat_vx_APCP03h" - NNODES_RUN_MET_GRIDSTAT_VX_APCP03H: 1 - PPN_RUN_MET_GRIDSTAT_VX_APCP03H: 1 - MEM_RUN_MET_GRIDSTAT_VX_APCP03H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_APCP03H: 02:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_APCP03H: 2 - -#---------------------------- -# run_met_gridstat_vx_apcp06h config parameters -#----------------------------- -task_run_met_gridstat_vx_apcp06h: - TN_RUN_MET_GRIDSTAT_VX_APCP06H: "run_MET_GridStat_vx_APCP06h" - NNODES_RUN_MET_GRIDSTAT_VX_APCP06H: 1 - PPN_RUN_MET_GRIDSTAT_VX_APCP06H: 1 - MEM_RUN_MET_GRIDSTAT_VX_APCP06H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_APCP06H: 02:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_APCP06H: 2 - -#---------------------------- -# run_met_gridstat_vx_apcp24h config parameters -#----------------------------- -task_run_met_gridstat_vx_apcp24h: - TN_RUN_MET_GRIDSTAT_VX_APCP24H: "run_MET_GridStat_vx_APCP24h" - NNODES_RUN_MET_GRIDSTAT_VX_APCP24H: 1 - PPN_RUN_MET_GRIDSTAT_VX_APCP24H: 1 - MEM_RUN_MET_GRIDSTAT_VX_APCP24H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_APCP24H: 02:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_APCP24H: 2 - -#---------------------------- -# run_met_gridstat_vx_refc config parameters -#----------------------------- -task_run_met_gridstat_vx_refc: - TN_RUN_MET_GRIDSTAT_VX_REFC: "run_MET_GridStat_vx_REFC" - 
NNODES_RUN_MET_GRIDSTAT_VX_REFC: 1 - PPN_RUN_MET_GRIDSTAT_VX_REFC: 1 - MEM_RUN_MET_GRIDSTAT_VX_REFC: 2G - WTIME_RUN_MET_GRIDSTAT_VX_REFC: 02:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_REFC: 2 - -#---------------------------- -# run_met_gridstat_vx_retop config parameters -#----------------------------- -task_run_met_gridstat_vx_retop: - TN_RUN_MET_GRIDSTAT_VX_RETOP: "run_MET_GridStat_vx_RETOP" - NNODES_RUN_MET_GRIDSTAT_VX_RETOP: 1 - PPN_RUN_MET_GRIDSTAT_VX_RETOP: 1 - MEM_RUN_MET_GRIDSTAT_VX_RETOP: 2G - WTIME_RUN_MET_GRIDSTAT_VX_RETOP: 02:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_RETOP: 2 - -#---------------------------- -# run_met_pointstat_vx_sfc config parameters -#----------------------------- -task_run_met_pointstat_vx_sfc: - TN_RUN_MET_POINTSTAT_VX_SFC: "run_MET_PointStat_vx_SFC" - NNODES_RUN_MET_POINTSTAT_VX_SFC: 1 - PPN_RUN_MET_POINTSTAT_VX_SFC: 1 - MEM_RUN_MET_POINTSTAT_VX_SFC: 2G - WTIME_RUN_MET_POINTSTAT_VX_SFC: 01:00:00 - MAXTRIES_RUN_MET_POINTSTAT_VX_SFC: 2 - -#---------------------------- -# run_met_pointstat_vx_upa config parameters -#----------------------------- -task_run_met_pointstat_vx_upa: - TN_RUN_MET_POINTSTAT_VX_UPA: "run_MET_PointStat_vx_UPA" - NNODES_RUN_MET_POINTSTAT_VX_UPA: 1 - PPN_RUN_MET_POINTSTAT_VX_UPA: 1 - MEM_RUN_MET_POINTSTAT_VX_UPA: 2G - WTIME_RUN_MET_POINTSTAT_VX_UPA: 01:00:00 - MAXTRIES_RUN_MET_POINTSTAT_VX_UPA: 2 - -#---------------------------- -# run_met_ensemblestat_vx_apcp01h config parameters -#----------------------------- -task_run_met_ensemblestat_vx_apcp01h: - TN_RUN_MET_ENSEMBLESTAT_VX_APCP01H: "run_MET_EnsembleStat_vx_APCP01h" - NNODES_RUN_MET_ENSEMBLESTAT_VX_APCP01H: 1 - PPN_RUN_MET_ENSEMBLESTAT_VX_APCP01H: 1 - MEM_RUN_MET_ENSEMBLESTAT_VX_APCP01H: 2G - WTIME_RUN_MET_ENSEMBLESTAT_VX_APCP01H: 01:00:00 - MAXTRIES_RUN_MET_ENSEMBLESTAT_VX_APCP01H: 2 - -#---------------------------- -# run_met_ensemblestat_vx_apcp03h config parameters -#----------------------------- -task_run_met_ensemblestat_vx_apcp03h: - 
TN_RUN_MET_ENSEMBLESTAT_VX_APCP03H: "run_MET_EnsembleStat_vx_APCP03h" - NNODES_RUN_MET_ENSEMBLESTAT_VX_APCP03H: 1 - PPN_RUN_MET_ENSEMBLESTAT_VX_APCP03H: 1 - MEM_RUN_MET_ENSEMBLESTAT_VX_APCP03H: 2G - WTIME_RUN_MET_ENSEMBLESTAT_VX_APCP03H: 01:00:00 - MAXTRIES_RUN_MET_ENSEMBLESTAT_VX_APCP03H: 2 - -#---------------------------- -# run_met_ensemblestat_vx_apcp06h config parameters -#----------------------------- -task_run_met_ensemblestat_vx_apcp06h: - TN_RUN_MET_ENSEMBLESTAT_VX_APCP06H: "run_MET_EnsembleStat_vx_APCP06h" - NNODES_RUN_MET_ENSEMBLESTAT_VX_APCP06H: 1 - PPN_RUN_MET_ENSEMBLESTAT_VX_APCP06H: 1 - MEM_RUN_MET_ENSEMBLESTAT_VX_APCP06H: 2G - WTIME_RUN_MET_ENSEMBLESTAT_VX_APCP06H: 01:00:00 - MAXTRIES_RUN_MET_ENSEMBLESTAT_VX_APCP06H: 2 - -#---------------------------- -# run_met_ensemblestat_vx_apcp24h config parameters -#----------------------------- -task_run_met_ensemblestat_vx_apcp24h: - TN_RUN_MET_ENSEMBLESTAT_VX_APCP24H: "run_MET_EnsembleStat_vx_APCP24h" - NNODES_RUN_MET_ENSEMBLESTAT_VX_APCP24H: 1 - PPN_RUN_MET_ENSEMBLESTAT_VX_APCP24H: 1 - MEM_RUN_MET_ENSEMBLESTAT_VX_APCP24H: 2G - WTIME_RUN_MET_ENSEMBLESTAT_VX_APCP24H: 01:00:00 - MAXTRIES_RUN_MET_ENSEMBLESTAT_VX_APCP24H: 2 - -#---------------------------- -# run_met_ensemblestat_vx_refc config parameters -#----------------------------- -task_run_met_ensemblestat_vx_refc: - TN_RUN_MET_ENSEMBLESTAT_VX_REFC: "run_MET_EnsembleStat_vx_REFC" - NNODES_RUN_MET_ENSEMBLESTAT_VX_REFC: 1 - PPN_RUN_MET_ENSEMBLESTAT_VX_REFC: 1 - MEM_RUN_MET_ENSEMBLESTAT_VX_REFC: 2G - WTIME_RUN_MET_ENSEMBLESTAT_VX_REFC: 01:00:00 - MAXTRIES_RUN_MET_ENSEMBLESTAT_VX_REFC: 2 - -#---------------------------- -# run_met_ensemblestat_vx_retop config parameters -#----------------------------- -task_run_met_ensemblestat_vx_retop: - TN_RUN_MET_ENSEMBLESTAT_VX_RETOP: "run_MET_EnsembleStat_vx_RETOP" - NNODES_RUN_MET_ENSEMBLESTAT_VX_RETOP: 1 - PPN_RUN_MET_ENSEMBLESTAT_VX_RETOP: 1 - MEM_RUN_MET_ENSEMBLESTAT_VX_RETOP: 2G - 
WTIME_RUN_MET_ENSEMBLESTAT_VX_RETOP: 01:00:00 - MAXTRIES_RUN_MET_ENSEMBLESTAT_VX_RETOP: 2 - -#---------------------------- -# run_met_ensemblestat_vx_sfc config parameters -#----------------------------- -task_run_met_ensemblestat_vx_sfc: - TN_RUN_MET_ENSEMBLESTAT_VX_SFC: "run_MET_EnsembleStat_vx_SFC" - NNODES_RUN_MET_ENSEMBLESTAT_VX_SFC: 1 - PPN_RUN_MET_ENSEMBLESTAT_VX_SFC: 1 - MEM_RUN_MET_ENSEMBLESTAT_VX_SFC: 2G - WTIME_RUN_MET_ENSEMBLESTAT_VX_SFC: 01:00:00 - MAXTRIES_RUN_MET_ENSEMBLESTAT_VX_SFC: 2 - -#---------------------------- -# run_met_ensemblestat_vx_upa config parameters -#----------------------------- -task_run_met_ensemblestat_vx_upa: - TN_RUN_MET_ENSEMBLESTAT_VX_UPA: "run_MET_EnsembleStat_vx_UPA" - NNODES_RUN_MET_ENSEMBLESTAT_VX_UPA: 1 - PPN_RUN_MET_ENSEMBLESTAT_VX_UPA: 1 - MEM_RUN_MET_ENSEMBLESTAT_VX_UPA: 2G - WTIME_RUN_MET_ENSEMBLESTAT_VX_UPA: 01:00:00 - MAXTRIES_RUN_MET_ENSEMBLESTAT_VX_UPA: 2 - -#---------------------------- -# run_met_gridstat_vx_ensmean_apcp01h config parameters -#----------------------------- -task_run_met_gridstat_vx_ensmean_apcp01h: - TN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP01H: "run_MET_GridStat_vx_ensmean_APCP01h" - NNODES_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP01H: 1 - PPN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP01H: 1 - MEM_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP01H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP01H: 01:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP01H: 2 - -#---------------------------- -# run_met_gridstat_vx_ensmean_apcp03h config parameters -#----------------------------- -task_run_met_gridstat_vx_ensmean_apcp03h: - TN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP03H: "run_MET_GridStat_vx_ensmean_APCP03h" - NNODES_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP03H: 1 - PPN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP03H: 1 - MEM_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP03H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP03H: 01:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP03H: 2 - -#---------------------------- -# run_met_gridstat_vx_ensmean_apcp06h config parameters 
-#----------------------------- -task_run_met_gridstat_vx_ensmean_apcp06h: - TN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP06H: "run_MET_GridStat_vx_ensmean_APCP06h" - NNODES_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP06H: 1 - PPN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP06H: 1 - MEM_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP06H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP06H: 01:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP06H: 2 - -#---------------------------- -# run_met_gridstat_vx_ensmean_apcp24h config parameters -#----------------------------- -task_run_met_gridstat_vx_ensmean_apcp24h: - TN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP24H: "run_MET_GridStat_vx_ensmean_APCP24h" - NNODES_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP24H: 1 - PPN_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP24H: 1 - MEM_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP24H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP24H: 01:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_ENSMEAN_APCP24H: 2 - -#---------------------------- -# run_met_pointstat_vx_ensmean_sfc config parameters -#----------------------------- -task_run_met_pointstat_vx_ensmean_sfc: - TN_RUN_MET_POINTSTAT_VX_ENSMEAN_SFC: "run_MET_PointStat_vx_ensmean_SFC" - NNODES_RUN_MET_POINTSTAT_VX_ENSMEAN_SFC: 1 - PPN_RUN_MET_POINTSTAT_VX_ENSMEAN_SFC: 1 - MEM_RUN_MET_POINTSTAT_VX_ENSMEAN_SFC: 2G - WTIME_RUN_MET_POINTSTAT_VX_ENSMEAN_SFC: 01:00:00 - MAXTRIES_RUN_MET_POINTSTAT_VX_ENSMEAN_SFC: 2 - -#---------------------------- -# run_met_pointstat_vx_ensmean_upa config parameters -#----------------------------- -task_run_met_pointstat_vx_ensmean_upa: - TN_RUN_MET_POINTSTAT_VX_ENSMEAN_UPA: "run_MET_PointStat_vx_ensmean_UPA" - NNODES_RUN_MET_POINTSTAT_VX_ENSMEAN_UPA: 1 - PPN_RUN_MET_POINTSTAT_VX_ENSMEAN_UPA: 1 - MEM_RUN_MET_POINTSTAT_VX_ENSMEAN_UPA: 2G - WTIME_RUN_MET_POINTSTAT_VX_ENSMEAN_UPA: 01:00:00 - MAXTRIES_RUN_MET_POINTSTAT_VX_ENSMEAN_UPA: 2 - -#---------------------------- -# run_met_gridstat_vx_ensprob_apcp01h config parameters -#----------------------------- -task_run_met_gridstat_vx_ensprob_apcp01h: - 
TN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP01H: "run_MET_GridStat_vx_ensprob_APCP01h" - NNODES_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP01H: 1 - PPN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP01H: 1 - MEM_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP01H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP01H: 01:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP01H: 2 - -#---------------------------- -# run_met_gridstat_vx_ensprob_apcp03h config parameters -#----------------------------- -task_run_met_gridstat_vx_ensprob_apcp03h: - TN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP03H: "run_MET_GridStat_vx_ensprob_APCP03h" - NNODES_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP03H: 1 - PPN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP03H: 1 - MEM_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP03H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP03H: 01:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP03H: 2 - -#---------------------------- -# run_met_gridstat_vx_ensprob_apcp06h config parameters -#----------------------------- -task_run_met_gridstat_vx_ensprob_apcp06h: - TN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP06H: "run_MET_GridStat_vx_ensprob_APCP06h" - NNODES_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP06H: 1 - PPN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP06H: 1 - MEM_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP06H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP06H: 01:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP06H: 2 - -#---------------------------- -# run_met_gridstat_vx_ensprob_apcp24h config parameters -#----------------------------- -task_run_met_gridstat_vx_ensprob_apcp24h: - TN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP24H: "run_MET_GridStat_vx_ensprob_APCP24h" - NNODES_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP24H: 1 - PPN_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP24H: 1 - MEM_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP24H: 2G - WTIME_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP24H: 01:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_ENSPROB_APCP24H: 2 - -#---------------------------- -# run_met_gridstat_vx_ensprob_refc config parameters -#----------------------------- -task_run_met_gridstat_vx_ensprob_refc: - TN_RUN_MET_GRIDSTAT_VX_ENSPROB_REFC: 
"run_MET_GridStat_vx_ensprob_REFC" - NNODES_RUN_MET_GRIDSTAT_VX_ENSPROB_REFC: 1 - PPN_RUN_MET_GRIDSTAT_VX_ENSPROB_REFC: 1 - MEM_RUN_MET_GRIDSTAT_VX_ENSPROB_REFC: 2G - WTIME_RUN_MET_GRIDSTAT_VX_ENSPROB_REFC: 01:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_ENSPROB_REFC: 2 - -#---------------------------- -# run_met_gridstat_vx_ensprob_retop config parameters -#----------------------------- -task_run_met_gridstat_vx_ensprob_retop: - TN_RUN_MET_GRIDSTAT_VX_ENSPROB_RETOP: "run_MET_GridStat_vx_ensprob_RETOP" - NNODES_RUN_MET_GRIDSTAT_VX_ENSPROB_RETOP: 1 - PPN_RUN_MET_GRIDSTAT_VX_ENSPROB_RETOP: 1 - MEM_RUN_MET_GRIDSTAT_VX_ENSPROB_RETOP: 2G - WTIME_RUN_MET_GRIDSTAT_VX_ENSPROB_RETOP: 01:00:00 - MAXTRIES_RUN_MET_GRIDSTAT_VX_ENSPROB_RETOP: 2 - -#---------------------------- -# run_met_pointstat_vx_ensprob_sfc config parameters -#----------------------------- -task_run_met_pointstat_vx_ensprob_sfc: - TN_RUN_MET_POINTSTAT_VX_ENSPROB_SFC: "run_MET_PointStat_vx_ensprob_SFC" - NNODES_RUN_MET_POINTSTAT_VX_ENSPROB_SFC: 1 - PPN_RUN_MET_POINTSTAT_VX_ENSPROB_SFC: 1 - MEM_RUN_MET_POINTSTAT_VX_ENSPROB_SFC: 2G - WTIME_RUN_MET_POINTSTAT_VX_ENSPROB_SFC: 01:00:00 - MAXTRIES_RUN_MET_POINTSTAT_VX_ENSPROB_SFC: 2 - -#---------------------------- -# run_met_pointstat_vx_ensprob_upa config parameters -#----------------------------- -task_run_met_pointstat_vx_ensprob_upa: - TN_RUN_MET_POINTSTAT_VX_ENSPROB_UPA: "run_MET_PointStat_vx_ensprob_UPA" - NNODES_RUN_MET_POINTSTAT_VX_ENSPROB_UPA: 1 - PPN_RUN_MET_POINTSTAT_VX_ENSPROB_UPA: 1 - MEM_RUN_MET_POINTSTAT_VX_ENSPROB_UPA: 2G - WTIME_RUN_MET_POINTSTAT_VX_ENSPROB_UPA: 01:00:00 - MAXTRIES_RUN_MET_POINTSTAT_VX_ENSPROB_UPA: 2 - -#---------------------------- -# AQM_ICS config parameters -#----------------------------- -task_aqm_ics: - TN_AQM_ICS: "aqm_ics" - NNODES_AQM_ICS: 1 - PPN_AQM_ICS: 1 - WTIME_AQM_ICS: 00:30:00 - MAXTRIES_AQM_ICS: 2 - -#---------------------------- -# AQM_LBCS config parameters -#----------------------------- -task_aqm_lbcs: - TN_AQM_LBCS: 
"aqm_lbcs" - NNODES_AQM_LBCS: 1 - PPN_AQM_LBCS: 24 - WTIME_AQM_LBCS: 00:30:00 - MAXTRIES_AQM_LBCS: 2 - -#---------------------------- -# NEXUS_GFS_SFC config parameters -#----------------------------- -task_nexus_gfs_sfc: - TN_NEXUS_GFS_SFC: "nexus_gfs_sfc" - NNODES_NEXUS_GFS_SFC: 1 - PPN_NEXUS_GFS_SFC: 1 - MEM_NEXUS_GFS_SFC: 2G - WTIME_NEXUS_GFS_SFC: 00:30:00 - MAXTRIES_NEXUS_GFS_SFC: 2 - -#---------------------------- -# NEXUS_EMISSION config parameters -#----------------------------- -task_nexus_emission: - TN_NEXUS_EMISSION: "nexus_emission" - NNODES_NEXUS_EMISSION: 4 - PPN_NEXUS_EMISSION: '{{ platform.NCORES_PER_NODE // OMP_NUM_THREADS_NEXUS_EMISSION }}' - WTIME_NEXUS_EMISSION: 01:00:00 - MAXTRIES_NEXUS_EMISSION: 2 - KMP_AFFINITY_NEXUS_EMISSION: "scatter" - OMP_NUM_THREADS_NEXUS_EMISSION: 2 - OMP_STACKSIZE_NEXUS_EMISSION: "1024m" - -#---------------------------- -# NEXUS_POST_SPLIT config parameters -#----------------------------- -task_nexus_post_split: - TN_NEXUS_POST_SPLIT: "nexus_post_split" - NNODES_NEXUS_POST_SPLIT: 1 - PPN_NEXUS_POST_SPLIT: 1 - WTIME_NEXUS_POST_SPLIT: 00:30:00 - MAXTRIES_NEXUS_POST_SPLIT: 2 - -#---------------------------- -# FIRE_EMISSION config parameters -#----------------------------- -task_fire_emission: - TN_FIRE_EMISSION: "fire_emission" - NNODES_FIRE_EMISSION: 1 - PPN_FIRE_EMISSION: 1 - MEM_FIRE_EMISSION: 2G - WTIME_FIRE_EMISSION: 00:30:00 - MAXTRIES_FIRE_EMISSION: 2 - -#---------------------------- -# POINT_SOURCE config parameters -#----------------------------- -task_point_source: - TN_POINT_SOURCE: "point_source" - NNODES_POINT_SOURCE: 1 - PPN_POINT_SOURCE: 1 - WTIME_POINT_SOURCE: 01:00:00 - MAXTRIES_POINT_SOURCE: 2 - -#---------------------------- -# PRE_POST_STAT config parameters -#----------------------------- -task_pre_post_stat: - TN_PRE_POST_STAT: "pre_post_stat" - NNODES_PRE_POST_STAT: 1 - PPN_PRE_POST_STAT: 1 - WTIME_PRE_POST_STAT: 00:30:00 - MAXTRIES_PRE_POST_STAT: 2 - -#---------------------------- -# POST_STAT_O3 
config parameters -#----------------------------- -task_post_stat_o3: - TN_POST_STAT_O3: "post_stat_o3" - NNODES_POST_STAT_O3: 1 - PPN_POST_STAT_O3: 1 - MEM_POST_STAT_O3: 120G - WTIME_POST_STAT_O3: 00:30:00 - MAXTRIES_POST_STAT_O3: 2 - KMP_AFFINITY_POST_STAT_O3: "scatter" - OMP_NUM_THREADS_POST_STAT_O3: 1 - OMP_STACKSIZE_POST_STAT_O3: "2056M" - -#---------------------------- -# POST_STAT_PM25 config parameters -#----------------------------- -task_post_stat_pm25: - TN_POST_STAT_PM25: "post_stat_pm25" - NNODES_POST_STAT_PM25: 1 - PPN_POST_STAT_PM25: 1 - MEM_POST_STAT_PM25: 120G - WTIME_POST_STAT_PM25: 00:30:00 - MAXTRIES_POST_STAT_PM25: 2 - KMP_AFFINITY_POST_STAT_PM25: "scatter" - OMP_NUM_THREADS_POST_STAT_PM25: 1 - OMP_STACKSIZE_POST_STAT_PM25: "2056M" - -#---------------------------- -# BIAS_CORRECTION_O3 config parameters -#----------------------------- -task_bias_correction_o3: - TN_BIAS_CORRECTION_O3: "bias_correction_o3" - NNODES_BIAS_CORRECTION_O3: 1 - PPN_BIAS_CORRECTION_O3: 1 - MEM_BIAS_CORRECTION_O3: 120G - WTIME_BIAS_CORRECTION_O3: 00:30:00 - MAXTRIES_BIAS_CORRECTION_O3: 2 - KMP_AFFINITY_BIAS_CORRECTION_O3: "scatter" - OMP_NUM_THREADS_BIAS_CORRECTION_O3: 128 - OMP_STACKSIZE_BIAS_CORRECTION_O3: "2056M" - -#---------------------------- -# BIAS_CORRECTION_PM25 config parameters -#----------------------------- -task_bias_correction_pm25: - TN_BIAS_CORRECTION_PM25: "bias_correction_pm25" - NNODES_BIAS_CORRECTION_PM25: 1 - PPN_BIAS_CORRECTION_PM25: 1 - MEM_BIAS_CORRECTION_PM25: 120G - WTIME_BIAS_CORRECTION_PM25: 00:30:00 - MAXTRIES_BIAS_CORRECTION_PM25: 2 - KMP_AFFINITY_BIAS_CORRECTION_PM25: "scatter" - OMP_NUM_THREADS_BIAS_CORRECTION_PM25: 128 - OMP_STACKSIZE_BIAS_CORRECTION_PM25: "2056M" - #---------------------------- # global config parameters #----------------------------- @@ -2687,7 +1989,7 @@ global: # DO_ENSEMBLE: false NUM_ENS_MEMBERS: 0 - ENSMEM_NAMES: '{% for m in range(NUM_ENS_MEMBERS) %} "mem%03d, " % m {% endfor %}' + ENSMEM_NAMES: '{% for m in 
range(NUM_ENS_MEMBERS) %}{{ "mem%03d, " % m }}{% endfor %}' FV3_NML_ENSMEM_FPS: '{% for mem in ENSMEM_NAMES %}{{ [EXPTDIR, "%s_%s" % FV3_NML_FN, mem]|path_join }}{% endfor %}' ENS_TIME_LAG_HRS: '[ {% for m in range(NUM_ENS_MEMBERS) %} 0, {% endfor %} ]' # @@ -2795,12 +2097,6 @@ global: # verification (vx) parameters #----------------------------- verification: - # Move some of the following to another section at some point. - # - # GET_OBS_LOCAL_MODULE_FN: - # Local task modulefile name for all GET_OBS_* tasks. - # - GET_OBS_LOCAL_MODULE_FN: 'get_obs' # # Templates for CCPA observation files. # @@ -2809,61 +2105,25 @@ verification: OBS_NDAS_SFCorUPA_FN_TEMPLATE: 'prepbufr.ndas.{valid?fmt=%Y%m%d%H}' OBS_NDAS_SFCorUPA_FN_METPROC_TEMPLATE: '${OBS_NDAS_SFCorUPA_FN_TEMPLATE}.nc' # - # VX_LOCAL_MODULE_FN: - # Name (without extension) of the local module file for running the vx - # tasks in the workflow. - # - VX_LOCAL_MODULE_FN: 'run_vx' - # - # RUN_TASKS_METVX_DET: - # Flag that specifies whether to run deterministic verification. If set - # to True, this will run deterministic vx on the post-processed forecast - # output. This post-processed output may consist of a single forecast - # or an ensemble of foreasts, and it may be staged from previous runs of - # the SRW App or may be generated by running the TN_RUN_FCST task as part - # of the current SRW-App-generated experiment. - # - # RUN_TASKS_METVX_ENS: - # Flag that specifies whether to run ensemble verification. The ensemble - # forecast output on which vx will be run may be staged or generated by - # running an ensemble of forecasts with the weather model as part of the - # current SRW-App-generated experiment. - # - RUN_TASKS_METVX_DET: False - RUN_TASKS_METVX_ENS: False - # # VX_FCST_MODEL_NAME: # String that specifies a descriptive name for the model being verified. # This is used in forming the names of the verification output files as # well as in the contents of those files. 
# - # VX_FIELDS: - # The fields or groups of fields on which to run verification. - # - # VX_APCP_ACCUMS_HH: - # The 2-digit accumulation periods (in units of hours) to consider for - # APCP (accumulated precipitation). If VX_FIELDS contains "APCP", then - # VX_APCP_ACCUMS_HH must contain at least one element. If not, - # VX_APCP_ACCUMS_HH will be ignored. - # VX_FCST_MODEL_NAME: '{{ nco.NET }}.{{ task_run_post.POST_OUTPUT_DOMAIN_NAME }}' - VX_FIELDS: [ "APCP", "REFC", "RETOP", "SFC", "UPA" ] - VX_APCP_ACCUMS_HH: [ "01", "03", "06", "24" ] # - # VX_FCST_INPUT_BASEDIR: - # Location of top-level directory containing forecast (but not obs) files - # that will be used as input into METplus for verification. If not - # specified, this gets set to EXPTDIR. + # VX_FCST_INPUT_DIR: + # Directory template for the path containing forecast (but not obs) files + # that will be used as input into METplus for verification. # # VX_OUTPUT_BASEDIR: # Top-level directory in which METplus will place its output. # - VX_FCST_INPUT_BASEDIR: '{{ workflow.EXPTDIR if ((workflow_switches.RUN_TASK_RUN_FCST and task_run_fcst.WRITE_DOPOST) or workflow_switches.RUN_TASK_RUN_POST) }}' + VX_FCST_INPUT_DIR: '{{ workflow.EXPTDIR }}/${CDATE}{% if global.DO_ENSEMBLE %}"/mem${ENSMEM_INDX}"{% endif %}/postprd' VX_OUTPUT_BASEDIR: '{{ workflow.EXPTDIR }}' # # File name and path templates are used in the verification tasks. 
# - FCST_SUBDIR_TEMPLATE: '{init?fmt=%Y%m%d%H?shift=-${time_lag}}${SLASH_ENSMEM_SUBDIR_OR_NULL}/postprd' FCST_FN_TEMPLATE: '${NET}.t{init?fmt=%H?shift=-${time_lag}}z.prslev.f{lead?fmt=%HHH?shift=${time_lag}}.${POST_OUTPUT_DOMAIN_NAME}.grib2' FCST_FN_METPROC_TEMPLATE: '${NET}.t{init?fmt=%H}z.prslev.f{lead?fmt=%HHH}.${POST_OUTPUT_DOMAIN_NAME}_a${ACCUM_HH}h.nc' # @@ -3056,3 +2316,11 @@ cpl_aqm_parm: AQM_AIRNOW_HIST_DIR: "/path/to/historical/airnow/data/dir" +rocoto: + attrs: "" + cycledefs: "" + entities: "" + log: "" + tasks: + taskgroups: "" + diff --git a/ush/fill_jinja_template.py b/ush/fill_jinja_template.py index d7a9673f36..f810136753 100755 --- a/ush/fill_jinja_template.py +++ b/ush/fill_jinja_template.py @@ -246,7 +246,7 @@ def update_dict(dest, newdict, quiet=False): print("*" * 50) -def fill_jinja_template(argv): +def fill_jinja_template(argv, config_dict=None): """ Loads a Jinja template, determines its necessary undefined variables, @@ -259,7 +259,8 @@ def fill_jinja_template(argv): cla.config = config_exists(cla.config) # Create a Jinja Environment to load the template. 
- env = j2.Environment(loader=j2.FileSystemLoader(cla.template)) + env = j2.Environment(loader=j2.FileSystemLoader(cla.template, + encoding='utf-8')) template_source = env.loader.get_source(env, "") template = env.get_template("") parsed_content = env.parse(template_source) @@ -270,6 +271,9 @@ def fill_jinja_template(argv): # Read in the config options from the provided (optional) YAML file cfg = cla.config if cla.config is not None else {} + if config_dict is not None: + update_dict(cfg, config_dict, quiet=cla.quiet) + # Update cfg with (optional) command-line entries, overriding those in YAML file if cla.user_config: update_dict(cfg, cla.user_config, quiet=cla.quiet) diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index 2355468d21..a4d43a70e4 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -102,140 +102,31 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de expt_config["user"]["PARMdir"], wflow_xml_fn, ) + global_var_defns_fp = expt_config["workflow"]["GLOBAL_VAR_DEFNS_FP"] log_info( f""" - Creating rocoto workflow XML file (WFLOW_XML_FP) from jinja template XML - file (template_xml_fp): - template_xml_fp = '{template_xml_fp}' + Creating rocoto workflow XML file (WFLOW_XML_FP): WFLOW_XML_FP = '{wflow_xml_fp}'""" ) # - # Dictionary of settings to pass to fill_jinja + # Call the python script to generate the experiment's XML file # - settings = {} - for k, v in flatten_dict(expt_config).items(): - settings[lowercase(k)] = v - - ensmem_indx_name = "" - uscore_ensmem_name = "" - slash_ensmem_subdir = "" - if expt_config["global"]["DO_ENSEMBLE"]: - ensmem_indx_name = "mem" - uscore_ensmem_name = f"_mem#{ensmem_indx_name}#" - slash_ensmem_subdir = f"/mem#{ensmem_indx_name}#" - - dt_atmos = expt_config["task_run_fcst"]["DT_ATMOS"] - date_first_cycl = expt_config["workflow"]["DATE_FIRST_CYCL"] - date_last_cycl = expt_config["workflow"]["DATE_LAST_CYCL"] - first_file_time = 
date_first_cycl + timedelta(seconds=dt_atmos) - fcst_threads = expt_config["task_run_fcst"]["OMP_NUM_THREADS_RUN_FCST"] - - if date_first_cycl == date_last_cycl: - cycl_next = date_to_str(date_first_cycl, format="%Y%m%d%H00") - else: - cycl_next = date_to_str(date_first_cycl + timedelta(hours=expt_config['workflow']['INCR_CYCL_FREQ']), format="%Y%m%d%H00") - - incr_cycl_freq = expt_config["workflow"]["INCR_CYCL_FREQ"] - date_2nd_cycl = date_to_str(date_first_cycl + timedelta(hours=incr_cycl_freq), format="%Y%m%d%H00") - date_3rd_cycl = date_to_str(date_first_cycl + timedelta(hours=incr_cycl_freq*2), format="%Y%m%d%H00") - date_4th_cycl = date_to_str(date_first_cycl + timedelta(hours=incr_cycl_freq*3), format="%Y%m%d%H00") - fcst_len_hrs = expt_config["workflow"]["FCST_LEN_HRS"] - fcst_len_cycl = expt_config["workflow"]["FCST_LEN_CYCL"] - num_fcst_len_cycl = len(fcst_len_cycl) - if fcst_len_hrs == -1: - all_cdates = expt_config["workflow"]["ALL_CDATES"] - num_all_cdates = len(all_cdates) - num_cyc_days = num_all_cdates // num_fcst_len_cycl -1 - else: - num_cyc_days = 0 - date_1st_last_cycl = date_to_str(date_first_cycl + timedelta(hours=24*num_cyc_days), format="%Y%m%d%H00") - date_2nd_last_cycl = date_to_str(date_first_cycl + timedelta(hours=incr_cycl_freq) + timedelta(hours=24*num_cyc_days), format="%Y%m%d%H00") - date_3rd_last_cycl = date_to_str(date_first_cycl + timedelta(hours=incr_cycl_freq*2) + timedelta(hours=24*num_cyc_days), format="%Y%m%d%H00") - date_4th_last_cycl = date_to_str(date_first_cycl + timedelta(hours=incr_cycl_freq*3) + timedelta(hours=24*num_cyc_days), format="%Y%m%d%H00") - - settings.update( - { - # - # Number of cores used for a task - # - "ncores_run_fcst": expt_config["task_run_fcst"]["PE_MEMBER01"], - "native_run_fcst": f"--cpus-per-task {fcst_threads} --exclusive", - "native_nexus_emission": f"--cpus-per-task {expt_config['task_nexus_emission']['OMP_NUM_THREADS_NEXUS_EMISSION']}", - # - # Parameters that determine the set of cycles to 
run. - # - "date_first_cycl": date_to_str(date_first_cycl, format="%Y%m%d%H00"), - "date_last_cycl": date_to_str(date_last_cycl, format="%Y%m%d%H00"), - "cdate_first_cycl": date_first_cycl, - "cycl_freq": f"{expt_config['workflow']['INCR_CYCL_FREQ']:02d}:00:00", - "cycl_next": cycl_next, - "date_2nd_cycl": date_2nd_cycl, - "date_3rd_cycl": date_3rd_cycl, - "date_4th_cycl": date_4th_cycl, - "date_1st_last_cycl": date_1st_last_cycl, - "date_2nd_last_cycl": date_2nd_last_cycl, - "date_3rd_last_cycl": date_3rd_last_cycl, - "date_4th_last_cycl": date_4th_last_cycl, - "fcst_len_hrs": fcst_len_hrs, - "fcst_len_cycl": fcst_len_cycl, - "num_fcst_len_cycl": num_fcst_len_cycl, - # - # Ensemble-related parameters. - # - "ensmem_indx_name": ensmem_indx_name, - "uscore_ensmem_name": uscore_ensmem_name, - "slash_ensmem_subdir": slash_ensmem_subdir, - # - # Parameters associated with subhourly post-processed output - # - "delta_min": expt_config["task_run_post"]["DT_SUBHOURLY_POST_MNTS"], - "first_fv3_file_tstr": first_file_time.strftime("000:%M:%S"), - } - ) - - # Log "settings" variable. - settings_str = cfg_to_yaml_str(settings) + rocoto_yaml_fp = expt_config["workflow"]["ROCOTO_YAML_FP"] + args = ["-o", wflow_xml_fp, + "-t", template_xml_fp, + "-c", rocoto_yaml_fp ] + if not debug: + args.append("-q") - log_info( - f""" - The variable 'settings' specifying values of the rococo XML variables - has been set as follows: - #----------------------------------------------------------------------- - settings =\n\n""", - verbose=verbose, - ) - log_info(settings_str, verbose=verbose) - - # - # Call the python script to generate the experiment's actual XML file - # from the jinja template file. 
- # try: - fill_jinja_template( - ["-q", "-u", settings_str, "-t", template_xml_fp, "-o", wflow_xml_fp] - ) + fill_jinja_template(args) except: - logging.info( - dedent( - f""" - Variable settings specified on command line for - fill_jinja_template.py:\n - settings =\n\n""" - ) - + settings_str - ) raise Exception( dedent( f""" - Call to python script fill_jinja_template.py to create a rocoto workflow - XML file from a template file failed. Parameters passed to this script - are: - Full path to template rocoto XML file: - template_xml_fp = '{template_xml_fp}' - Full path to output rocoto XML file: - WFLOW_XML_FP = '{wflow_xml_fp}' + Call to fill_jinja_template failed. """ ) ) @@ -763,7 +654,7 @@ def generate_FV3LAM_wflow(ushdir, logfile: str = "log.generate_FV3LAM_wflow", de # the C-resolution of the grid), and this parameter is in most workflow # configurations is not known until the grid is created. # - if not RUN_TASK_MAKE_GRID: + if not expt_config['rocoto']['tasks'].get('task_make_grid'): set_FV3nml_sfc_climo_filenames() @@ -883,6 +774,7 @@ def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = Fals """ ) ) + raise # Note workflow generation completion log_info( @@ -931,10 +823,11 @@ def run_workflow(USHdir, logfile): "user": { "MACHINE": "linux", }, - "workflow_switches": { - "RUN_TASK_MAKE_GRID": True, - "RUN_TASK_MAKE_OROG": True, - "RUN_TASK_MAKE_SFC_CLIMO": True, + "rocoto": { + "tasks": { + "taskgroups": \ + '\'{{ ["parm/wflow/prep.yaml","parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}\'' + }, }, } update_dict(cfg_updates, nco_test_config) diff --git a/ush/machine/gaea.yaml b/ush/machine/gaea.yaml index bac487dbf0..9653d74c45 100644 --- a/ush/machine/gaea.yaml +++ b/ush/machine/gaea.yaml @@ -12,6 +12,7 @@ platform: QUEUE_DEFAULT: normal QUEUE_FCST: normal QUEUE_HPSS: normal + REMOVE_MEMORY: True RUN_CMD_FCST: srun --export=ALL --mpi=pmi2 -n ${PE_MEMBER01} RUN_CMD_POST: srun --export=ALL --mpi=pmi2 -n $nprocs 
RUN_CMD_PRDGEN: srun --export=ALL --mpi=pmi2 -n $nprocs diff --git a/ush/machine/hera.yaml b/ush/machine/hera.yaml index 6b27fb8584..314f770d94 100644 --- a/ush/machine/hera.yaml +++ b/ush/machine/hera.yaml @@ -28,7 +28,7 @@ platform: TEST_PREGEN_BASEDIR: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/FV3LAM_pregen TEST_ALT_EXTRN_MDL_SYSBASEDIR_ICS: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir TEST_ALT_EXTRN_MDL_SYSBASEDIR_LBCS: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir - TEST_VX_FCST_INPUT_BASEDIR: '{{ "/scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/output_data/fcst_" }}{{ "ens" if (global.NUM_ENS_MEMBERS > 0) else "det" }}{{ "/{{workflow.PREDEF_GRID_NAME}}" }}{% raw %}{% endraw %}' + TEST_VX_FCST_INPUT_DIR: '{{ "/scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/output_data/fcst_" }}{{ "ens" if (global.NUM_ENS_MEMBERS > 0) else "det" }}{{ "/{{workflow.PREDEF_GRID_NAME}}/${CDATE}/postprd" }}{% raw %}{% endraw %}' FIXaer: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/fix/fix_aer FIXgsi: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/fix/fix_gsi FIXgsm: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/fix/fix_am @@ -38,6 +38,18 @@ platform: FIXshp: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/NaturalEarth EXTRN_MDL_DATA_STORES: hpss aws nomads + +rocoto: + tasks: + metatask_run_ensemble: + task_run_fcst_mem#mem#: + cores: '{{ task_run_fcst.PE_MEMBER01 // 1 }}' + native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' + nodes: + nnodes: + nodesize: + ppn: + data: obs: RAP_obs: /scratch2/BMC/public/data/grids/rap/obs @@ -53,3 +65,4 @@ cpl_aqm_parm: NEXUS_FIX_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/nexus/fix PT_SRC_BASEDIR: /scratch1/RDARCH/rda-arl-gpu/YouHua.Tang/nei2016v1-pt + diff --git a/ush/machine/jet.yaml b/ush/machine/jet.yaml index 3d5027f1fa..b8f8a3b921 100644 --- a/ush/machine/jet.yaml +++ 
b/ush/machine/jet.yaml @@ -47,3 +47,14 @@ data: GFS_obs: prepbufr: /public/data/grids/gfs/prepbufr tcvitals: /public/data/grids/gfs/bufr + +rocoto: + tasks: + metatask_run_ensemble: + task_run_fcst_mem#mem#: + cores: '{{ task_run_fcst.PE_MEMBER01 // 1 }}' + native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' + nodes: + nnodes: + nodesize: + ppn: diff --git a/ush/machine/linux.yaml b/ush/machine/linux.yaml index b6c274f9db..ae18b57b19 100644 --- a/ush/machine/linux.yaml +++ b/ush/machine/linux.yaml @@ -30,3 +30,14 @@ platform: data: ics_lbcs: FV3GFS: /home/username/DATA/UFS/FV3GFS + +rocoto: + tasks: + metatask_run_ensemble: + task_run_fcst_mem#mem#: + cores: '{{ task_run_fcst.PE_MEMBER01 // 1 }}' + native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive' + nodes: + nnodes: + nodesize: + ppn: diff --git a/ush/machine/noaacloud.yaml b/ush/machine/noaacloud.yaml index ecdd2982a8..6f35de59d8 100644 --- a/ush/machine/noaacloud.yaml +++ b/ush/machine/noaacloud.yaml @@ -8,6 +8,7 @@ platform: METPLUS_PATH: /contrib/EPIC/spack-stack/envs/srw-develop-intel/install/intel/2021.3.0/metplus MET_BIN_EXEC: bin MET_INSTALL_DIR: /contrib/EPIC/spack-stack/envs/srw-develop-intel/install/intel/2021.3.0/met + REMOVE_MEMORY: True RUN_CMD_FCST: mpiexec -np ${PE_MEMBER01} RUN_CMD_POST: mpiexec -np $nprocs RUN_CMD_PRDGEN: mpiexec -np $nprocs diff --git a/ush/machine/wcoss2.yaml b/ush/machine/wcoss2.yaml index 994a0f0fb9..9ba28b4d3d 100644 --- a/ush/machine/wcoss2.yaml +++ b/ush/machine/wcoss2.yaml @@ -54,3 +54,21 @@ cpl_aqm_parm: NEXUS_GFS_SFC_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/GFS_DATA PT_SRC_BASEDIR: /lfs/h2/emc/physics/noscrub/Youhua.Tang/nei2016v1-pt +rocoto: + tasks: + task_get_extrn_ics: + native: + task_get_extrn_lbcs: + native: + task_plot_allvars: + native: + metatask_run_ensemble: + task_run_fcst_mem#mem#: + cores: + native: '{{ platform.SCHED_NATIVE_CMD }}' + nodes: '{{ 
parent.nnodes }}:ppn={{ parent.ppn }}:tpp={{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST }}' + nodesize: '{{ platform.NCORES_PER_NODE }}' + task_nexus_emission_#nspt#: + nodes: '{{ parent.nnodes }}:ppn={{ parent.ppn }}:tpp={{ task_nexus_emission.OMP_NUM_THREADS_NEXUS_EMISSION }}' + task_fire_emission: + native: diff --git a/ush/python_utils/config_parser.py b/ush/python_utils/config_parser.py index a66be884f2..bbabeee3db 100644 --- a/ush/python_utils/config_parser.py +++ b/ush/python_utils/config_parser.py @@ -15,10 +15,19 @@ """ import argparse +import configparser import datetime +import json +import os +import pathlib +import re +from textwrap import dedent +import xml.etree.ElementTree as ET +from xml.dom import minidom +import jinja2 # -# Note: Yaml maynot be available in which case we suppress +# Note: yaml may not be available in which case we suppress # the exception, so that we can have other functionality # provided by this module. # @@ -26,19 +35,8 @@ import yaml except ModuleNotFoundError: pass -# The rest of the formats: JSON/SHELL/INI/XML do not need -# external packages -import json -import os -import re -from textwrap import dedent -import configparser -import xml.etree.ElementTree as ET -from xml.dom import minidom -import jinja2 - -from .environment import list_to_str, str_to_list +from .environment import list_to_str, str_to_list, str_to_type from .run_command import run_command ########## @@ -76,28 +74,73 @@ def cfg_to_yaml_str(cfg): """Get contents of config file as a yaml string""" return yaml.dump( - cfg, Dumper=custom_dumper, sort_keys=False, default_flow_style=False + cfg, sort_keys=False, default_flow_style=False ) +def cycstr(loader, node): + + ''' Returns a cyclestring Element whose content corresponds to the + input node argument ''' + + arg = loader.construct_scalar(node) + return f'{arg}' + +def include(filepaths): + + ''' Returns a dictionary that includes the contents of the referenced + YAML file(s). 
''' + + srw_path = pathlib.Path(__file__).resolve().parents[0].parents[0] + + cfg = {} + for filepath in filepaths: + abs_path = filepath + if not os.path.isabs(filepath): + abs_path = os.path.join(os.path.dirname(srw_path), filepath) + with open(abs_path, 'r') as fp: + contents = yaml.load(fp, Loader=yaml.SafeLoader) + for key, value in contents.items(): + cfg[key] = value + return yaml.dump(cfg, sort_keys=False) def join_str(loader, node): """Custom tag hangler to join strings""" seq = loader.construct_sequence(node) return "".join([str(i) for i in seq]) +def startstopfreq(loader, node): + + ''' Returns a Rocoto-formatted string for the contents of a cycledef + tag. Assume that the items in the node are env variables, and return + a Rocoto-formatted string''' + + args = loader.construct_sequence(node) + + # Try to fill the values from environment values, default to the + # value provided in the entry. + start, stop, freq = (os.environ.get(arg, arg) for arg in args) + + return f'{start}00 {stop}00 {freq}:00:00' + +def nowtimestamp(loader, node): + return "id_" + str(int(datetime.datetime.now().timestamp())) try: + yaml.add_constructor("!cycstr", cycstr, Loader=yaml.SafeLoader) + yaml.add_constructor("!include", include, Loader=yaml.SafeLoader) yaml.add_constructor("!join_str", join_str, Loader=yaml.SafeLoader) + yaml.add_constructor("!startstopfreq", startstopfreq, Loader=yaml.SafeLoader) + yaml.add_constructor("!nowtimestamp", nowtimestamp ,Loader=yaml.SafeLoader) except NameError: pass + def path_join(arg): """A filter for jinja2 that joins paths""" return os.path.join(*arg) - def days_ago(arg): """A filter for jinja2 that gives us a date string for x number of days ago""" @@ -105,9 +148,7 @@ def days_ago(arg): return (datetime.date.today() - datetime.timedelta(days=arg)).strftime("%Y%m%d00") - -def extend_yaml(yaml_dict, full_dict=None): - +def extend_yaml(yaml_dict, full_dict=None, parent=None): """ Updates yaml_dict inplace by rendering any existing Jinja2 
templates that exist in a value. @@ -116,64 +157,93 @@ def extend_yaml(yaml_dict, full_dict=None): if full_dict is None: full_dict = yaml_dict + if parent is None: + full_dict = yaml_dict + if not isinstance(yaml_dict, dict): return - for k, v in yaml_dict.items(): + for k, val in yaml_dict.items(): - if isinstance(v, dict): - extend_yaml(v, full_dict) + if isinstance(val, dict): + extend_yaml(val, full_dict, yaml_dict) else: - # Save a bit of compute and only do this part for strings that - # contain the jinja double brackets. - v_str = str(v.text) if isinstance(v, ET.Element) else str(v) - is_a_template = any((ele for ele in ["{{", "{%"] if ele in v_str)) - if is_a_template: - - # Find expressions first, and process them as a single template - # if they exist - # Find individual double curly brace template in the string - # otherwise. We need one substitution template at a time so that - # we can opt to leave some un-filled when they are not yet set. - # For example, we can save cycle-dependent templates to fill in - # at run time. - if "{%" in v: - templates = [v_str] - else: - # Separates out all the double curly bracket pairs - templates = re.findall(r"{{[^}]*}}|\S", v_str) - data = [] - for template in templates: - j2env = jinja2.Environment( - loader=jinja2.BaseLoader, undefined=jinja2.StrictUndefined - ) - j2env.filters["path_join"] = path_join - j2env.filters["days_ago"] = days_ago - j2tmpl = j2env.from_string(template) - try: - # Fill in a template that has the appropriate variables - # set. 
- template = j2tmpl.render(**yaml_dict, **full_dict) - except jinja2.exceptions.UndefinedError as e: - # Leave a templated field as-is in the resulting dict - pass - except TypeError: - pass - except ZeroDivisionError: - pass - except: - print(f"{k}: {template}") - raise - - data.append(template) + if not isinstance(val, list): + val = [val] + for v_idx, v in enumerate(val): + # Save a bit of compute and only do this part for strings that + # contain the jinja double brackets. + v_str = str(v.text) if isinstance(v, ET.Element) else str(v) if isinstance(v, ET.Element): - v.text = "".join(data) - else: - # Put the full template line back together as it was, - # filled or not - yaml_dict[k] = "".join(data) + print('ELEMENT VSTR', v_str, v.text, yaml_dict) + is_a_template = any((ele for ele in ["{{", "{%"] if ele in v_str)) + if is_a_template: + # Find expressions first, and process them as a single template + # if they exist + # Find individual double curly brace template in the string + # otherwise. We need one substitution template at a time so that + # we can opt to leave some un-filled when they are not yet set. + # For example, we can save cycle-dependent templates to fill in + # at run time. + if "{%" in v_str: + templates = [v_str] + else: + # Separates out all the double curly bracket pairs + templates = [m.group() for m in + re.finditer(r"{{[^}]*}}|\S", v_str) if '{{' + in m.group()] + data = [] + for template in templates: + j2env = jinja2.Environment( + loader=jinja2.BaseLoader, undefined=jinja2.StrictUndefined + ) + j2env.filters["path_join"] = path_join + j2env.filters["days_ago"] = days_ago + j2env.filters["include"] = include + try: + j2tmpl = j2env.from_string(template) + except: + print(f"ERROR filling template: {template}, {v_str}") + raise + try: + # Fill in a template that has the appropriate variables + # set. 
+ template = j2tmpl.render(parent=parent, **yaml_dict, **full_dict) + except jinja2.exceptions.UndefinedError as e: + # Leave a templated field as-is in the resulting dict + pass + except ValueError: + pass + except TypeError: + pass + except ZeroDivisionError: + pass + except: + print(f"{k}: {template}") + raise + + data.append(template) + + convert_type = True + for tmpl, rendered in zip(templates, data): + v_str = v_str.replace(tmpl, rendered) + if "string" in tmpl: + convert_type = False + + if convert_type: + v_str = str_to_type(v_str) + + if isinstance(v, ET.Element): + print('Replacing ET text with', v_str) + v.text = v_str + elif isinstance(yaml_dict[k], list): + yaml_dict[k][v_idx] = v_str + else: + # Put the full template line back together as it was, + # filled or not + yaml_dict[k] = v_str ########## @@ -457,12 +527,15 @@ def update_dict(dict_o, dict_t, provide_default=False): Returns: None """ - for k, v in dict_o.items(): + for k, v in dict_o.copy().items(): if isinstance(v, dict): if isinstance(dict_t.get(k), dict): update_dict(v, dict_t[k], provide_default) else: dict_t[k] = v + elif v is None and k in dict_t.keys(): + # remove the key if the source dict has null entry + del dict_t[k] elif k in dict_t.keys(): if ( (not provide_default) @@ -471,11 +544,13 @@ def update_dict(dict_o, dict_t, provide_default=False): or ("{{" in dict_t[k]) ): dict_t[k] = v + elif k not in dict_t.keys(): + dict_t[k] = v def check_structure_dict(dict_o, dict_t): """Check if a dictionary's structure follows a template. - The invalid entries are returned as a list of lists. + The invalid entries are returned as a dictionary. 
If all entries are valid, returns an empty dictionary Args: diff --git a/ush/python_utils/environment.py b/ush/python_utils/environment.py index da50085933..711934c4ec 100644 --- a/ush/python_utils/environment.py +++ b/ush/python_utils/environment.py @@ -64,6 +64,8 @@ def str_to_type(s, return_string=0): return True if s.lower() in ["false", "no", "nope"]: return False + if s in ["None", "null"]: + return None v = str_to_date(s) if v is not None: if return_string == 2: diff --git a/ush/set_vx_fhr_list.sh b/ush/set_vx_fhr_list.sh index 3b86cf1022..d3403d09f2 100644 --- a/ush/set_vx_fhr_list.sh +++ b/ush/set_vx_fhr_list.sh @@ -256,7 +256,7 @@ Final (i.e. after filtering for missing files) set of foreast hours is if [ "${num_missing_files}" -gt "${NUM_MISSING_OBS_FILES_MAX}" ]; then print_err_msg_exit "\ The number of missing files (num_missing_files) is greater than the -maximum allowed number (NUM_MISSING_OBS_MAS): +maximum allowed number (NUM_MISSING_OBS_FILES_MAX): num_missing_files = ${num_missing_files} NUM_MISSING_OBS_FILES_MAX = ${NUM_MISSING_OBS_FILES_MAX}" fi diff --git a/ush/setup.py b/ush/setup.py index c6ea815d9d..b133612bef 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -1,5 +1,7 @@ #!/usr/bin/env python3 +import copy +import json import os import sys import datetime @@ -7,6 +9,8 @@ import logging from textwrap import dedent +import yaml + from python_utils import ( log_info, cd_vrfy, @@ -86,6 +90,14 @@ def load_config_for_setup(ushdir, default_config, user_config): # Make sure the keys in user config match those in the default # config. invalid = check_structure_dict(cfg_u, cfg_d) + + # Task and metatask entries can be added arbitrarily under the + # rocoto section. 
Remove those from invalid if they exist + for key in invalid.copy().keys(): + if key.split("_", maxsplit=1)[0] in ["task", "metatask"]: + invalid.pop(key) + logging.info(f"Found and allowing key {key}") + if invalid: errmsg = f"Invalid key(s) specified in {user_config}:\n" for entry in invalid: @@ -130,6 +142,54 @@ def load_config_for_setup(ushdir, default_config, user_config): # Load the constants file cfg_c = load_config_file(os.path.join(ushdir, "constants.yaml")) + + # Load the rocoto workflow default file + cfg_wflow = load_config_file(os.path.join(ushdir, os.pardir, "parm", + "wflow", "default_workflow.yaml")) + + # Takes care of removing any potential "null" entries, i.e., + # unsetting a default value from an anchored default_task + update_dict(cfg_wflow, cfg_wflow) + + + # Take any user-specified taskgroups entry here. + taskgroups = cfg_u.get('rocoto', {}).get('tasks', {}).get('taskgroups') + if taskgroups: + cfg_wflow['rocoto']['tasks']['taskgroups'] = taskgroups + + # Extend yaml here on just the rocoto section to include the + # appropriate groups of tasks + extend_yaml(cfg_wflow) + + + # Put the entries expanded under taskgroups in tasks + rocoto_tasks = cfg_wflow["rocoto"]["tasks"] + cfg_wflow["rocoto"]["tasks"] = yaml.load(rocoto_tasks.pop("taskgroups"),Loader=yaml.SafeLoader) + + # Update wflow config from user one more time to make sure any of + # the "null" settings are removed, i.e., tasks turned off. 
+ update_dict(cfg_u.get('rocoto', {}), cfg_wflow["rocoto"]) + + def add_jobname(tasks): + """ Add the jobname entry for all the tasks in the workflow """ + + if not isinstance(tasks, dict): + return + for task, task_settings in tasks.items(): + task_type = task.split("_", maxsplit=1)[0] + if task_type == "task": + # Use the provided attribute if it is present, otherwise use + # the name in the key + tasks[task]["jobname"] = \ + task_settings.get("attrs", {}).get("name") or \ + task.split("_", maxsplit=1)[1] + elif task_type == "metatask": + add_jobname(task_settings) + + + # Add jobname entry to each remaining task + add_jobname(cfg_wflow["rocoto"]["tasks"]) + # Update default config with the constants, the machine config, and # then the user_config # Recall: update_dict updates the second dictionary with the first, @@ -139,6 +199,9 @@ def load_config_for_setup(ushdir, default_config, user_config): # Constants update_dict(cfg_c, cfg_d) + # Default workflow settings + update_dict(cfg_wflow, cfg_d) + # Machine settings update_dict(machine_cfg, cfg_d) @@ -148,6 +211,10 @@ def load_config_for_setup(ushdir, default_config, user_config): # User settings (take precedence over all others) update_dict(cfg_u, cfg_d) + # Update the cfg_d against itself now, to remove any "null" + # stranglers. + update_dict(cfg_d, cfg_d) + # Set "Home" directory, the top-level ufs-srweather-app directory homedir = os.path.abspath(os.path.dirname(__file__) + os.sep + os.pardir) cfg_d["user"]["HOMEdir"] = homedir @@ -327,10 +394,7 @@ def setup(USHdir, user_config_fn="config.yaml", debug: bool = False): # Workflow workflow_config = expt_config["workflow"] - # Generate a unique number for this workflow run. 
This may be used to - # get unique log file names for example - workflow_id = "id_" + str(int(datetime.datetime.now().timestamp())) - workflow_config["WORKFLOW_ID"] = workflow_id + workflow_id = workflow_config["WORKFLOW_ID"] log_info(f"""WORKFLOW ID = {workflow_id}""") debug = workflow_config.get("DEBUG") @@ -419,23 +483,24 @@ def setup(USHdir, user_config_fn="config.yaml", debug: bool = False): # ----------------------------------------------------------------------- # - workflow_switches = expt_config["workflow_switches"] - run_task_make_grid = workflow_switches['RUN_TASK_MAKE_GRID'] - run_task_make_orog = workflow_switches['RUN_TASK_MAKE_OROG'] - run_task_make_sfc_climo = workflow_switches['RUN_TASK_MAKE_SFC_CLIMO'] + rocoto_config = expt_config.get('rocoto', {}) + rocoto_tasks = rocoto_config.get("tasks") + run_make_grid = rocoto_tasks.get('task_make_grid') is not None + run_make_orog = rocoto_tasks.get('task_make_orog') is not None + run_make_sfc_climo = rocoto_tasks.get('task_make_sfc_climo') is not None # Necessary tasks are turned on pregen_basedir = expt_config["platform"].get("DOMAIN_PREGEN_BASEDIR") if pregen_basedir is None and not ( - run_task_make_grid and run_task_make_orog and run_task_make_sfc_climo + run_make_grid and run_make_orog and run_make_sfc_climo ): raise Exception( f""" DOMAIN_PREGEN_BASEDIR must be set when any of the following - tasks are turned off: - RUN_TASK_MAKE_GRID = {run_task_make_grid} - RUN_TASK_MAKE_OROG = {run_task_make_orog} - RUN_TASK_MAKE_SFC_CLIMO = {run_task_make_sfc_climo}""" + tasks are not included in the workflow: + RUN_MAKE_GRID = {run_make_grid} + RUN_MAKE_OROG = {run_make_orog} + RUN_MAKE_SFC_CLIMO = {run_make_sfc_climo}""" ) # A batch system account is specified @@ -449,6 +514,34 @@ def setup(USHdir, user_config_fn="config.yaml", debug: bool = False): ) ) + def remove_tag(tasks, tag): + """ Remove the tag for all the tasks in the workflow """ + + if not isinstance(tasks, dict): + return + for task, task_settings 
in tasks.items(): + task_type = task.split("_", maxsplit=1)[0] + if task_type == "task": + task_settings.pop(tag, None) + elif task_type == "metatask": + remove_tag(task_settings, tag) + + # Remove all memory tags for platforms that do not support them + remove_memory = expt_config["platform"].get("REMOVE_MEMORY") + if remove_memory: + remove_tag(rocoto_tasks, "memory") + + for part in ['PARTITION_HPSS', 'PARTITION_DEFAULT', 'PARTITION_FCST']: + partition = expt_config["platform"].get(part) + if not partition: + remove_tag(rocoto_tasks, 'partition') + + # When not running subhourly post, remove those tasks, if they exist + if not expt_config.get("task_run_post", {}).get("SUB_HOURLY_POST"): + post_meta = rocoto_tasks.get("metatask_run_ens_post", {}) + post_meta.pop("metatask_run_sub_hourly_post", None) + post_meta.pop("metatask_sub_hourly_last_hour_post", None) + # # ----------------------------------------------------------------------- # @@ -581,41 +674,59 @@ def get_location(xcs, fmt, expt_cfg): run_envir = expt_config["user"].get("RUN_ENVIR", "") # set varying forecast lengths only when fcst_len_hrs=-1 + fcst_len_hrs = workflow_config.get("FCST_LEN_HRS") if fcst_len_hrs == -1: - # Create a full list of cycle dates + + # Check that the number of entries divides into a day fcst_len_cycl = workflow_config.get("FCST_LEN_CYCL") - num_fcst_len_cycl = len(fcst_len_cycl) + incr_cycl_freq = int(workflow_config.get("INCR_CYCL_FREQ")) + date_first_cycl = workflow_config.get("DATE_FIRST_CYCL") date_last_cycl = workflow_config.get("DATE_LAST_CYCL") - incr_cycl_freq = workflow_config.get("INCR_CYCL_FREQ") - all_cdates = set_cycle_dates(date_first_cycl,date_last_cycl,incr_cycl_freq) - num_all_cdates = len(all_cdates) - # Create a full list of forecast hours - num_recur = num_all_cdates // num_fcst_len_cycl - rem_recur = num_all_cdates % num_fcst_len_cycl - if rem_recur == 0: - fcst_len_cycl = fcst_len_cycl * num_recur - num_fcst_len_cycl = len(fcst_len_cycl) - 
workflow_config.update({"FCST_LEN_CYCL_ALL": fcst_len_cycl}) - workflow_config.update({"ALL_CDATES": all_cdates}) - else: - raise Exception( - f""" - The number of the cycle dates is not evenly divisible by the - number of the forecast lengths: - num_all_cdates = {num_all_cdates} - num_fcst_len_cycl = {num_fcst_len_cycl} - rem = num_all_cdates%%num_fcst_len_cycl = {rem_recur}""" - ) - if num_fcst_len_cycl != num_all_cdates: - raise Exception( - f""" - The number of the cycle dates does not match with the number of - the forecast lengths: - num_all_cdates = {num_all_cdates} - num_fcst_len_cycl = {num_fcst_len_cycl}""" - ) + + if 24 / incr_cycl_freq != len(fcst_len_cycl): + + # Also allow for the possibility that the user is running + # cycles for less than a day: + num_cycles = len(set_cycle_dates( + date_first_cycl, + date_last_cycl, + incr_cycl_freq)) + + if num_cycles != len(fcst_len_cycl): + logger.error(f""" The number of entries in FCST_LEN_CYCL does + not divide evenly into a 24 hour day or the number of cycles + in your experiment! + FCST_LEN_CYCL = {fcst_len_cycl} + """ + ) + raise ValueError + + # Build cycledefs entries for the long forecasts + # Short forecast cycles will be relevant to all intended + # forecasts...after all, a 12 hour forecast also encompasses a 3 + # hour forecast, so the short ones will be consistent with the + # existing default forecast cycledef + + # Reset the hours to the short forecast length + workflow_config["FCST_LEN_HRS"] = min(fcst_len_cycl) + + # Find the entries that match the long forecast, and map them to + # their time of day. 
+ long_fcst_len = max(fcst_len_cycl) + long_indices = [i for i,x in enumerate(fcst_len_cycl) if x == long_fcst_len] + long_cycles = [i * incr_cycl_freq for i in long_indices] + + # add one forecast entry per cycle per day + fcst_cdef = [] + + for hh in long_cycles: + first = date_first_cycl.replace(hour=hh).strftime("%Y%m%d%H") + last = date_last_cycl.replace(hour=hh).strftime("%Y%m%d%H") + fcst_cdef.append(f'{first}00 {last}00 24:00:00') + + rocoto_config['cycledefs']['long_forecast'] = fcst_cdef # # ----------------------------------------------------------------------- @@ -977,7 +1088,8 @@ def get_location(xcs, fmt, expt_cfg): for nco_var in nco_vars: nco_config[nco_var.upper()] = exptdir - nco_config["LOGBASEDIR"] = os.path.join(exptdir, "log") + # Set the rocoto string for the fcst output location + rocoto_config["entities"]["FCST_DIR"] = "{{ nco.COMOUT_BASEDIR }}/@Y@m@d@H" # Use env variables for NCO variables and create NCO directories if run_envir == "nco": @@ -992,11 +1104,18 @@ def get_location(xcs, fmt, expt_cfg): mkdir_vrfy(f' -p "{nco_config.get("PACKAGEROOT")}"') mkdir_vrfy(f' -p "{nco_config.get("DATAROOT")}"') mkdir_vrfy(f' -p "{nco_config.get("DCOMROOT")}"') - mkdir_vrfy(f' -p "{nco_config.get("LOGBASEDIR")}"') mkdir_vrfy(f' -p "{nco_config.get("EXTROOT")}"') + + # Update the rocoto string for the fcst output location if + # running an ensemble in nco mode + if global_sect["DO_ENSEMBLE"]: + rocoto_config["entities"]["FCST_DIR"] = \ + "{{ nco.DATAROOT }}/run_fcst_mem#mem#.{{ workflow.WORKFLOW_ID }}_@Y@m@d@H" + if nco_config["DBNROOT"]: mkdir_vrfy(f' -p "{nco_config["DBNROOT"]}"') + mkdir_vrfy(f' -p "{nco_config.get("LOGBASEDIR")}"') # create experiment dir mkdir_vrfy(f' -p "{exptdir}"') @@ -1103,17 +1222,23 @@ def get_location(xcs, fmt, expt_cfg): # ----------------------------------------------------------------------- # + task_defs = rocoto_config.get('tasks') + # Ensemble verification can only be run in ensemble mode do_ensemble = 
global_sect["DO_ENSEMBLE"] - run_task_vx_ensgrid = workflow_switches["RUN_TASK_VX_ENSGRID"] - run_task_vx_enspoint = workflow_switches["RUN_TASK_VX_ENSPOINT"] - if (not do_ensemble) and (run_task_vx_ensgrid or run_task_vx_enspoint): + + + # Gather all the tasks/metatasks that are defined for verifying + # ensembles + ens_vx_tasks = [task for task in task_defs if "MET_GridStat_vx_ens" in task] + if (not do_ensemble) and ens_vx_tasks: + task_str = "\n".join(ens_vx_tasks) raise Exception( f''' Ensemble verification can not be run unless running in ensemble mode: DO_ENSEMBLE = \"{do_ensemble}\" - RUN_TASK_VX_ENSGRID = \"{run_task_vx_ensgrid}\" - RUN_TASK_VX_ENSPOINT = \"{run_task_vx_enspoint}\"''' + Ensemble verification tasks: {task_str} + ''' ) # @@ -1129,35 +1254,48 @@ def get_location(xcs, fmt, expt_cfg): mkdir_vrfy(f' -p "{fixlam}"') # - # Use the pregenerated domain files if the RUN_TASK_MAKE* tasks are - # turned off. Link the files, and check that they all contain the - # same resolution input. + # Use the pregenerated domain files if the tasks to generate them + # are turned off. Link the files, and check that they all contain + # the same resolution input. 
# - run_task_make_ics = workflow_switches['RUN_TASK_MAKE_LBCS'] - run_task_make_lbcs = workflow_switches['RUN_TASK_MAKE_ICS'] - run_task_run_fcst = workflow_switches['RUN_TASK_RUN_FCST'] - run_task_makeics_or_makelbcs_or_runfcst = run_task_make_ics or \ - run_task_make_lbcs or \ - run_task_run_fcst + + def dict_find(user_dict, substring): + + if not isinstance(user_dict, dict): + return + + for key, value in user_dict.items(): + if substring in key: + return True + if isinstance(value, dict) and dict_find(value, substring): + return True + return False + + run_make_ics = dict_find(rocoto_tasks, "task_make_ics") + run_make_lbcs = dict_find(rocoto_tasks, "task_make_lbcs") + run_run_fcst = dict_find(rocoto_tasks, "task_run_fcst") + run_any_coldstart_task = run_make_ics or \ + run_make_lbcs or \ + run_run_fcst # Flags for creating symlinks to pre-generated grid, orography, and sfc_climo files. # These consider dependencies of other tasks on each pre-processing task. create_symlinks_to_pregen_files = { - "GRID": (not workflow_switches['RUN_TASK_MAKE_GRID']) and \ - (run_task_make_orog or run_task_make_sfc_climo or run_task_makeics_or_makelbcs_or_runfcst), - "OROG": (not workflow_switches['RUN_TASK_MAKE_OROG']) and \ - (run_task_make_sfc_climo or run_task_makeics_or_makelbcs_or_runfcst), - "SFC_CLIMO": (not workflow_switches['RUN_TASK_MAKE_SFC_CLIMO']) and \ - (run_task_make_ics or run_task_make_lbcs), + "GRID": (not run_make_grid) and \ + (run_make_orog or run_make_sfc_climo or run_any_coldstart_task), + "OROG": (not run_make_orog) and \ + (run_make_sfc_climo or run_any_coldstart_task), + "SFC_CLIMO": (not run_make_sfc_climo) and \ + (run_make_ics or run_make_lbcs), } prep_tasks = ["GRID", "OROG", "SFC_CLIMO"] res_in_fixlam_filenames = None for prep_task in prep_tasks: res_in_fns = "" + sect_key = f"task_make_{prep_task.lower()}" # If the user doesn't want to run the given task, link the fix # file from the staged files. 
- if create_symlinks_to_pregen_files[prep_task]: - sect_key = f"task_make_{prep_task.lower()}" + if not task_defs.get(sect_key): dir_key = f"{prep_task}_DIR" task_dir = expt_config[sect_key].get(dir_key) @@ -1233,16 +1371,18 @@ def get_location(xcs, fmt, expt_cfg): # if fcst_config["WRITE_DOPOST"]: # Turn off run_post - if workflow_switches["RUN_TASK_RUN_POST"]: + task_name = 'metatask_run_ens_post' + removed_task = task_defs.pop(task_name, None) + if removed_task: logger.warning( dedent( f""" - Inline post is turned on, deactivating post-processing tasks: - RUN_TASK_RUN_POST = False - """ + Inline post is turned on, deactivating post-processing tasks: + Removing {task_name} from task definitions + list. + """ ) ) - workflow_switches["RUN_TASK_RUN_POST"] = False # Check if SUB_HOURLY_POST is on if expt_config["task_run_post"]["SUB_HOURLY_POST"]: @@ -1345,8 +1485,20 @@ def get_location(xcs, fmt, expt_cfg): configuration file ('{user_config_fn}').""" ) + # Final failsafe before writing rocoto yaml to ensure we don't have any invalid dicts + # (e.g. metatasks with no tasks, tasks with no associated commands) + clean_rocoto_dict(expt_config["rocoto"]["tasks"]) + + rocoto_yaml_fp = workflow_config["ROCOTO_YAML_FP"] + with open(rocoto_yaml_fp, 'w') as f: + yaml.Dumper.ignore_aliases = lambda *args : True + yaml.dump(expt_config.get("rocoto"), f, sort_keys=False) + + var_defns_cfg = copy.deepcopy(expt_config) + del var_defns_cfg["rocoto"] with open(global_var_defns_fp, "a") as f: - f.write(cfg_to_shell_str(expt_config)) + f.write(cfg_to_shell_str(var_defns_cfg)) + # # ----------------------------------------------------------------------- @@ -1372,6 +1524,40 @@ def get_location(xcs, fmt, expt_cfg): return expt_config +def clean_rocoto_dict(rocotodict): + """Removes any invalid entries from rocoto_dict. Examples of invalid entries are: + + 1. A task dictionary containing no "command" key + 2. 
A metatask dictionary containing no task dictionaries""" + + # Loop 1: search for tasks with no command key, iterating over metatasks + for key in list(rocotodict.keys()): + if key.split("_", maxsplit=1)[0] == "metatask": + clean_rocoto_dict(rocotodict[key]) + elif key.split("_", maxsplit=1)[0] in ["task"]: + if not rocotodict[key].get("command"): + popped = rocotodict.pop(key) + logging.warning(f"Invalid task {key} removed due to empty/unset run command") + logging.debug(f"Removed entry:\n{popped}") + + # Loop 2: search for metatasks with no tasks in them + for key in list(rocotodict.keys()): + if key.split("_", maxsplit=1)[0] == "metatask": + valid = False + for key2 in list(rocotodict[key].keys()): + if key2.split("_", maxsplit=1)[0] == "metatask": + clean_rocoto_dict(rocotodict[key][key2]) + #After above recursion, any nested empty metatasks will have popped themselves + if rocotodict[key].get(key2): + valid = True + elif key2.split("_", maxsplit=1)[0] == "task": + valid = True + if not valid: + popped = rocotodict.pop(key) + logging.warning(f"Invalid/empty metatask {key} removed") + logging.debug(f"Removed entry:\n{popped}") + + # # ----------------------------------------------------------------------- diff --git a/ush/valid_param_vals.yaml b/ush/valid_param_vals.yaml index 237031e3d9..92cca29288 100644 --- a/ush/valid_param_vals.yaml +++ b/ush/valid_param_vals.yaml @@ -55,17 +55,7 @@ valid_vals_GRID_GEN_METHOD: ["GFDLgrid", "ESGgrid"] valid_vals_PREEXISTING_DIR_METHOD: ["delete", "rename", "quit"] valid_vals_GTYPE: ["regional"] valid_vals_WRTCMP_output_grid: ["rotated_latlon", "lambert_conformal", "regional_latlon"] -valid_vals_RUN_TASK_MAKE_GRID: [True, False] -valid_vals_RUN_TASK_MAKE_OROG: [True, False] -valid_vals_RUN_TASK_MAKE_SFC_CLIMO: [True, False] -valid_vals_RUN_TASK_RUN_POST: [True, False] -valid_vals_RUN_TASK_RUN_PRDGEN: [True, False] -valid_vals_DO_PARALLEL_PRDGEN: [True, False] valid_vals_WRITE_DOPOST: [True, False] 
-valid_vals_RUN_TASK_VX_GRIDSTAT: [True, False] -valid_vals_RUN_TASK_VX_POINTSTAT: [True, False] -valid_vals_RUN_TASK_VX_ENSGRID: [True, False] -valid_vals_RUN_TASK_VX_ENSPOINT: [True, False] valid_vals_QUILTING: [True, False] valid_vals_PRINT_ESMF: [True, False] valid_vals_USE_CRON_TO_RELAUNCH: [True, False] @@ -88,17 +78,6 @@ valid_vals_SUB_HOURLY_POST: [True, False] valid_vals_DT_SUBHOURLY_POST_MNTS: [0, 1, 2, 3, 4, 5, 6, 10, 12, 15, 20, 30] valid_vals_USE_MERRA_CLIMO: [True, False] valid_vals_CPL_AQM: [True, False] -valid_vals_RUN_TASK_AQM_ICS: [True, False] -valid_vals_RUN_TASK_AQM_LBCS: [True, False] -valid_vals_RUN_TASK_NEXUS_GFS_SFC: [True, False] -valid_vals_RUN_TASK_NEXUS_EMISSION: [True, False] -valid_vals_RUN_TASK_FIRE_EMISSION: [True, False] -valid_vals_RUN_TASK_POINT_SOURCE: [True, False] -valid_vals_RUN_TASK_PRE_POST_STAT: [True, False] -valid_vals_RUN_TASK_POST_STAT_O3: [True, False] -valid_vals_RUN_TASK_POST_STAT_PM25: [True, False] -valid_vals_RUN_TASK_BIAS_CORRECTION_O3: [True, False] -valid_vals_RUN_TASK_BIAS_CORRECTION_PM25: [True, False] valid_vals_DO_AQM_DUST: [True, False] valid_vals_DO_AQM_CANOPY: [True, False] valid_vals_DO_AQM_PRODUCT: [True, False]