From 1678a2f60cf6c47332dadb8052db9fedec16ee6c Mon Sep 17 00:00:00 2001
From: Rahul Mahajan
Date: Thu, 12 Sep 2024 15:28:31 -0400
Subject: [PATCH 1/2] disable native grid model output when not doing JEDI-atm
 DA. Update model output options in the write grid comp

---
 parm/config/gefs/config.ufs        | 39 ++++++++++++------------------
 parm/config/gfs/config.ufs         | 31 +++++++++--------------
 ush/parsing_model_configure_FV3.sh |  6 ++++-
 3 files changed, 33 insertions(+), 43 deletions(-)

diff --git a/parm/config/gefs/config.ufs b/parm/config/gefs/config.ufs
index bfc11e3c5a..bc3950490e 100644
--- a/parm/config/gefs/config.ufs
+++ b/parm/config/gefs/config.ufs
@@ -80,8 +80,8 @@ case "${fv3_res}" in
         export nthreads_fv3_gfs=1
         export nthreads_ufs=1
         export nthreads_ufs_gfs=1
-        export xr_cnvcld=.false. # Do not pass conv. clouds to Xu-Randall cloud fraction
-        export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling
+        export xr_cnvcld=.false.               # Do not pass conv. clouds to Xu-Randall cloud fraction
+        export cdmbgwd="0.071,2.1,1.0,1.0"     # mountain blocking, ogwd, cgwd, cgwd src scaling
         export cdmbgwd_gsl="40.0,1.77,1.0,1.0" # settings for GSL drag suite
         export k_split=1
         export n_split=4
@@ -104,8 +104,8 @@ case "${fv3_res}" in
         export nthreads_fv3_gfs=1
         export nthreads_ufs=1
         export nthreads_ufs_gfs=1
-        export xr_cnvcld=".false." # Do not pass conv. clouds to Xu-Randall cloud fraction
-        export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling
+        export xr_cnvcld=".false."             # Do not pass conv. clouds to Xu-Randall cloud fraction
+        export cdmbgwd="0.14,1.8,1.0,1.0"      # mountain blocking, ogwd, cgwd, cgwd src scaling
         export cdmbgwd_gsl="20.0,2.5,1.0,1.0" # settings for GSL drag suite
         export knob_ugwp_tauamp=3.0e-3 # setting for UGWPv1 non-stationary GWD
         export k_split=1
@@ -254,40 +254,33 @@ export ntasks_fv3_gfs
 export ntasks_quilt
 export ntasks_quilt_gfs
 
-# Determine whether to use compression in the write grid component based on resolution
+# Determine whether to use compression in the write grid component
+# and whether to use parallel NetCDF based on resolution
 case ${fv3_res} in
-  "C48" | "C96" | "C192" | "C384")
+  "C48" | "C96" | "C192")
     zstandard_level=0
     ideflate=0
     quantize_nsd=0
+    OUTPUT_FILETYPE_ATM="netcdf"
+    OUTPUT_FILETYPE_SFC="netcdf"
     ;;
-  "C768" | "C1152" | "C3072")
+  "C384" | "C768" | "C1152" | "C3072")
     zstandard_level=0
     ideflate=1
     quantize_nsd=5
-    ;;
-  *)
-    echo "FATAL ERROR: Unrecognized FV3 resolution ${fv3_res}"
-    exit 15
-    ;;
-esac
-export zstandard_level ideflate quantize_nsd
-
-# Determine whether to use parallel NetCDF based on resolution
-case ${fv3_res} in
-  "C48" | "C96" | "C192" | "C384")
-    OUTPUT_FILETYPE_ATM="netcdf"
-    OUTPUT_FILETYPE_SFC="netcdf"
-    ;;
-  "C768" | "C1152" | "C3072")
     OUTPUT_FILETYPE_ATM="netcdf_parallel"
-    OUTPUT_FILETYPE_SFC="netcdf_parallel"
+    if [[ "${fv3_res}" == "C384" ]]; then
+      OUTPUT_FILETYPE_SFC="netcdf"  # For C384, the write grid component is better off with serial netcdf
+    else
+      OUTPUT_FILETYPE_SFC="netcdf_parallel"
+    fi
     ;;
   *)
     echo "FATAL ERROR: Unrecognized FV3 resolution ${fv3_res}"
    exit 15
     ;;
 esac
+export zstandard_level ideflate quantize_nsd
 export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC
 
 # cpl defaults
diff --git a/parm/config/gfs/config.ufs b/parm/config/gfs/config.ufs
index b27845aec7..babbe1f2dd 100644
--- a/parm/config/gfs/config.ufs
+++ b/parm/config/gfs/config.ufs
@@ -356,40 +356,33 @@ export ntasks_fv3_gfs
 export ntasks_quilt_gdas
 export ntasks_quilt_gfs
 
-# Determine whether to use compression in the write grid component based on resolution
+# Determine whether to use compression in the write grid component
+# and whether to use parallel NetCDF based on resolution
 case ${fv3_res} in
-  "C48" | "C96" | "C192" | "C384")
+  "C48" | "C96" | "C192")
     zstandard_level=0
     ideflate=0
     quantize_nsd=0
+    OUTPUT_FILETYPE_ATM="netcdf"
+    OUTPUT_FILETYPE_SFC="netcdf"
     ;;
-  "C768" | "C1152" | "C3072")
+  "C384" | "C768" | "C1152" | "C3072")
     zstandard_level=0
     ideflate=1
     quantize_nsd=5
-    ;;
-  *)
-    echo "FATAL ERROR: Unrecognized FV3 resolution ${fv3_res}"
-    exit 15
-    ;;
-esac
-export zstandard_level ideflate quantize_nsd
-
-# Determine whether to use parallel NetCDF based on resolution
-case ${fv3_res} in
-  "C48" | "C96" | "C192" | "C384")
-    OUTPUT_FILETYPE_ATM="netcdf"
-    OUTPUT_FILETYPE_SFC="netcdf"
-    ;;
-  "C768" | "C1152" | "C3072")
     OUTPUT_FILETYPE_ATM="netcdf_parallel"
-    OUTPUT_FILETYPE_SFC="netcdf_parallel"
+    if [[ "${fv3_res}" == "C384" ]]; then
+      OUTPUT_FILETYPE_SFC="netcdf"  # For C384, the write grid component is better off with serial netcdf
+    else
+      OUTPUT_FILETYPE_SFC="netcdf_parallel"
+    fi
     ;;
   *)
     echo "FATAL ERROR: Unrecognized FV3 resolution ${fv3_res}"
     exit 15
     ;;
 esac
+export zstandard_level ideflate quantize_nsd
 export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC
 
 # cpl defaults
diff --git a/ush/parsing_model_configure_FV3.sh b/ush/parsing_model_configure_FV3.sh
index 8f102fe298..7cf7bf8662 100755
--- a/ush/parsing_model_configure_FV3.sh
+++ b/ush/parsing_model_configure_FV3.sh
@@ -31,7 +31,11 @@ local WRITE_GROUP=${WRITE_GROUP:-1}
 local WRTTASK_PER_GROUP=${WRTTASK_PER_GROUP:-24}
 local ITASKS=1
 local OUTPUT_HISTORY=${OUTPUT_HISTORY:-".true."}
-local HISTORY_FILE_ON_NATIVE_GRID=".true."
+if [[ "${DO_JEDIATMVAR:-}" == "YES" ]]; then
+  local HISTORY_FILE_ON_NATIVE_GRID=".true."
+else
+  local HISTORY_FILE_ON_NATIVE_GRID=".false."
+fi
 local WRITE_DOPOST=${WRITE_DOPOST:-".false."}
 local WRITE_NSFLIP=${WRITE_NSFLIP:-".false."}
 local NUM_FILES=${NUM_FILES:-2}
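Note: the two config.ufs case statements (compression, parallel NetCDF) are
merged above, with C384 moved into the compressed tier except for its surface
files. A minimal stand-alone sketch of the merged selection logic, runnable
outside the workflow; the for loop and echo are illustrative additions here,
while the case body mirrors the new config.ufs block:

    #!/usr/bin/env bash
    # Sweep every supported resolution through the new selection logic.
    for fv3_res in C48 C96 C192 C384 C768 C1152 C3072; do
      case ${fv3_res} in
        "C48" | "C96" | "C192")
          ideflate=0; quantize_nsd=0     # no compression or quantization at low res
          OUTPUT_FILETYPE_ATM="netcdf"
          OUTPUT_FILETYPE_SFC="netcdf"
          ;;
        "C384" | "C768" | "C1152" | "C3072")
          ideflate=1; quantize_nsd=5     # deflate + quantization at higher res
          OUTPUT_FILETYPE_ATM="netcdf_parallel"
          if [[ "${fv3_res}" == "C384" ]]; then
            OUTPUT_FILETYPE_SFC="netcdf" # C384 sfc files do better with serial netcdf
          else
            OUTPUT_FILETYPE_SFC="netcdf_parallel"
          fi
          ;;
        *)
          echo "FATAL ERROR: Unrecognized FV3 resolution ${fv3_res}"
          exit 15
          ;;
      esac
      echo "${fv3_res}: ideflate=${ideflate} quantize_nsd=${quantize_nsd} ATM=${OUTPUT_FILETYPE_ATM} SFC=${OUTPUT_FILETYPE_SFC}"
    done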
From 6bd01783800a1b50cd8cf220a69180bd9faab86f Mon Sep 17 00:00:00 2001
From: Rahul Mahajan
Date: Fri, 13 Sep 2024 09:28:21 -0400
Subject: [PATCH 2/2] only link cubed-sphere output files from the model when
 DO_JEDIATMVAR=YES

---
 ush/forecast_postdet.sh | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh
index d13cb0df0c..58755d41d9 100755
--- a/ush/forecast_postdet.sh
+++ b/ush/forecast_postdet.sh
@@ -233,14 +233,14 @@ EOF
       ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atmf${FH3}.nc" "atmf${f_hhmmss}.nc"
       ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc" "sfcf${f_hhmmss}.nc"
       ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atm.logf${FH3}.txt" "log.atm.f${f_hhmmss}"
-      ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_atmf${FH3}.nc" "cubed_sphere_grid_atmf${f_hhmmss}.nc"
-      ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_sfcf${FH3}.nc" "cubed_sphere_grid_sfcf${f_hhmmss}.nc"
     else
       ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atmf${FH3}.nc" "atmf${FH3}.nc"
       ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc" "sfcf${FH3}.nc"
       ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atm.logf${FH3}.txt" "log.atm.f${FH3}"
-      ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_atmf${FH3}.nc" "cubed_sphere_grid_atmf${FH3}.nc"
-      ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_sfcf${FH3}.nc" "cubed_sphere_grid_sfcf${FH3}.nc"
+      if [[ "${DO_JEDIATMVAR:-}" == "YES" ]]; then
+        ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_atmf${FH3}.nc" "cubed_sphere_grid_atmf${FH3}.nc"
+        ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_sfcf${FH3}.nc" "cubed_sphere_grid_sfcf${FH3}.nc"
+      fi
     fi
     if [[ "${WRITE_DOPOST}" == ".true." ]]; then
       ${NLN} "${COMOUT_ATMOS_MASTER}/${RUN}.t${cyc}z.master.grb2f${FH3}" "GFSPRS.GrbF${FH2}"
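Note: the DO_JEDIATMVAR gate above pairs with the model_configure change in
patch 1/2, so the forecast only links the cubed-sphere history files the model
was actually asked to write. A minimal sketch of the gated linking, assuming
${NLN} expands to a force-symlink command such as "ln -sf" (the sample
variable values and path are illustrative only):

    #!/usr/bin/env bash
    NLN="ln -sf"                        # assumed; set by the workflow preamble
    COMOUT_ATMOS_HISTORY="/tmp/comout"  # hypothetical path for this sketch
    RUN="gfs"; cyc="00"; FH3="006"
    DO_JEDIATMVAR="NO"                  # flip to YES to get the native-grid links

    mkdir -p "${COMOUT_ATMOS_HISTORY}"

    # Gaussian-grid history files are always linked.
    ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atmf${FH3}.nc" "atmf${FH3}.nc"
    ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc" "sfcf${FH3}.nc"

    # Cubed-sphere (native grid) files exist only when JEDI atm DA requested
    # them via HISTORY_FILE_ON_NATIVE_GRID, so link them only in that case.
    if [[ "${DO_JEDIATMVAR:-}" == "YES" ]]; then
      ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_atmf${FH3}.nc" "cubed_sphere_grid_atmf${FH3}.nc"
      ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_sfcf${FH3}.nc" "cubed_sphere_grid_sfcf${FH3}.nc"
    fi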