add ufs-datm-lnd RT baseline check in ctest #11

Merged · 3 commits · May 4, 2023
5 changes: 5 additions & 0 deletions test/CMakeLists.txt
@@ -52,3 +52,8 @@ set_tests_properties(test_land_driver
DEPENDS "test_tile2vector"
ENVIRONMENT "LANDDA_INPUTS=$ENV{LANDDA_INPUTS};
TOL=$ENV{TOL}")

# test for ufs-datm-lnd model
add_test(NAME test_ufs_datm_land
COMMAND ${PROJECT_SOURCE_DIR}/test/run_ufs_datm_lnd.sh ${PROJECT_BINARY_DIR} ${PROJECT_SOURCE_DIR}
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/test)
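
For reference, a minimal sketch of running the new test through ctest once the project has been configured and built; the build directory path is an assumption:

# From the CMake build directory (hypothetical path), run only the new test verbosely
cd /path/to/build
ctest -R test_ufs_datm_land --verbose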
2 changes: 1 addition & 1 deletion test/compare.py
@@ -16,4 +16,4 @@
sys.exit(2)
# If dimension is the same, compare data
else:
-np.testing.assert_allclose(nc1[varname][:], nc2[varname][:], rtol=1e-5, atol=float(sys.argv[3]))
+np.testing.assert_allclose(nc1[varname][:], nc2[varname][:], rtol=1e-8, atol=float(sys.argv[3]))
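
For illustration, compare.py takes two NetCDF files and an absolute tolerance, matching how run_ufs_datm_lnd.sh calls it below; the file names here are hypothetical:

# Compare one model output file against its baseline copy with atol = 1e-8 (hypothetical names)
./test/compare.py run_dir/output.tile1.nc baseline_dir/output.tile1.nc 1e-8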
79 changes: 79 additions & 0 deletions test/retrieve_data.sh
@@ -0,0 +1,79 @@
#!/bin/bash
set -e
################################################
# pass arguments
project_source_dir=$1

# First, retrieve test data for the land DA system
cd ${project_source_dir}/../
[[ ! -f landda-test-inps.tar.gz ]] && wget https://epic-sandbox-srw.s3.amazonaws.com/landda-test-inps.tar.gz
[[ ! -d inputs ]] && tar xvfz landda-test-inps.tar.gz
cd ${project_source_dir}

# Then retrieve data for ufs-datm-lnd model test (RT: datm_cdeps_lnd_gswp3)

# Set paths for the ufs-weather-model regression tests
PATHRT=${project_source_dir}/ufs-weather-model/tests
RT_COMPILER=${RT_COMPILER:-intel}

# Install the AWS CLI
# (users must load the required modules before running this script)
pip3 install awscli --upgrade --user
export PATH=${HOME}/.local/bin:${PATH}

# set envs
DATA_ROOT=${project_source_dir}/../inputs
INPUTDATA_ROOT=${DATA_ROOT}/NEMSfv3gfs
BL_DATE=20230413
INPUTDATA_DATE=20221101
[[ ! -d ${INPUTDATA_ROOT}/develop-${BL_DATE}/${RT_COMPILER^^} ]] && mkdir -p ${INPUTDATA_ROOT}/develop-${BL_DATE}/${RT_COMPILER^^}
[[ ! -d ${INPUTDATA_ROOT}/input-data-${INPUTDATA_DATE} ]] && mkdir -p ${INPUTDATA_ROOT}/input-data-${INPUTDATA_DATE}
RTPWD=${INPUTDATA_ROOT}/develop-${BL_DATE}/${RT_COMPILER^^}
INPUTDATA_ROOT=${INPUTDATA_ROOT}/input-data-${INPUTDATA_DATE}
AWS_URL=s3://noaa-ufs-regtests-pds
SRC_DIR=${AWS_URL}/input-data-${INPUTDATA_DATE}

# baseline data
DES_DIR=${RTPWD}/datm_cdeps_lnd_gswp3
[[ ! -d ${DES_DIR} ]] && mkdir -p ${DES_DIR}
echo ${DES_DIR}
cd $DES_DIR
aws s3 sync --no-sign-request ${AWS_URL}/develop-${BL_DATE}/${RT_COMPILER^^}/datm_cdeps_lnd_gswp3 .
cd ${project_source_dir}

# DATM data
DES_DIR=${INPUTDATA_ROOT}/DATM_GSWP3_input_data
[[ ! -d ${DES_DIR} ]] && mkdir -p ${DES_DIR}
echo ${DES_DIR}
cd $DES_DIR
aws s3 sync --no-sign-request ${SRC_DIR}/DATM_GSWP3_input_data .
cd ${project_source_dir}

# fixed data
DES_DIR=${INPUTDATA_ROOT}/FV3_fix_tiled/C96
[[ ! -d ${DES_DIR} ]] && mkdir -p ${DES_DIR}
echo ${DES_DIR}
cd $DES_DIR
aws s3 sync --no-sign-request ${SRC_DIR}/FV3_fix_tiled/C96 .

# input data
DES_DIR=${INPUTDATA_ROOT}/FV3_input_data/INPUT
[[ ! -d ${DES_DIR} ]] && mkdir -p ${DES_DIR}
echo ${DES_DIR}
cd $DES_DIR
aws s3 cp --no-sign-request ${SRC_DIR}/FV3_input_data/INPUT/C96_grid.tile1.nc .
aws s3 cp --no-sign-request ${SRC_DIR}/FV3_input_data/INPUT/C96_grid.tile2.nc .
aws s3 cp --no-sign-request ${SRC_DIR}/FV3_input_data/INPUT/C96_grid.tile3.nc .
aws s3 cp --no-sign-request ${SRC_DIR}/FV3_input_data/INPUT/C96_grid.tile4.nc .
aws s3 cp --no-sign-request ${SRC_DIR}/FV3_input_data/INPUT/C96_grid.tile5.nc .
aws s3 cp --no-sign-request ${SRC_DIR}/FV3_input_data/INPUT/C96_grid.tile6.nc .
aws s3 cp --no-sign-request ${SRC_DIR}/FV3_input_data/INPUT/grid_spec.nc .
cd ${project_source_dir}

# NOAHMP ICs
DES_DIR=${INPUTDATA_ROOT}/NOAHMP_IC
[[ ! -d ${DES_DIR} ]] && mkdir -p ${DES_DIR}
echo ${DES_DIR}
cd $DES_DIR
aws s3 sync --no-sign-request ${SRC_DIR}/NOAHMP_IC .
cd ${project_source_dir}
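
A usage sketch for the retrieval script, run from the project source tree; RT_COMPILER defaults to intel if unset, and the data are staged one level above the source directory:

# Hypothetical invocation from the project source directory
export RT_COMPILER=intel
bash test/retrieve_data.sh "$(pwd)"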
107 changes: 107 additions & 0 deletions test/run_ufs_datm_lnd.sh
@@ -0,0 +1,107 @@
#!/bin/bash
set -e
################################################
# pass arguments
project_binary_dir=$1
project_source_dir=$2

#
echo ${project_binary_dir}
echo ${project_source_dir}

#
export MACHINE_ID=${MACHINE_ID:-linux}
TEST_NAME=datm_cdeps_lnd_gswp3
PATHRT=${project_source_dir}/ufs-weather-model/tests
RT_COMPILER=${RT_COMPILER:-intel}
ATOL="1e-8"
source ${PATHRT}/detect_machine.sh
source ${PATHRT}/rt_utils.sh
source ${PATHRT}/default_vars.sh
source ${PATHRT}/tests/$TEST_NAME
source ${PATHRT}/atparse.bash

# Set input data location for each machine
echo "MACHINE_ID: $MACHINE_ID"
if [[ $MACHINE_ID = orion.* ]]; then
DISKNM=/work/noaa/nems/emc.nemspara/RT
elif [[ $MACHINE_ID = hera.* ]]; then
DISKNM=/scratch1/NCEPDEV/nems/emc.nemspara/RT
else
echo "Warning: MACHINE_ID is default, users will have to define INPUTDATA_ROOT and RTPWD by themselives"
fi
BL_DATE=20230413
RTPWD=${RTPWD:-$DISKNM/NEMSfv3gfs/develop-${BL_DATE}/${RT_COMPILER^^}}
INPUTDATA_ROOT=${INPUTDATA_ROOT:-$DISKNM/NEMSfv3gfs/input-data-20221101}

if [[ ! -d ${INPUTDATA_ROOT} ]] || [[ ! -d ${RTPWD} ]]; then
echo "Error: cannot find either folder for INPUTDATA_ROOT or RTPWD, please check!"
exit 1
fi

# create test folder
RUNDIR=${project_binary_dir}/test/${TEST_NAME}
[[ -d ${RUNDIR} ]] && echo "Warning: removing old test folder!" && rm -rf ${RUNDIR}
mkdir -p ${RUNDIR}
cd ${RUNDIR}

# modify some env variables - reduce core usage
export ATM_compute_tasks=0
export ATM_io_tasks=1
export LND_tasks=6
export layout_x=1
export layout_y=1

# FV3 executable:
cp ${project_binary_dir}/ufs-weather-model/src/ufs-weather-model-build/ufs_model ./ufs_model

# Set up input files by expanding the fv3_run template(s) with atparse
for i in ${FV3_RUN:-fv3_run.IN}
do
atparse < ${PATHRT}/fv3_conf/${i} >> fv3_run
done

if [[ $DATM_CDEPS = 'true' ]] || [[ $FV3 = 'true' ]] || [[ $S2S = 'true' ]]; then
if [[ $HAFS = 'false' ]] || [[ $FV3 = 'true' && $HAFS = 'true' ]]; then
atparse < ${PATHRT}/parm/${INPUT_NML:-input.nml.IN} > input.nml
fi
fi

atparse < ${PATHRT}/parm/${MODEL_CONFIGURE:-model_configure.IN} > model_configure

compute_petbounds_and_tasks
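# (compute_petbounds_and_tasks, provided by the sourced ufs-weather-model test utilities,
#  is expected to derive the per-component PET bounds and the total TASKS used by mpiexec below)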

atparse < ${PATHRT}/parm/${NEMS_CONFIGURE:-nems.configure} > nems.configure

# diag table
if [[ "Q${DIAG_TABLE:-}" != Q ]] ; then
atparse < ${PATHRT}/parm/diag_table/${DIAG_TABLE} > diag_table
fi
# Field table
if [[ "Q${FIELD_TABLE:-}" != Q ]] ; then
cp ${PATHRT}/parm/field_table/${FIELD_TABLE} field_table
fi

# Field Dictionary
cp ${PATHRT}/parm/fd_nems.yaml fd_nems.yaml

# Set up the run directory
source ./fv3_run

if [[ $DATM_CDEPS = 'true' ]]; then
atparse < ${PATHRT}/parm/${DATM_IN_CONFIGURE:-datm_in} > datm_in
atparse < ${PATHRT}/parm/${DATM_STREAM_CONFIGURE:-datm.streams.IN} > datm.streams
fi

# start runs
echo "Start ufs-cdeps-land model run with TASKS: ${TASKS}"
mpiexec -n ${TASKS} ./ufs_model

#
echo "Now check model output with ufs-wm baseline!"
for filename in ${LIST_FILES}; do
if [[ -f ${RUNDIR}/${filename} ]] ; then
echo "Baseline check with ${RTPWD}/${TEST_NAME}/${filename}"
${project_source_dir}/test/compare.py ${RUNDIR}/${filename} ${RTPWD}/${TEST_NAME}/${filename} ${ATOL}
fi
done
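
On machines other than Hera or Orion, the run script falls back to INPUTDATA_ROOT and RTPWD from the environment, so a standalone invocation might look like the following sketch; all paths are assumptions:

# Hypothetical standalone run on a generic Linux machine
export RT_COMPILER=intel
export INPUTDATA_ROOT=/path/to/inputs/NEMSfv3gfs/input-data-20221101
export RTPWD=/path/to/inputs/NEMSfv3gfs/develop-20230413/INTEL
./test/run_ufs_datm_lnd.sh /path/to/project_binary_dir /path/to/project_source_dir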