Remove wcoss_dell_p3 from workflow (#810)
* remove wcoss_dell_p3

* remove block for tide and gyre
chan-hoo authored Jul 19, 2022
1 parent 6bd798f commit ce3c9b4
Showing 16 changed files with 16 additions and 246 deletions.
28 changes: 0 additions & 28 deletions modulefiles/module-setup.csh.inc
@@ -13,34 +13,6 @@ else if ( { test -d /scratch3 } ) then
         source /apps/lmod/lmod/init/$__ms_shell
     endif
     module purge
-else if ( { test -d /gpfs/hps -a -e /etc/SuSE-release } ) then
-    # We are on NOAA Luna or Surge
-    if ( ! { module help >& /dev/null } ) then
-        source /opt/modules/default/init/$__ms_shell
-    endif
-    module purge
-    module purge
-    unset _LMFILES_
-    unset LOADEDMODULES
-    module use /opt/modulefiles
-    module use /opt/cray/ari/modulefiles
-    module use /opt/cray/craype/default/alt-modulefiles
-    module use /opt/cray/alt-modulefiles
-    module use /gpfs/hps/nco/ops/nwprod/modulefiles
-    module use /gpfs/hps/nco/ops/nwprod/lib/modulefiles
-    module use /usrx/local/prod/modulefiles
-else if ( { test -d /dcom -a -d /hwrf } ) then
-    # We are on NOAA Tide or Gyre
-    if ( ! { module help >& /dev/null } ) then
-        source /usrx/local/Modules/default/init/$__ms_shell
-    endif
-    module purge
-else if ( { test -L /usrx && sh -c "readlink /usrx 2> /dev/null | grep dell" } ) then
-    # We are on WCOSS Mars or Venus
-    if ( ! { module help >& /dev/null } ) then
-        source /usrx/local/prod/lmod/lmod/init/$__ms_shell
-    endif
-    module purge
 else if ( { test -d /glade } ) then
     # We are on NCAR Yellowstone
     if ( ! { module help >& /dev/null } ) then
29 changes: 0 additions & 29 deletions modulefiles/module-setup.sh.inc
@@ -28,35 +28,6 @@ elif [[ -d /scratch3 ]] ; then
         source /apps/lmod/lmod/init/$__ms_shell
     fi
     module purge
-elif [[ -d /gpfs/hps && -e /etc/SuSE-release ]] ; then
-    # We are on NOAA Luna or Surge
-    if ( ! eval module help > /dev/null 2>&1 ) ; then
-        source /opt/modules/default/init/$__ms_shell
-    fi
-    module purge
-    module purge
-    # Workaround until module issues are fixed:
-    unset _LMFILES_
-    unset LOADEDMODULES
-    module use /opt/modulefiles
-    module use /opt/cray/ari/modulefiles
-    module use /opt/cray/craype/default/alt-modulefiles
-    module use /opt/cray/alt-modulefiles
-    module use /gpfs/hps/nco/ops/nwprod/modulefiles
-    module use /gpfs/hps/nco/ops/nwprod/lib/modulefiles
-    module use /usrx/local/prod/modulefiles
-elif [[ -d /dcom && -d /hwrf ]] ; then
-    # We are on NOAA Tide or Gyre
-    if ( ! eval module help > /dev/null 2>&1 ) ; then
-        source /usrx/local/Modules/default/init/$__ms_shell
-    fi
-    module purge
-elif [[ -L /usrx && "$( readlink /usrx 2> /dev/null )" =~ dell ]] ; then
-    # We are on NOAA Mars or Venus
-    if ( ! eval module help > /dev/null 2>&1 ) ; then
-        source /usrx/local/prod/lmod/lmod/init/$__ms_shell
-    fi
-    module purge
 elif [[ -d /lustre && -d /ncrc ]] ; then
     # We are on GAEA.
     if ( ! eval module help > /dev/null 2>&1 ) ; then
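Both module-setup include files above use the same pattern: probe for a filesystem path that uniquely identifies a platform, then source that platform's module-system init script and purge any loaded modules. A minimal Python sketch of that probe-and-dispatch idea, using marker paths and platform names taken from the diffs above (the function itself is illustrative, not part of the repository):

    # Illustrative sketch of the probe-and-dispatch pattern in the
    # module-setup include files. Marker paths and names come from the
    # diffs above; the function and mapping are hypothetical.
    import os

    PLATFORM_MARKERS = [
        (["/glade"], "NCAR Yellowstone"),
        (["/lustre", "/ncrc"], "GAEA"),
        (["/dcom", "/hwrf"], "NOAA Tide or Gyre (dropped by this commit)"),
    ]

    def detect_platform():
        for paths, name in PLATFORM_MARKERS:
            if all(os.path.isdir(p) for p in paths):
                return name
        return None

    print(detect_platform() or "unknown platform")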
6 changes: 0 additions & 6 deletions modulefiles/tasks/wcoss_dell_p3/get_extrn_ics.local

This file was deleted.

6 changes: 0 additions & 6 deletions modulefiles/tasks/wcoss_dell_p3/get_extrn_lbcs.local

This file was deleted.

6 changes: 0 additions & 6 deletions modulefiles/tasks/wcoss_dell_p3/get_obs.local

This file was deleted.

7 changes: 0 additions & 7 deletions modulefiles/tasks/wcoss_dell_p3/make_grid.local

This file was deleted.

7 changes: 0 additions & 7 deletions modulefiles/tasks/wcoss_dell_p3/make_ics.local

This file was deleted.

7 changes: 0 additions & 7 deletions modulefiles/tasks/wcoss_dell_p3/make_lbcs.local

This file was deleted.

7 changes: 0 additions & 7 deletions modulefiles/tasks/wcoss_dell_p3/run_fcst.local

This file was deleted.

3 changes: 0 additions & 3 deletions modulefiles/tasks/wcoss_dell_p3/run_vx.local

This file was deleted.

3 changes: 1 addition & 2 deletions scripts/exregional_make_orog.sh
@@ -68,8 +68,7 @@ print_input_args valid_args
 #
 #-----------------------------------------------------------------------
 #
-# Set OpenMP variables. The orog executable runs with OMP. On
-# WCOSS (Cray), it is optimized for six threads, which is the default.
+# Set OpenMP variables. The orog executable runs with OMP.
 #
 #-----------------------------------------------------------------------
 #
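The deleted sentence was the last WCOSS-specific note in this script; the surrounding code still sets OpenMP variables before the orog executable runs. As a rough sketch of what that amounts to, here is one way a driver could export the six-thread setting the old comment mentioned (the executable name and stack size are placeholders, not values from the script):

    # Rough sketch: export OpenMP settings, then launch a threaded
    # executable. "./orog.x" and OMP_STACKSIZE are placeholders.
    import os
    import subprocess

    env = dict(os.environ, OMP_NUM_THREADS="6", OMP_STACKSIZE="1024m")
    subprocess.run(["./orog.x"], env=env, check=True)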
38 changes: 14 additions & 24 deletions ush/get_crontab_contents.py
@@ -42,22 +42,18 @@ def get_crontab_contents(called_from_cron):
     IMPORTS = ["MACHINE", "USER"]
     import_vars(env_vars=IMPORTS)
 
-    if MACHINE == "WCOSS_DELL_P3":
-        __crontab_cmd__=""
-        (_,__crontab_contents__,_)=run_command(f'''cat "/u/{USER}/cron/mycrontab"''')
-    else:
-        __crontab_cmd__="crontab"
-        #
-        # On Cheyenne, simply typing "crontab" will launch the crontab command
-        # at "/glade/u/apps/ch/opt/usr/bin/crontab". This is a containerized
-        # version of crontab that will work if called from scripts that are
-        # themselves being called as cron jobs. In that case, we must instead
-        # call the system version of crontab at /usr/bin/crontab.
-        #
-        if MACHINE == "CHEYENNE":
-            if called_from_cron:
-                __crontab_cmd__="/usr/bin/crontab"
-        (_,__crontab_contents__,_)=run_command(f'''{__crontab_cmd__} -l''')
+    __crontab_cmd__="crontab"
+    #
+    # On Cheyenne, simply typing "crontab" will launch the crontab command
+    # at "/glade/u/apps/ch/opt/usr/bin/crontab". This is a containerized
+    # version of crontab that will work if called from scripts that are
+    # themselves being called as cron jobs. In that case, we must instead
+    # call the system version of crontab at /usr/bin/crontab.
+    #
+    if MACHINE == "CHEYENNE":
+        if called_from_cron:
+            __crontab_cmd__="/usr/bin/crontab"
+    (_,__crontab_contents__,_)=run_command(f'''{__crontab_cmd__} -l''')
 
     return __crontab_cmd__, __crontab_contents__
 
@@ -108,10 +104,7 @@ def add_crontab_line():
     NEWLINE_CHAR="\n"
 
     #add the crontab line
-    if MACHINE == "WCOSS_DELL_P3":
-        run_command(f'''printf "%b%s\n" '{NEWLINE_CHAR}' '{CRONTAB_LINE}' >> "/u/{USER}/cron/mycrontab"''')
-    else:
-        run_command(f'''printf "%s%b%s\n" '{crontab_contents}' '{NEWLINE_CHAR}' '{CRONTAB_LINE}' | {crontab_cmd}''')
+    run_command(f'''printf "%s%b%s\n" '{crontab_contents}' '{NEWLINE_CHAR}' '{CRONTAB_LINE}' | {crontab_cmd}''')
 
 def delete_crontab_line(called_from_cron):
     """ Delete crontab line after job is complete i.e. either SUCCESS/FAILURE
@@ -137,10 +130,7 @@ def delete_crontab_line(called_from_cron):
     else:
         crontab_contents = crontab_contents.replace(CRONTAB_LINE,'')
 
-    if MACHINE == "WCOSS_DELL_P3":
-        run_command(f'''echo '{crontab_contents}' > "/u/{USER}/cron/mycrontab"''')
-    else:
-        run_command(f'''echo '{crontab_contents}' | {crontab_cmd}''')
+    run_command(f'''echo '{crontab_contents}' | {crontab_cmd}''')
 
 def parse_args(argv):
     """ Parse command line arguments for deleting crontab line.
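With the WCOSS_DELL_P3 branch gone, every path in this module goes through the standard crontab utility: read the table with crontab -l, and rewrite it by piping edited contents back through crontab. A self-contained sketch of that round trip, written without the module's run_command/import_vars helpers (assumes a POSIX crontab on PATH; the example entry is invented):

    # Self-contained sketch of the crontab round trip that remains
    # after this commit. Assumes a POSIX "crontab" CLI on PATH.
    import subprocess

    def read_crontab():
        # "crontab -l" exits nonzero when no crontab exists yet.
        proc = subprocess.run(["crontab", "-l"], capture_output=True, text=True)
        return proc.stdout if proc.returncode == 0 else ""

    def write_crontab(contents):
        # "crontab -" installs a new table read from stdin.
        subprocess.run(["crontab", "-"], input=contents, text=True, check=True)

    def add_line(line):
        contents = read_crontab().rstrip("\n")
        write_crontab(f"{contents}\n{line}\n" if contents else f"{line}\n")

    add_line("*/3 * * * * /path/to/launch_FV3LAM_wflow.sh")  # invented entry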
1 change: 0 additions & 1 deletion ush/launch_FV3LAM_wflow.sh
@@ -401,7 +401,6 @@ script for this experiment:
     MACHINE=$MACHINE CRONTAB_LINE=$CRONTAB_LINE \
     python3 $USHDIR/get_crontab_contents.py --delete
   fi
-
 fi
 #
 # Print the workflow completion message to the launch log file.
89 changes: 0 additions & 89 deletions ush/machine/wcoss_dell_p3.sh

This file was deleted.

23 changes: 0 additions & 23 deletions ush/templates/FV3LAM_wflow.xml
@@ -178,11 +178,7 @@ MODULES_RUN_TASK_FP script.
 
     &RSRV_DEFAULT;
     <command>&LOAD_MODULES_RUN_TASK_FP; "&MAKE_GRID_TN;" "&JOBSDIR;/JREGIONAL_MAKE_GRID"</command>
-{%- if machine in ["WCOSS_DELL_P3"] %}
-    <nodes>{{ nnodes_make_grid }}:ppn=1</nodes>
-{%- else %}
     <nodes>{{ nnodes_make_grid }}:ppn={{ ppn_make_grid }}</nodes>
-{%- endif %}
     <walltime>{{ wtime_make_grid }}</walltime>
     <nodesize>&NCORES_PER_NODE;</nodesize>
 {%- if machine in ["GAEA"] %}
@@ -204,11 +200,7 @@
 
     &RSRV_DEFAULT;
     <command>&LOAD_MODULES_RUN_TASK_FP; "&MAKE_OROG_TN;" "&JOBSDIR;/JREGIONAL_MAKE_OROG"</command>
-{%- if machine in ["WCOSS_DELL_P3"] %}
-    <nodes>{{ nnodes_make_orog }}:ppn=1</nodes>
-{%- else %}
     <nodes>{{ nnodes_make_orog }}:ppn={{ ppn_make_orog }}</nodes>
-{%- endif %}
     <walltime>{{ wtime_make_orog }}</walltime>
     <nodesize>&NCORES_PER_NODE;</nodesize>
 {%- if machine in ["GAEA"] %}
@@ -275,9 +267,6 @@ MODULES_RUN_TASK_FP script.
 
     &RSRV_HPSS;
     <command>&LOAD_MODULES_RUN_TASK_FP; "&GET_EXTRN_ICS_TN;" "&JOBSDIR;/JREGIONAL_GET_EXTRN_MDL_FILES"</command>
-{%- if machine in ["WCOSS_DELL_P3"] %}
-    <memory>2048M</memory><native>-R affinity[core]</native>
-{%- endif %}
     <nodes>{{ nnodes_get_extrn_ics }}:ppn={{ ppn_get_extrn_ics }}</nodes>
     <walltime>{{ wtime_get_extrn_ics }}</walltime>
     <nodesize>&NCORES_PER_NODE;</nodesize>
@@ -304,9 +293,6 @@
 
     &RSRV_HPSS;
     <command>&LOAD_MODULES_RUN_TASK_FP; "&GET_EXTRN_LBCS_TN;" "&JOBSDIR;/JREGIONAL_GET_EXTRN_MDL_FILES"</command>
-{%- if machine in ["WCOSS_DELL_P3"] %}
-    <memory>2048M</memory><native>-R affinity[core]</native>
-{%- endif %}
     <nodes>{{ nnodes_get_extrn_lbcs }}:ppn={{ ppn_get_extrn_lbcs }}</nodes>
     <walltime>{{ wtime_get_extrn_lbcs }}</walltime>
     <nodesize>&NCORES_PER_NODE;</nodesize>
@@ -728,9 +714,6 @@ the <task> tag to be identical to the ones above for other output times.
 
     &RSRV_HPSS;
     <command>&LOAD_MODULES_RUN_TASK_FP; "&GET_OBS;" "&JOBSDIR;/JREGIONAL_GET_OBS_CCPA"</command>
-{%- if machine in ["WCOSS_DELL_P3"] %}
-    <memory>2048M</memory><native>-R affinity[core]</native>
-{%- endif %}
     <nodes>{{ nnodes_get_obs_ccpa }}:ppn={{ ppn_get_obs_ccpa }}</nodes>
     <walltime>{{ wtime_get_obs_ccpa }}</walltime>
     <nodesize>&NCORES_PER_NODE;</nodesize>
@@ -761,9 +744,6 @@ the <task> tag to be identical to the ones above for other output times.
 
     &RSRV_HPSS;
     <command>&LOAD_MODULES_RUN_TASK_FP; "&GET_OBS;" "&JOBSDIR;/JREGIONAL_GET_OBS_MRMS"</command>
-{%- if machine in ["WCOSS_DELL_P3"] %}
-    <memory>2048M</memory><native>-R affinity[core]</native>
-{%- endif %}
     <nodes>{{ nnodes_get_obs_mrms }}:ppn={{ ppn_get_obs_mrms }}</nodes>
     <walltime>{{ wtime_get_obs_mrms }}</walltime>
     <nodesize>&NCORES_PER_NODE;</nodesize>
@@ -795,9 +775,6 @@ the <task> tag to be identical to the ones above for other output times.
 
     &RSRV_HPSS;
     <command>&LOAD_MODULES_RUN_TASK_FP; "&GET_OBS;" "&JOBSDIR;/JREGIONAL_GET_OBS_NDAS"</command>
-{%- if machine in ["WCOSS_DELL_P3"] %}
-    <memory>2048M</memory><native>-R affinity[core]</native>
-{%- endif %}
     <nodes>{{ nnodes_get_obs_ndas }}:ppn={{ ppn_get_obs_ndas }}</nodes>
     <walltime>{{ wtime_get_obs_ndas }}</walltime>
     <nodesize>&NCORES_PER_NODE;</nodesize>
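Each deleted block in this template was a Jinja2 conditional that forced ppn=1 (or, for the HPSS-bound tasks, added an LSF memory/affinity directive) on WCOSS_DELL_P3; without them, the generic line is emitted for every machine. A quick sketch of how one surviving line renders (requires the jinja2 package; the variable values are invented):

    # Sketch of rendering the simplified <nodes> template line with
    # jinja2. The values below are illustrative, not app defaults.
    from jinja2 import Template

    src = "<nodes>{{ nnodes_make_grid }}:ppn={{ ppn_make_grid }}</nodes>"
    print(Template(src).render(nnodes_make_grid=1, ppn_make_grid=24))
    # -> <nodes>1:ppn=24</nodes>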
2 changes: 1 addition & 1 deletion ush/valid_param_vals.yaml
@@ -4,7 +4,7 @@
 valid_vals_RUN_ENVIR: ["nco", "community"]
 valid_vals_VERBOSE: [True, False]
 valid_vals_DEBUG: [True, False]
-valid_vals_MACHINE: ["WCOSS_DELL_P3", "HERA", "ORION", "JET", "ODIN", "CHEYENNE", "STAMPEDE", "LINUX", "MACOS", "NOAACLOUD", "SINGULARITY", "GAEA"]
+valid_vals_MACHINE: ["HERA", "ORION", "JET", "ODIN", "CHEYENNE", "STAMPEDE", "LINUX", "MACOS", "NOAACLOUD", "SINGULARITY", "GAEA"]
 valid_vals_SCHED: ["slurm", "pbspro", "lsf", "lsfcray", "none"]
 valid_vals_FCST_MODEL: ["ufs-weather-model", "fv3gfs_aqm"]
 valid_vals_WORKFLOW_MANAGER: ["rocoto", "none"]
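Dropping "WCOSS_DELL_P3" from valid_vals_MACHINE means an experiment configured for that machine now fails validation up front. A short sketch of the membership check such a table supports (requires PyYAML; the relative file path is an assumption about where the script runs):

    # Sketch of validating MACHINE against valid_param_vals.yaml.
    # Requires PyYAML; the relative file path is an assumption.
    import yaml

    with open("ush/valid_param_vals.yaml") as f:
        valid = yaml.safe_load(f)

    machine = "WCOSS_DELL_P3"
    if machine not in valid["valid_vals_MACHINE"]:
        raise ValueError(f"MACHINE={machine!r} is not supported")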
