diff --git a/devbuild.sh b/devbuild.sh
index b46063d30f..12ee482d1c 100755
--- a/devbuild.sh
+++ b/devbuild.sh
@@ -3,7 +3,7 @@
 # usage instructions
 usage () {
 cat << EOF_USAGE
-Usage: $0 [OPTIONS]...
+Usage: $0 --platform=PLATFORM [OPTIONS]...
 
 OPTIONS
   -h, --help
@@ -93,8 +93,6 @@ BUILD_JOBS=4
 CLEAN=false
 CONTINUE=false
 VERBOSE=false
-# detect PLATFORM (MACHINE)
-source ${SRC_DIR}/env/detect_machine.sh
 
 # process required arguments
 if [[ ("$1" == "--help") || ("$1" == "-h") ]]; then
@@ -138,17 +136,32 @@ while :; do
   shift
 done
 
+# check if PLATFORM is set
+if [ -z "${PLATFORM}" ] ; then
+  printf "\nERROR: Please set PLATFORM.\n\n"
+  usage
+  exit 1
+fi
+
+# set PLATFORM (MACHINE)
+MACHINE="${PLATFORM}"
+printf "PLATFORM(MACHINE)=${PLATFORM}\n" >&2
+
 set -eu
 
 # automatically determine compiler
 if [ -z "${COMPILER}" ] ; then
   case ${PLATFORM} in
-    jet|hera) COMPILER=intel ;;
+    jet|hera|gaea) COMPILER=intel ;;
     orion) COMPILER=intel ;;
     wcoss_dell_p3) COMPILER=intel ;;
     cheyenne) COMPILER=intel ;;
-    macos) COMPILER=gccgfortran ;;
-    *) printf "ERROR: Unknown platform ${PLATFORM}\n" >&2; usage >&2; exit 1 ;;
+    macos|singularity) COMPILER=gnu ;;
+    odin) COMPILER=intel ;;
+    *)
+      COMPILER=intel
+      printf "WARNING: Setting default COMPILER=intel for new platform ${PLATFORM}\n" >&2;
+      ;;
   esac
 fi
@@ -159,18 +172,19 @@ if [ "${VERBOSE}" = true ] ; then
   settings
 fi
 
-# set ENV_FILE for this platform/compiler combination
-ENV_FILE="${SRC_DIR}/env/build_${PLATFORM}_${COMPILER}.env"
-if [ ! -f "${ENV_FILE}" ]; then
-  printf "ERROR: environment file does not exist for platform/compiler\n" >&2
-  printf "  ENV_FILE=${ENV_FILE}\n" >&2
+# set MODULE_FILE for this platform/compiler combination
+MODULE_FILE="build_${PLATFORM}_${COMPILER}"
+if [ ! -f "${SRC_DIR}/modulefiles/${MODULE_FILE}" ]; then
+  printf "ERROR: module file does not exist for platform/compiler\n" >&2
+  printf "  MODULE_FILE=${MODULE_FILE}\n" >&2
   printf "  PLATFORM=${PLATFORM}\n" >&2
   printf "  COMPILER=${COMPILER}\n\n" >&2
+  printf "Please make sure PLATFORM and COMPILER are set correctly\n" >&2
   usage >&2
   exit 64
 fi
-printf "ENV_FILE=${ENV_FILE}\n" >&2
+printf "MODULE_FILE=${MODULE_FILE}\n" >&2
 
 # if build directory already exists then exit
 if [ "${CLEAN}" = true ]; then
@@ -228,10 +242,13 @@ if [ "${VERBOSE}" = true ]; then
   MAKE_SETTINGS="${MAKE_SETTINGS} VERBOSE=1"
 fi
 
-# source the environment file for this platform/compiler combination, then build the code
-printf "... Source ENV_FILE and create BUILD directory ...\n"
-module use ${SRC_DIR}/env
-. ${ENV_FILE}
+# Before loading modules, we first need to activate Lmod on some systems
+source ${SRC_DIR}/etc/lmod-setup.sh
+
+# source the module file for this platform/compiler combination, then build the code
+printf "... Load MODULE_FILE and create BUILD directory ...\n"
+module use ${SRC_DIR}/modulefiles
+module load ${MODULE_FILE}
 module list
 mkdir -p ${BUILD_DIR}
 cd ${BUILD_DIR}
diff --git a/docs/INSTALL b/docs/INSTALL
index 5923285be0..4b659bb1d1 100644
--- a/docs/INSTALL
+++ b/docs/INSTALL
@@ -12,10 +12,38 @@
 git clone https://github.com/ufs-community/ufs-srweather-app.git
 cd ufs-srweather-app/
 ./manage_externals/checkout_externals
 
-# Prior to building, you must set up the environment so cmake can find the appropriate compilers
-# and libraries. For instructions specific to supported platforms, see the "build_[machine]_[compiler].env
-# files in the "env" directory. These files give instructions assuming a bash or ksh login shell, for
-# csh and tcsh users you will have to modify the commands for setting envronment variables.
+# We can build ufs-srweather-app binaries in two ways.
+
+# Method 1
+# ========
+
+# This is the simplest way to build the binaries
+
+./devbuild.sh --platform=PLATFORM
+
+# If compiler auto-detection fails, specify it using
+
+./devbuild.sh --platform=PLATFORM --compiler=COMPILER
+
+# Method 2
+# ========
+
+# The above instructions should work at least on Tier-1 systems, if not on all supported machines.
+# However, if the build fails for some reason, we can build directly with cmake.
+
+# First, we need to make sure that there is a modulefile "build_[PLATFORM]_[COMPILER]" in the
+# "modulefiles" directory. Also, on some systems (e.g. Gaea/Odin) that come with the Cray module
+# tool, we may need to swap it for Lmod instead. Assuming your login shell is bash, run
+
+source etc/lmod-setup.sh PLATFORM
+
+# and if your login shell is csh/tcsh, source etc/lmod-setup.csh instead.
+
+# From here on, we can assume Lmod is loaded and ready to go. Then we load the specific
+# module for a given PLATFORM and COMPILER as follows
+
+module use modulefiles
+module load build_[PLATFORM]_[COMPILER]
 
 # Supported CMake flags:
 # -DCMAKE_INSTALL_PREFIX  Location where the bin/ include/ lib/ and share/ directories containing
diff --git a/docs/RUNTIME b/docs/RUNTIME
index a80a65228e..e2ca78894d 100644
--- a/docs/RUNTIME
+++ b/docs/RUNTIME
@@ -1,13 +1,22 @@
 # Users should load the appropriate python environment for the workflow.
 # The workflow requires Python 3, with the packages 'PyYAML', 'Jinja2', and 'f90nml' available.
 
-# For users' convenience, the python environment for the workflow is put in 'ufs-srweather-app/env/wflow_[machine].env'.
-# When generating a workflow experiment or running a workflow, users can use this file for a specific machine.
+# For users' convenience, the python environment for the workflow can be activated by loading the wflow_[PLATFORM] modulefile.
 
 # For example, on Hera:
 
-cd ufs-srweather-app/env
-source wflow_hera.env
+module load wflow_hera
+
+# Due to an older version of Lmod, inconsistencies with TCL modulefiles, etc., you may have to activate
+# conda manually, following the instructions that the previous module command prints.
+# Hera is one of those systems, so execute:
+
+conda activate regional_workflow
+
+# After that we can set up an experiment in the directory
+
+cd regional_workflow/ush
+
+# Once we have prepared the experiment file config.sh, we can generate the workflow using
 
-cd ../regional_workflow/ush
 ./generate_FV3LAM_wflow.sh
diff --git a/docs/UsersGuide/source/BuildRunSRW.rst b/docs/UsersGuide/source/BuildRunSRW.rst
index 5eee295f30..5ac57920f9 100644
--- a/docs/UsersGuide/source/BuildRunSRW.rst
+++ b/docs/UsersGuide/source/BuildRunSRW.rst
@@ -96,7 +96,9 @@ The cloned repository contains the configuration files and sub-directories shown
    +--------------------------------+--------------------------------------------------------+
    | ufs_srweather_app.settings.in  | SRW App configuration summary                          |
    +--------------------------------+--------------------------------------------------------+
-   | env                            | Contains build and workflow environment files          |
+   | modulefiles                    | Contains build and workflow module files               |
+   +--------------------------------+--------------------------------------------------------+
+   | etc                            | Contains Lmod startup scripts                          |
    +--------------------------------+--------------------------------------------------------+
    | docs                           | Contains release notes, documentation, and User's Guide|
    +--------------------------------+--------------------------------------------------------+
@@ -123,38 +125,48 @@ Run the executable that pulls in SRW App components from external repositories:
 
-.. _SetUpBuild:
+Build with ``devbuild.sh``
+==========================
 
-Set up the Build Environment
-============================
+On Level-1 systems, for which a modulefile is provided under the ``modulefiles`` directory, we can build the SRW App binaries with:
 
-Before building the SRW App, the build environment must be set up for the user's specific platform. There is a set of common modules required to build the SRW App. These are located in the ``env/srw_common`` file. To load the set of common modules, run:
+.. code-block:: console
+
+   ./devbuild.sh --platform=hera
+
+If compiler auto-detection fails for some reason, specify it using
 
 .. code-block:: console
 
-   module use <path/to/env/directory>
+   ./devbuild.sh --platform=hera --compiler=intel
+
+If this method doesn't work, we will have to manually set up the environment and build the SRW App binaries with CMake.
 
-where <path/to/env/directory> is the full path to the ``env`` directory.
+.. _SetUpBuild:
+
+Set up the Build/Run Environment
+================================
+
+We need to set up our environment to run a workflow or to build the SRW App with CMake. Note that ``devbuild.sh`` does not prepare the environment for workflow runs, so this step is necessary even when the binaries were built successfully using ``devbuild.sh``.
 
-Then, users must set up the platform-specific elements of the build environment. For Level 1 systems, scripts for loading the proper modules and/or setting the correct environment variables can be found in the ``env`` directory of the SRW App in files named ``build_<platform>_<compiler>.env``. Here is a sample directory listing of these build files:
+The build environment must be set up for the user's specific platform. First, we need to make sure ``Lmod`` is the tool used for loading modulefiles. That is the case on most systems; however, on some systems such as Gaea/Odin, the default modulefile loader is from Cray, and we need to swap it for ``Lmod``. For example on Gaea, assuming a ``bash`` login shell, run:
 .. code-block:: console
 
-   $ ls -l env/
-   -rw-rw-r-- 1 user ral 1228 Oct  9 10:09 build_cheyenne_intel.env
-   -rw-rw-r-- 1 user ral 1134 Oct  9 10:09 build_hera_intel.env
-   -rw-rw-r-- 1 user ral 1228 Oct  9 10:09 build_jet_intel.env
-   ...
+   source etc/lmod-setup.sh gaea
 
-On Level 1 systems, the commands in the ``build_<platform>_<compiler>.env`` files can be directly copy-pasted into the command line, or the file can be sourced from the ``ufs-srweather-app/env`` directory. For example, on Hera, run:
+or if your login shell is ``csh`` or ``tcsh``, source ``etc/lmod-setup.csh`` instead. If you execute the above command on systems that don't need it, it will simply do a ``module purge``. From here on, we can assume ``Lmod`` is ready to load the modulefiles needed by the SRW App.
 
-.. code-block::
+The modulefiles needed for building and running the SRW App are located in the ``modulefiles`` directory. To load the necessary modulefile for a specific ``<platform>`` using ``<compiler>``, run:
+
+.. code-block:: console
 
-   source env/build_hera_intel.env
+   module use <path/to/modulefiles/directory>
+   module load build_<platform>_<compiler>
 
-from the main ``ufs-srweather-app`` directory to source the appropriate file.
+where ``<path/to/modulefiles/directory>`` is the full path to the ``modulefiles`` directory. This will work on Level 1 systems, where a modulefile is available in the ``modulefiles`` directory.
 
-On Level 2-4 systems, users will need to modify certain environment variables, such as the path to NCEP libraries, so that the SRW App can find and load the appropriate modules. For systems with Lmod installed, one of the current ``build_<platform>_<compiler>.env`` files can be copied and used as a template. To check whether Lmod is installed, run ``echo $LMOD_PKG``, and see if it outputs a path to the Lmod package. On systems without Lmod, users can modify or set the required environment variables with the ``export`` or ``setenv`` commands despending on whether they are using a bash or csh/tcsh shell, respectively:
+On Level 2-4 systems, users will need to modify certain environment variables, such as the path to NCEP libraries, so that the SRW App can find and load the appropriate modules. For systems with Lmod installed, one of the current ``build_<platform>_<compiler>`` modulefiles can be copied and used as a template. To check whether Lmod is installed, run ``echo $LMOD_PKG``, and see if it outputs a path to the Lmod package. On systems without Lmod, users can modify or set the required environment variables with the ``export`` or ``setenv`` commands depending on whether they are using a bash or csh/tcsh shell, respectively:
 
 .. code-block::
 
@@ -599,7 +611,7 @@ The workflow requires Python 3 with the packages 'PyYAML', 'Jinja2', and 'f90nml
 
 .. code-block:: console
 
-   source ../../env/wflow_<platform>.env
+   module load wflow_<platform>
 
 This command will activate the ``regional_workflow`` conda environment. The user should see ``(regional_workflow)`` in front of the Terminal prompt at this point.
 If this is not the case, activate the regional workflow from the ``ush`` directory by running:
diff --git a/env/build_gaea_intel.env b/env/build_gaea_intel.env
deleted file mode 100644
index f7ac329df1..0000000000
--- a/env/build_gaea_intel.env
+++ /dev/null
@@ -1,18 +0,0 @@
-#Setup instructions for NOAA RDHPC Gaea using Intel-18.0.6.288 (bash shell)
-source /lustre/f2/pdata/esrl/gsd/contrib/lua-5.1.4.9/init/init_lmod.sh
-module use /lustre/f2/pdata/ncep_shared/hpc-stack/modulefiles/stack
-module load hpc hpc-intel hpc-cray-mpich
-module load srw_common
-
-module use /lustre/f2/pdata/esrl/gsd/contrib/modulefiles
-module load rocoto
-module load cmake/3.20.1
-
-export CC=cc
-export FC=ftn
-export CXX=CC
-export CMAKE_C_COMPILER=cc
-export CMAKE_CXX_COMPILER=CC
-export CMAKE_Fortran_COMPILER=ftn
-export CMAKE_Platform=gaea.intel
-
diff --git a/env/build_jet_intel.env b/env/build_jet_intel.env
deleted file mode 100644
index 777930d660..0000000000
--- a/env/build_jet_intel.env
+++ /dev/null
@@ -1,22 +0,0 @@
-#Setup instructions for NOAA RDHPC Jet using Intel-18.0.5.274 (bash shell)
-
-module purge
-
-module use /contrib/sutils/modulefiles
-module load sutils
-
-module load cmake/3.20.1
-
-module use /lfs4/HFIP/hfv3gfs/nwprod/hpc-stack/libs/modulefiles/stack
-
-module load hpc/1.1.0
-module load hpc-intel/18.0.5.274
-module load hpc-impi/2018.4.274
-
-module load srw_common
-
-export CMAKE_C_COMPILER=mpiicc
-export CMAKE_CXX_COMPILER=mpiicpc
-export CMAKE_Fortran_COMPILER=mpiifort
-export CMAKE_Platform=jet.intel
-
diff --git a/env/build_macos_gnu.env b/env/build_macos_gnu.env
deleted file mode 100644
index e735e6d82c..0000000000
--- a/env/build_macos_gnu.env
+++ /dev/null
@@ -1,85 +0,0 @@
-# Setup instructions for macOS Big Sur (Darwin20)
-# Compilers: gcc-11.2.0 including gfortran-11.2.0
-# MPI: openmpi/4.2.1
-# Option 1 (default) : M1 chip, arm64, running natively (not using Rosetta)
-# Option 2 (uncomment if needed) Intel chip, x86_64
-#
-# Option 1 (default):
-export BASH_ENV=/opt/homebrew/opt/lmod/init/bash
-# Option 2 (uncomment if needed):
-#export BASH_ENV=/usr/local/opt/lmod/init/bash
-source $BASH_ENV
-
-# This path should point to your HPCstack installation directory
-export HPCstack=/Users/username/hpc-stack/install
-
-# This path should point to your SRW Application directory
-export SRW=/Users/username/ufs-srweather-app
-
-module purge
-# Load HPC stack
-module use ${HPCstack}/modulefiles/stack
-module load hpc
-module load hpc-python
-#
-module load hpc-gnu
-module load openmpi
-module load hpc-openmpi
-
-module use ${SRW}/env
-#module load srw_common
-module load jasper/2.0.25
-module load zlib/1.2.11
-
-module load hdf5/1.10.6
-module load netcdf/4.7.4
-module load pio/2.5.3
-module load esmf/8_2_0
-module load fms/2021.04
-
-module load bacio/2.4.1
-module load crtm/2.3.0
-module load g2/3.4.5
-module load g2tmpl/1.10.0
-module load ip/3.3.3
-module load sp/2.3.3
-module load w3nco/2.4.1
-module load upp/10.0.10
-
-module load gftl-shared/v1.3.6
-module load yafyaml/v0.5.1
-module load mapl/2.12.2-esmf-8_2_0
-module load gfsio/1.4.1
-module load landsfcutil/2.4.1
-module load nemsio/2.5.4
-module load nemsiogfs/2.5.3
-module load sfcio/1.4.1
-module load sigio/2.3.2
-module load w3emc/2.9.2
-module load wgrib2/2.0.8
-module list
-
-# Option 1 compiler paths:
-export CC=/opt/homebrew/bin/gcc
-export FC=/opt/homebrew/bin/gfortran
-export CXX=/opt/homebrew/bin/g++
-
-# Option 2 compiler paths:
-#export CC=/usr/local/bin/gcc
-#export FC=/usr/local/bin/gfortran
-#export CXX=/usr/local/bin/g++
-
-ulimit -S -s unlimited
-
-export MPI_CC=mpicc
-export MPI_CXX=mpicxx
-export MPI_FC=mpif90
-
-export CMAKE_C_COMPILER=${MPI_CC}
-export CMAKE_CXX_COMPILER=${MPI_CXX}
-export CMAKE_Fortran_COMPILER=${MPI_FC}
-export CMAKE_Platform=macos.gnu
-
-export CMAKE_Fortran_COMPILER_ID="GNU"
-export LDFLAGS="-L${MPI_ROOT}/lib"
-export FFLAGS="-DNO_QUAD_PRECISION -fallow-argument-mismatch "
diff --git a/env/build_odin_intel.env b/env/build_odin_intel.env
deleted file mode 100644
index cf69d3d091..0000000000
--- a/env/build_odin_intel.env
+++ /dev/null
@@ -1,58 +0,0 @@
-#Setup instructions for NOAA RDHPC Jet using Intel-18.0.5.274 (bash shell)
-
-module unload modules
-unset -f module
-
-export BASH_ENV=/usr/local/lmod/8.3.1/init/bash
-source $BASH_ENV
-export LMOD_SYSTEM_DEFAULT_MODULES=PrgEnv-intel:cray-mpich:intel:craype
-module --initial_load --no_redirect restore
-#module use <$HOME>/
-export MODULEPATH=/oldscratch/ywang/external/hpc-stack/modulefiles/mpi/intel/2020/cray-mpich/7.7.16:/oldscratch/ywang/external/hpc-stack/modulefiles/compiler/intel/2020:/oldscratch/ywang/external/hpc-stack/modulefiles/core:/oldscratch/ywang/external/hpc-stack/modulefiles/stack:/opt/cray/pe/perftools/21.02.0/modulefiles:/opt/cray/ari/modulefiles:/opt/cray/pe/craype-targets/default/modulefiles:/opt/cray/pe/modulefiles:/opt/cray/modulefiles:/opt/modulefiles
-
-#module purge
-export CMAKE=/home/yunheng.wang/tools/cmake-3.23.0-rc2/bin/cmake
-export PATH=/home/yunheng.wang/tools/cmake-3.23.0-rc2/bin:${PATH}
-
-module load hpc/1.2.0
-module load hpc-intel
-module load hpc-cray-mpich
-
-#module load srw_common
-
-module load jasper
-module load zlib
-module load png
-
-#module load cray-hdf5
-#module load cray-netcdf
-module load esmf
-module load fms
-
-module load bacio
-module load crtm
-module load g2
-module load g2tmpl
-module load ip
-module load sp
-module load w3nco
-module load upp
-
-module load gftl-shared
-module load yafyaml
-module load mapl
-
-module load gfsio
-module load landsfcutil
-module load nemsio
-module load nemsiogfs
-module load sfcio
-module load sigio
-module load w3emc
-module load wgrib2
-
-export CMAKE_C_COMPILER=cc
-export CMAKE_CXX_COMPILER=CC
-export CMAKE_Fortran_COMPILER=ftn
-export CMAKE_Platform=odin.intel
-
diff --git a/env/build_orion_intel.env b/env/build_orion_intel.env
deleted file mode 100644
index 24db81de42..0000000000
--- a/env/build_orion_intel.env
+++ /dev/null
@@ -1,22 +0,0 @@
-#Setup instructions for MSU Orion using Intel-18.0.5 (bash shell)
-
-module purge
-
-module load contrib noaatools
-
-module load cmake/3.22.1
-module load python/3.9.2
-
-module use /apps/contrib/NCEP/libs/hpc-stack/modulefiles/stack
-
-module load hpc/1.1.0
-module load hpc-intel/2018.4
-module load hpc-impi/2018.4
-
-module load srw_common
-
-export CMAKE_C_COMPILER=mpiicc
-export CMAKE_CXX_COMPILER=mpiicpc
-export CMAKE_Fortran_COMPILER=mpiifort
-export CMAKE_Platform=orion.intel
-
diff --git a/env/build_singularity_gnu.env b/env/build_singularity_gnu.env
deleted file mode 100644
index 0c4eb2223e..0000000000
--- a/env/build_singularity_gnu.env
+++ /dev/null
@@ -1,40 +0,0 @@
-#Setup instructions for singularity container using gnu 9.3.0 built from this docker image: docker://noaaepic/ubuntu20.04-epic-srwapp:1.0
-#https://hub.docker.com/r/noaaepic/ubuntu20.04-epic-srwapp
-
-source /usr/share/lmod/6.6/init/profile
-
-module purge
-
-module use /opt/hpc-modules/modulefiles/stack
-
-module load hpc
-module load hpc-gnu
-module load hpc-openmpi
-
-module load netcdf
-module load hdf5
-module load bacio
-module load sfcio
-module load sigio
-module load nemsio
-module load w3emc
-module load esmf
-module load fms
-module load crtm
-module load g2
-module load png
-module load zlib
-module load g2tmpl
-module load ip
-module load sp
-module load w3nco
-module load cmake
-module load gfsio
-module load wgrib2
-module load upp
-
-
-export CMAKE_C_COMPILER=mpicc
-export CMAKE_CXX_COMPILER=mpicxx
-export CMAKE_Fortran_COMPILER=mpif90
-export CMAKE_Platform=singularity.gnu
diff --git a/env/build_wcoss_dell_p3_intel.env b/env/build_wcoss_dell_p3_intel.env
deleted file mode 100644
index 31978ef186..0000000000
--- a/env/build_wcoss_dell_p3_intel.env
+++ /dev/null
@@ -1,24 +0,0 @@
-#Setup instructions for NOAA WCOSS Dell using Intel-18.0.1.163 (bash shell)
-
-module purge
-
-module load ips/18.0.5.274
-module load impi/18.0.1
-module load lsf/10.1
-module load python/3.6.3
-
-### hpc-stack ###
-module use /usrx/local/nceplibs/dev/hpc-stack/libs/hpc-stack/modulefiles/stack
-module load hpc/1.1.0
-module load hpc-ips/18.0.5.274
-module load hpc-impi/18.0.1
-
-module load srw_common
-
-module load cmake/3.20.0
-module load HPSS/5.0.2.5
-
-export CMAKE_C_COMPILER=mpiicc
-export CMAKE_CXX_COMPILER=mpiicpc
-export CMAKE_Fortran_COMPILER=mpiifort
-export CMAKE_Platform=wcoss_dell_p3
diff --git a/env/detect_machine.sh b/env/detect_machine.sh
deleted file mode 100755
index 1634b18dd1..0000000000
--- a/env/detect_machine.sh
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/bin/bash
-
-case $(hostname -f) in
-
-  v71a1.ncep.noaa.gov) MACHINE_ID=wcoss_dell_p3 ;; ### venus
-  v71a2.ncep.noaa.gov) MACHINE_ID=wcoss_dell_p3 ;; ### venus
-  v71a3.ncep.noaa.gov) MACHINE_ID=wcoss_dell_p3 ;; ### venus
-  v72a1.ncep.noaa.gov) MACHINE_ID=wcoss_dell_p3 ;; ### venus
-  v72a2.ncep.noaa.gov) MACHINE_ID=wcoss_dell_p3 ;; ### venus
-  v72a3.ncep.noaa.gov) MACHINE_ID=wcoss_dell_p3 ;; ### venus
-
-  m71a1.ncep.noaa.gov) MACHINE_ID=wcoss_dell_p3 ;; ### mars
-  m71a2.ncep.noaa.gov) MACHINE_ID=wcoss_dell_p3 ;; ### mars
-  m71a3.ncep.noaa.gov) MACHINE_ID=wcoss_dell_p3 ;; ### mars
-  m72a1.ncep.noaa.gov) MACHINE_ID=wcoss_dell_p3 ;; ### mars
-  m72a2.ncep.noaa.gov) MACHINE_ID=wcoss_dell_p3 ;; ### mars
-  m72a3.ncep.noaa.gov) MACHINE_ID=wcoss_dell_p3 ;; ### mars
-
-  alogin01.acorn.wcoss2.ncep.noaa.gov) MACHINE_ID=wcoss2 ;; ### acorn
-  alogin02.acorn.wcoss2.ncep.noaa.gov) MACHINE_ID=wcoss2 ;; ### acorn
-  adecflow01.acorn.wcoss2.ncep.noaa.gov) MACHINE_ID=wcoss2 ;; ### acorn
-  adecflow02.acorn.wcoss2.ncep.noaa.gov) MACHINE_ID=wcoss2 ;; ### acorn
-
-  gaea9) MACHINE_ID=gaea ;; ### gaea9
-  gaea10) MACHINE_ID=gaea ;; ### gaea10
-  gaea11) MACHINE_ID=gaea ;; ### gaea11
-  gaea12) MACHINE_ID=gaea ;; ### gaea12
-  gaea13) MACHINE_ID=gaea ;; ### gaea13
-  gaea14) MACHINE_ID=gaea ;; ### gaea14
-  gaea15) MACHINE_ID=gaea ;; ### gaea15
-  gaea16) MACHINE_ID=gaea ;; ### gaea16
-  gaea9.ncrc.gov) MACHINE_ID=gaea ;; ### gaea9
-  gaea10.ncrc.gov) MACHINE_ID=gaea ;; ### gaea10
-  gaea11.ncrc.gov) MACHINE_ID=gaea ;; ### gaea11
-  gaea12.ncrc.gov) MACHINE_ID=gaea ;; ### gaea12
-  gaea13.ncrc.gov) MACHINE_ID=gaea ;; ### gaea13
-  gaea14.ncrc.gov) MACHINE_ID=gaea ;; ### gaea14
-  gaea15.ncrc.gov) MACHINE_ID=gaea ;; ### gaea15
-  gaea16.ncrc.gov) MACHINE_ID=gaea ;; ### gaea16
-
-  hfe01) MACHINE_ID=hera ;; ### hera01
-  hfe02) MACHINE_ID=hera ;; ### hera02
-  hfe03) MACHINE_ID=hera ;; ### hera03
-  hfe04) MACHINE_ID=hera ;; ### hera04
-  hfe05) MACHINE_ID=hera ;; ### hera05
-  hfe06) MACHINE_ID=hera ;; ### hera06
-  hfe07) MACHINE_ID=hera ;; ### hera07
-  hfe08) MACHINE_ID=hera ;; ### hera08
-  hfe09) MACHINE_ID=hera ;; ### hera09
-  hfe10) MACHINE_ID=hera ;; ### hera10
-  hfe11) MACHINE_ID=hera ;; ### hera11
-  hfe12) MACHINE_ID=hera ;; ### hera12
-  hecflow01) MACHINE_ID=hera ;; ### heraecflow01
-
-  s4-submit.ssec.wisc.edu) MACHINE_ID=s4 ;; ### s4
-
-  fe1) MACHINE_ID=jet ;; ### jet01
-  fe2) MACHINE_ID=jet ;; ### jet02
-  fe3) MACHINE_ID=jet ;; ### jet03
-  fe4) MACHINE_ID=jet ;; ### jet04
-  fe5) MACHINE_ID=jet ;; ### jet05
-  fe6) MACHINE_ID=jet ;; ### jet06
-  fe7) MACHINE_ID=jet ;; ### jet07
-  fe8) MACHINE_ID=jet ;; ### jet08
-  tfe1) MACHINE_ID=jet ;; ### jet09
-  tfe2) MACHINE_ID=jet ;; ### jet10
-
-  Orion-login-1.HPC.MsState.Edu) MACHINE_ID=orion ;; ### orion1
-  Orion-login-2.HPC.MsState.Edu) MACHINE_ID=orion ;; ### orion2
-  Orion-login-3.HPC.MsState.Edu) MACHINE_ID=orion ;; ### orion3
-  Orion-login-4.HPC.MsState.Edu) MACHINE_ID=orion ;; ### orion4
-
-  cheyenne1.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne1
-  cheyenne2.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne2
-  cheyenne3.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne3
-  cheyenne4.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne4
-  cheyenne5.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne5
-  cheyenne6.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne6
-  cheyenne1.ib0.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne1
-  cheyenne2.ib0.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne2
-  cheyenne3.ib0.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne3
-  cheyenne4.ib0.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne4
-  cheyenne5.ib0.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne5
-  cheyenne6.ib0.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne6
-  chadmin1.ib0.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne1
-  chadmin2.ib0.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne1
-  chadmin3.ib0.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne1
-  chadmin4.ib0.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne1
-  chadmin5.ib0.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne1
-  chadmin6.ib0.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne1
-
-  login1.stampede2.tacc.utexas.edu) MACHINE_ID=stampede ;; ### stampede1
-  login2.stampede2.tacc.utexas.edu) MACHINE_ID=stampede ;; ### stampede2
-  login3.stampede2.tacc.utexas.edu) MACHINE_ID=stampede ;; ### stampede3
-  login4.stampede2.tacc.utexas.edu) MACHINE_ID=stampede ;; ### stampede4
-
-  login01.expanse.sdsc.edu) MACHINE_ID=expanse ;; ### expanse1
-  login02.expanse.sdsc.edu) MACHINE_ID=expanse ;; ### expanse2
-
-  nid00193) MACHINE_ID=odin ;; ### Odin1 at NSSL
-  nid00385) MACHINE_ID=odin ;; ### Odin2 at NSSL
-esac
-
-MACHINE="${MACHINE_ID}"
-PLATFORM="${MACHINE}"
-
-printf "PLATFORM(MACHINE)=${PLATFORM}\n" >&2
diff --git a/env/wflow_cheyenne.env b/env/wflow_cheyenne.env
deleted file mode 100644
index 1f0ecb485c..0000000000
--- a/env/wflow_cheyenne.env
+++ /dev/null
@@ -1,6 +0,0 @@
-# Python environment for workflow on Cheyenne
-
-module load ncarenv
-ncar_pylib /glade/p/ral/jntp/UFS_SRW_app/ncar_pylib/regional_workflow
-module use -a /glade/p/ral/jntp/UFS_SRW_app/modules/
-module load rocoto
diff --git a/env/wflow_gaea.env b/env/wflow_gaea.env
deleted file mode 100644
index 80d336b0f4..0000000000
--- a/env/wflow_gaea.env
+++ /dev/null
@@ -1,8 +0,0 @@
-#Setup instructions for NOAA RDHPC Hera using Intel-18.0.5.274 (bash shell)
-
-module use /lustre/f2/pdata/esrl/gsd/contrib/modulefiles
-module load rocoto
-module load miniconda3
-
-export CONDA_DEFAULT_ENV=regional_workflow
-
diff --git a/env/wflow_hera.env b/env/wflow_hera.env
deleted file mode 100644
index f526c434f5..0000000000
--- a/env/wflow_hera.env
+++ /dev/null
@@ -1,7 +0,0 @@
-# Python environment for workflow on Hera
-
-module load rocoto
-
-module use -a /contrib/miniconda3/modulefiles
-module load miniconda3
-conda activate regional_workflow
diff --git a/env/wflow_jet.env b/env/wflow_jet.env
deleted file mode 100644
index f543bed8f1..0000000000
--- a/env/wflow_jet.env
+++ /dev/null
@@ -1,7 +0,0 @@
-# Python environment for workflow on Jet
-
-module load rocoto
-
-module use -a /contrib/miniconda3/modulefiles
-module load miniconda3
-conda activate regional_workflow
diff --git a/env/wflow_macos.env b/env/wflow_macos.env
deleted file mode 100644
index a353faa9c1..0000000000
--- a/env/wflow_macos.env
+++ /dev/null
@@ -1,18 +0,0 @@
-# Prepare environment for generating and running SRW workflow
-# (wflow_macos.env)
-
-export CMAKE_Platform=macos
-
-# This path should point to your python virtual environment directory,
-# `regional_workflow`, created earlier for
-#
-export VENV=/Users/username/venv/regional_workflow
-source $VENV/bin/activate
-
-# Uncomment if Rocoto workflow manager is used
-# This path should point to your Rocoto module location
-# ROCOTOmod=/Users/username/modules
-# module use ${ROCOTOmod}
-# module load rocoto
-# module list
-
diff --git a/env/wflow_odin.env b/env/wflow_odin.env
deleted file mode 100644
index 2e3f60e4ea..0000000000
--- a/env/wflow_odin.env
+++ /dev/null
@@ -1,25 +0,0 @@
-# >>> conda initialize >>>
-# !! Contents within this block are managed by 'conda init' !!
-__conda_setup="$('/scratch/software/Odin/python/anaconda2/bin/conda' 'shell.bash' 'hook' 2> /dev/null)"
-if [ $? -eq 0 ]; then
-    eval "$__conda_setup"
-else
-    if [ -f "/scratch/software/Odin/python/anaconda2/etc/profile.d/conda.sh" ]; then
-        . "/scratch/software/Odin/python/anaconda2/etc/profile.d/conda.sh"
-    else
-        export PATH="/scratch/software/Odin/python/anaconda2/bin:$PATH"
-    fi
-fi
-unset __conda_setup
-# <<< conda initialize <<<
-
-# To make "regional_workflow" avaiable,
-# you should uncomment the following lines, which create file ".condarc"
-# or install the environment yourself.
-#cat > $HOME/.condarc << EOF
-# ...
diff --git a/modulefiles/wflow_cheyenne b/modulefiles/wflow_cheyenne
new file mode 100644
--- /dev/null
+++ b/modulefiles/wflow_cheyenne
@@ -0,0 +1,17 @@
+#%Module

+proc ModulesHelp { } {
+   puts stderr "This module loads python environment for running SRW on"
+   puts stderr "the CISL machine Cheyenne"
+}
+
+module-whatis "Loads libraries needed for running SRW on Cheyenne"
+
+module load ncarenv
+module use -a /glade/p/ral/jntp/UFS_SRW_app/modules/
+module load rocoto
+
+if { [module-info mode load] } {
+   puts stderr "Please do the following to activate python:
+       > ncar_pylib /glade/p/ral/jntp/UFS_SRW_app/ncar_pylib/regional_workflow"
+}
diff --git a/modulefiles/wflow_gaea b/modulefiles/wflow_gaea
new file mode 100644
index 0000000000..c9889a4403
--- /dev/null
+++ b/modulefiles/wflow_gaea
@@ -0,0 +1,15 @@
+#%Module
+
+proc ModulesHelp { } {
+   puts stderr "This module loads python environment for running SRW on"
+   puts stderr "the NOAA RDHPC machine Gaea"
+}
+
+module-whatis "Loads libraries needed for running SRW on Gaea"
+
+module use /lustre/f2/pdata/esrl/gsd/contrib/modulefiles
+module load rocoto
+module load miniconda3
+
+setenv CONDA_DEFAULT_ENV "regional_workflow"
+
diff --git a/modulefiles/wflow_hera b/modulefiles/wflow_hera
new file mode 100644
index 0000000000..724953e4bb
--- /dev/null
+++ b/modulefiles/wflow_hera
@@ -0,0 +1,19 @@
+#%Module
+
+proc ModulesHelp { } {
+   puts stderr "This module loads python environment for running SRW on"
+   puts stderr "the NOAA RDHPC machine Hera"
+}
+
+module-whatis "Loads libraries needed for running SRW on Hera"
+
+module load rocoto
+
+module use -a /contrib/miniconda3/modulefiles
+module load miniconda3
+
+if { [module-info mode load] } {
+   puts stderr "Please do the following to activate conda:
+       > conda activate regional_workflow"
+}
+
diff --git a/modulefiles/wflow_jet b/modulefiles/wflow_jet
new file mode 100644
index 0000000000..e340ca4a12
--- /dev/null
+++ b/modulefiles/wflow_jet
@@ -0,0 +1,19 @@
+#%Module
+
+proc ModulesHelp { } {
+   puts stderr "This module loads python environment for running SRW on"
+   puts stderr "the NOAA RDHPC machine Jet"
+}
+
+module-whatis "Loads libraries needed for running SRW on Jet"
+
+module load rocoto
+
+module use -a /contrib/miniconda3/modulefiles
+module load miniconda3
+
+if { [module-info mode load] } {
+   puts stderr "Please do the following to activate conda:
+       > conda activate regional_workflow"
+}
+
diff --git a/modulefiles/wflow_macos b/modulefiles/wflow_macos
new file mode 100644
index 0000000000..7481b36911
--- /dev/null
+++ b/modulefiles/wflow_macos
@@ -0,0 +1,26 @@
+#%Module
+
+proc ModulesHelp { } {
+   puts stderr "This module loads python environment for running SRW on"
+   puts stderr "macOS"
+}
+
+module-whatis "Loads libraries needed for running SRW on macOS"
+
+setenv CMAKE_Platform macos
+
+# This path should point to your python virtual environment directory,
+# `regional_workflow`, created earlier for
+#
+setenv VENV "/Users/username/venv/regional_workflow"
+if { [module-info mode load] } {
+   system "source $env(VENV)/bin/activate;"
+}
+
+# Uncomment if Rocoto workflow manager is used
+# This path should point to your Rocoto module location
+# set ROCOTOmod "/Users/username/modules"
+# module use ${ROCOTOmod}
+# module load rocoto
+# module list
+
diff --git a/modulefiles/wflow_odin b/modulefiles/wflow_odin
new file mode 100644
index 0000000000..ca240f84e1
--- /dev/null
+++ b/modulefiles/wflow_odin
@@ -0,0 +1,42 @@
+#%Module
+
+proc ModulesHelp { } {
+   puts stderr "This module loads python environment for running SRW on"
+   puts stderr "NSSL machine Odin"
+}
+
+module-whatis "Loads libraries needed for running SRW on Odin"
+
+if { [module-info mode load] } {
+   # >>> conda initialize >>>
+   # !! Contents within this block are managed by 'conda init' !!
+   set shell [module-info shelltype]
+   set conda_path "/scratch/software/Odin/python/anaconda2"
+   if {$shell == "csh"} {
+      set conda_file "$conda_path/conda.csh"
+   } else {
+      set conda_file "$conda_path/conda.sh"
+   }
+
+   if {[ file exists "$conda_file" ]} {
+      system "source $conda_file;"
+   } else {
+      prepend-path PATH "$conda_path/bin"
+   }
+   # <<< conda initialize <<<
+
+   # To make "regional_workflow" available,
+   # you should uncomment the following lines, which create the file ".condarc",
+   # or install the environment yourself.
+   #
+#  set cmd {cat > $HOME/.condarc << EOF
+#  ...
+
+   puts stderr "Please do the following to activate conda:
+       > conda config --set changeps1 False
+       > conda activate regional_workflow"
+}
+
diff --git a/modulefiles/wflow_orion b/modulefiles/wflow_orion
new file mode 100644
index 0000000000..fb3439b0b9
--- /dev/null
+++ b/modulefiles/wflow_orion
@@ -0,0 +1,19 @@
+#%Module
+
+proc ModulesHelp { } {
+   puts stderr "This module loads python environment for running SRW on"
+   puts stderr "the MSU machine Orion"
+}
+
+module-whatis "Loads libraries needed for running SRW on Orion"
+
+module load contrib rocoto
+
+module use -a /apps/contrib/miniconda3-noaa-gsl/modulefiles
+module load miniconda3/3.8
+
+if { [module-info mode load] } {
+   puts stderr "Please do the following to activate conda:
+       > conda activate regional_workflow"
+}
+
diff --git a/modulefiles/wflow_singularity b/modulefiles/wflow_singularity
new file mode 100644
index 0000000000..af765e015b
--- /dev/null
+++ b/modulefiles/wflow_singularity
@@ -0,0 +1,16 @@
+#%Module
+
+proc ModulesHelp { } {
+   puts stderr "This module loads python environment for running SRW in"
+   puts stderr "a singularity container"
+}
+
+module-whatis "Loads libraries needed for running SRW in a singularity container"
+
+module use -a /opt/hpc-modules/modulefiles/core
+module load miniconda3
+
+if { [module-info mode load] } {
+   system "conda activate regional_workflow;"
+}
+
diff --git a/env/wflow_wcoss_dell_p3.env b/modulefiles/wflow_wcoss_dell_p3
similarity index 51%
rename from env/wflow_wcoss_dell_p3.env
rename to modulefiles/wflow_wcoss_dell_p3
index fe0c07d257..c9e76bd0dd 100644
--- a/env/wflow_wcoss_dell_p3.env
+++ b/modulefiles/wflow_wcoss_dell_p3
@@ -1,4 +1,11 @@
-# Python environment for workflow on WCOSS_dell_p3
+#%Module
+
+proc ModulesHelp { } {
+   puts stderr "This module loads python environment for running SRW on"
+   puts stderr "the WCOSS machine Dell_p3"
+}
+
+module-whatis "Loads libraries needed for running SRW on Dell_p3"
 
 module load lsf/10.1
 module use /gpfs/dell3/usrx/local/dev/emc_rocoto/modulefiles/
diff --git a/test/README.md b/test/README.md
index 84f58027eb..96898ba85c 100644
--- a/test/README.md
+++ b/test/README.md
@@ -33,7 +33,7 @@ On cheyenne:
 
 ```
 cd test
-./build.sh >& build.out &
+./build.sh cheyenne >& build.out &
 ```
 
 Check the ``${SR_WX_APP_TOP_DIR}/test/build_test$PID.out`` file for PASS/FAIL.
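For orientation, the manual build path that the docs/INSTALL and BuildRunSRW.rst changes above describe amounts to the following sequence. This is a sketch assuming a bash login shell on a Level-1 platform such as Hera; the build directory name and install prefix are illustrative, not mandated by the patch:

```
# Activate Lmod where needed; on systems that already use Lmod this reduces to a module purge
source etc/lmod-setup.sh hera

# Make the SRW modulefiles visible and load the platform/compiler combination
module use modulefiles
module load build_hera_intel

# Configure and build out-of-source with CMake
mkdir -p build && cd build
cmake .. -DCMAKE_INSTALL_PREFIX=..
make -j4
```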
diff --git a/test/build.sh b/test/build.sh
index 186c6b90e0..83c30910e7 100755
--- a/test/build.sh
+++ b/test/build.sh
@@ -7,7 +7,7 @@
 #
 # Usage: see function usage below
 #
-# Examples: ./build.sh >& test.out &
+# Examples: ./build.sh hera >& test.out &
 #
 set -eux    # Uncomment for debugging
 #=======================================================================
@@ -16,14 +16,14 @@ fail() { echo -e "\n$1\n" >> ${TEST_OUTPUT} && exit 1; }
 
 function usage() {
   echo
-  echo "Usage: $0 "
+  echo "Usage: $0 machine"
   echo
   exit 1
 }
 
-machines=( hera jet cheyenne orion wcoss_cray wcoss_dell_p3 )
+machines=( hera jet cheyenne orion wcoss_cray wcoss_dell_p3 gaea odin singularity )
 
-[[ $# -eq 1 ]] && usage
+[[ $# -ne 1 ]] && usage
 
 #-----------------------------------------------------------------------
 
 TEST_DIR=$( pwd )            # Directory with this script
 TOP_DIR=${TEST_DIR}/..       # Top level (umbrella repo) directory
 TEST_OUTPUT=${TEST_DIR}/build_test${PID}.out
 
-# Detect MACHINE
-source ${TOP_DIR}/env/detect_machine.sh
+# set PLATFORM (MACHINE)
+MACHINE="$1"
+PLATFORM="${MACHINE}"
+printf "PLATFORM(MACHINE)=${PLATFORM}\n" >&2
 
 machine=$(echo "${MACHINE}" | tr '[A-Z]' '[a-z]') # scripts in sorc need lower case machine name
@@ -54,6 +56,8 @@ fi
 #-----------------------------------------------------------------------
 
 if [ "${machine}" == "cheyenne" ] ; then
   compilers=( intel gnu )
+elif [ "${machine}" == "macos" ] || [ "${machine}" == "singularity" ] ; then
+  compilers=( gnu )
 else
   compilers=( intel )
 fi
@@ -100,7 +104,7 @@ declare -a executables_created=( chgres_cube \
   BIN_DIR=${TOP_DIR}/bin_${compiler}
   EXEC_DIR=${BIN_DIR}/bin
   if [ $build_it -eq 0 ] ; then
-    ./devbuild.sh --compiler=${compiler} --build-dir=${BUILD_DIR} --install-dir=${BIN_DIR} \
+    ./devbuild.sh --platform=${machine} --compiler=${compiler} --build-dir=${BUILD_DIR} --install-dir=${BIN_DIR} \
       --clean || fail "Build ${machine} ${compiler} FAILED"
   fi # End of skip build for testing
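The new build modulefiles themselves are not shown in this excerpt, but the BuildRunSRW.rst changes above tell Level 2-4 users to copy an existing ``build_<platform>_<compiler>`` modulefile as a template. A minimal sketch of such a Tcl modulefile follows, combining the structure of the new wflow_* modulefiles with the module loads and CMake exports from the deleted env/build_*_intel.env files; the hpc-stack path and the Intel MPI wrapper names are placeholders to adapt to your system:

```
#%Module

proc ModulesHelp { } {
   puts stderr "This module loads libraries needed for building SRW on <platform>"
}

module-whatis "Loads libraries needed for building SRW on <platform>"

# Placeholder: point this at your hpc-stack installation
module use /path/to/hpc-stack/modulefiles/stack
module load hpc
module load hpc-intel
module load hpc-impi

# Common libraries required by the SRW App build
module load srw_common

# CMake picks these up when configuring the SRW App
setenv CMAKE_C_COMPILER       mpiicc
setenv CMAKE_CXX_COMPILER     mpiicpc
setenv CMAKE_Fortran_COMPILER mpiifort
```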