Merge pull request #718 from ESMCI/jayeshkrishna/machinefiles/add_Anvil
Add support for the new machine "anvil". This machine uses the same login node
as blues, and jobs are submitted to the "acme" queue.

To use the anvil cluster, create your case on the blues login node and explicitly
specify "-mach anvil".
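
For example (a minimal sketch of the commands; "mycase" and the compset/grid values are placeholders, and "-compiler" is optional):

  cd cime/scripts
  ./create_newcase -case mycase -compset <compset> -res <grid> -mach anvil -compiler intel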
rljacob authored Nov 7, 2016
2 parents 2e53d7f + 0919054 commit 6c706cc
Showing 3 changed files with 158 additions and 1 deletion.
11 changes: 11 additions & 0 deletions cime_config/acme/machines/config_batch.xml
@@ -150,6 +150,17 @@
</walltimes>
</batch_system>

<!-- anvil is PBS -->
<batch_system MACH="anvil" type="pbs" version="x.y">
<directives>
<directive>-A {{ PROJECT }}</directive>
<directive>-l nodes={{ num_nodes }}:ppn={{ tasks_per_node }}</directive>
</directives>
<queues>
<queue walltimemax="01:00:00" jobmin="1" jobmax="4320" default="true">acme</queue>
</queues>
</batch_system>
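
With, for example, PROJECT=ACME, 2 nodes, and 36 tasks per node, the anvil directives above plus the queue settings would render roughly as the following PBS job header (a sketch, not captured output):

  #PBS -A ACME
  #PBS -l nodes=2:ppn=36
  #PBS -q acme
  #PBS -l walltime=01:00:00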

<!-- edison is SLURM as of Jan-4-2016 -->
<batch_system MACH="edison" type="slurm" version="x.y">
<queues>
37 changes: 37 additions & 0 deletions cime_config/acme/machines/config_compilers.xml
@@ -968,6 +968,43 @@ for mct, etc.
<PIO_FILESYSTEM_HINTS>gpfs </PIO_FILESYSTEM_HINTS>
</compiler>

<compiler COMPILER="intel" MACH="anvil">
<PNETCDF_PATH>$(PNETCDFROOT)</PNETCDF_PATH>
<NETCDF_PATH>$(NETCDFROOT)</NETCDF_PATH>
<MPI_PATH MPILIB="mvapich">/blues/gpfs/home/software/spack-0.9.1/opt/spack/linux-centos6-x86_64/intel-16.0.3/mvapich2-2.2b-ow5ikyjehwemgdr4h6k7ii7da6hs6lfz</MPI_PATH>
<MPI_LIB_NAME MPILIB="mvapich">mpi</MPI_LIB_NAME>
<ADD_SLIBS>$(shell $(NETCDF_PATH)/bin/nc-config --flibs) -llapack -lblas </ADD_SLIBS>
<ADD_SLIBS> -Wl,-rpath -Wl,$(NETCDFROOT)/lib </ADD_SLIBS>
<ADD_SLIBS MPILIB="mpich"> -mkl=cluster </ADD_SLIBS>
<ADD_SLIBS MPILIB="mpich2"> -mkl=cluster </ADD_SLIBS>
<ADD_SLIBS MPILIB="mpt"> -mkl=cluster </ADD_SLIBS>
<ADD_SLIBS MPILIB="openmpi"> -mkl=cluster </ADD_SLIBS>
<ADD_SLIBS MPILIB="mvapich"> -mkl=cluster </ADD_SLIBS>
<ADD_SLIBS MPILIB="impi"> -mkl=cluster </ADD_SLIBS>
<ADD_SLIBS MPILIB="mpi-serial"> -mkl </ADD_SLIBS>
<PIO_FILESYSTEM_HINTS>gpfs </PIO_FILESYSTEM_HINTS>
</compiler>
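
Taken together, the intel/anvil entries above add to SLIBS the nc-config output, LAPACK/BLAS, an rpath to the NetCDF library directory, and, for an MPI build such as mvapich, -mkl=cluster; roughly (assuming the ADD_SLIBS entries are concatenated):

  $(shell $(NETCDF_PATH)/bin/nc-config --flibs) -llapack -lblas -Wl,-rpath -Wl,$(NETCDFROOT)/lib -mkl=cluster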

<compiler COMPILER="gnu" MACH="anvil">
<PNETCDF_PATH>$(PNETCDFROOT)</PNETCDF_PATH>
<NETCDF_PATH>$(NETCDFROOT)</NETCDF_PATH>
<MPI_PATH MPILIB="mvapich">/soft/spack/opt/spack/linux-x86_64/gcc-4.4.7/gcc-5.3.0-fygfl7rvyuiteto27dlhmilp5cstw2o2</MPI_PATH>
<MPI_LIB_NAME MPILIB="mvapich">mpi</MPI_LIB_NAME>
<ADD_SLIBS>$(shell $(NETCDF_PATH)/bin/nc-config --flibs) -llapack -lblas</ADD_SLIBS>
<GPTL_CPPDEFS> -DHAVE_NANOTIME -DBIT64 -DHAVE_SLASHPROC -DHAVE_GETTIMEOFDAY</GPTL_CPPDEFS>
<PIO_FILESYSTEM_HINTS>gpfs </PIO_FILESYSTEM_HINTS>
</compiler>

<compiler COMPILER="pgi" MACH="anvil">
<PNETCDF_PATH>$(PNETCDFROOT)</PNETCDF_PATH>
<NETCDF_PATH>$(NETCDFROOT)</NETCDF_PATH>
<MPI_PATH MPILIB="mvapich">/soft/spack/opt/spack/linux-x86_64/pgi-16.3-0/mvapich2-2.2b-2t45yukj4ij6ek24fwimzydo2dg6i3n2</MPI_PATH>
<MPI_LIB_NAME MPILIB="mvapich">mpi</MPI_LIB_NAME>
<ADD_SLIBS>$(shell $(NETCDF_PATH)/bin/nc-config --flibs) -llapack -lblas</ADD_SLIBS>
<ADD_SLIBS> -rpath $(NETCDFROOT)/lib </ADD_SLIBS>
<PIO_FILESYSTEM_HINTS>gpfs </PIO_FILESYSTEM_HINTS>
</compiler>

<compiler COMPILER="pgi" MACH="eastwind">
<ADD_CFLAGS DEBUG="FALSE"> -O2 </ADD_CFLAGS>
<ADD_FFLAGS DEBUG="FALSE"> -O2 </ADD_FFLAGS>
111 changes: 110 additions & 1 deletion cime_config/acme/machines/config_machines.xml
@@ -692,7 +692,6 @@
<environment_variables compiler="gnu" mpilib="mvapich">
<env name="PNETCDFROOT">/soft/climate/pnetcdf/1.6.1/gcc-5.2/mvapich2-2.2b-gcc-5.2-psm</env>
</environment_variables>

<environment_variables compiler="intel">
<env name="NETCDFROOT">/soft/climate/netcdf/4.3.3.1c-4.2cxx-4.4.2f-serial/intel-15.0.1</env>
<env name="NETCDF_INCLUDES">/soft/climate/netcdf/4.3.3.1c-4.2cxx-4.4.2f-serial/intel-15.0.1/include</env>
@@ -706,6 +705,116 @@

</machine>

<machine MACH="anvil">
<DESC>ANL/LCRC Linux Cluster</DESC>
<NODENAME_REGEX>b.*.lcrc.anl.gov</NODENAME_REGEX>
<TESTS>acme_integration</TESTS>
<COMPILERS>gnu,intel,pgi</COMPILERS>
<MPILIBS>mvapich,openmpi,mpi-serial</MPILIBS>
<CESMSCRATCHROOT>/lcrc/group/acme/$USER/acme_scratch</CESMSCRATCHROOT>
<SAVE_TIMING_DIR>/lcrc/project/$PROJECT</SAVE_TIMING_DIR>
<RUNDIR>$CESMSCRATCHROOT/$CASE/run</RUNDIR>
<EXEROOT>$CESMSCRATCHROOT/$CASE/bld</EXEROOT>
<DIN_LOC_ROOT>/home/ccsm-data/inputdata</DIN_LOC_ROOT>
<DIN_LOC_ROOT_CLMFORC>/home/ccsm-data/inputdata/atm/datm7</DIN_LOC_ROOT_CLMFORC>
<DOUT_S_ROOT>/lcrc/group/acme/$USER/archive/$CASE</DOUT_S_ROOT>
<DOUT_L_MSROOT>/lcrc/project/ACME/$USER/archive/$CASE</DOUT_L_MSROOT>
<CCSM_BASELINE>/lcrc/group/acme/acme_baselines</CCSM_BASELINE>
<CCSM_CPRNC>/home/ccsm-data/tools/cprnc</CCSM_CPRNC>
<OS>LINUX</OS>
<BATCH_SYSTEM>pbs</BATCH_SYSTEM>
<SUPPORTED_BY>acme</SUPPORTED_BY>
<GMAKE_J>4</GMAKE_J>
<MAX_TASKS_PER_NODE>36</MAX_TASKS_PER_NODE>
<PES_PER_NODE>36</PES_PER_NODE>
<PROJECT_REQUIRED>FALSE</PROJECT_REQUIRED>
<PROJECT>ACME</PROJECT>
<mpirun mpilib="default">
<executable>mpiexec</executable>
<arguments>
<arg name="num_tasks"> -n $TOTALPES </arg>
</arguments>
</mpirun>
<mpirun mpilib="mpi-serial">
<executable></executable>
</mpirun>
<module_system type="soft">
<init_path lang="csh">/etc/profile.d/a_softenv.csh</init_path>
<init_path lang="sh">/etc/profile.d/a_softenv.sh</init_path>
<cmd_path lang="csh">soft</cmd_path>
<cmd_path lang="sh">soft</cmd_path>
<modules compiler="intel">
<command name="add">+intel-16.0.3</command>
<command name="add">+netcdf-c-4.4.0-f77-4.4.4-intel-16.0.3-serial</command>
<command name="add">+cmake-2.8.12</command>
<command name="add">+python-2.7</command>
</modules>
<modules compiler="intel" mpilib="mvapich">
<command name="add">+mvapich2-2.2b-intel-16.0.3-acme</command>
<command name="add">+pnetcdf-1.7.0-intel-16.0.3-mvapich2-2.2b-acme</command>
</modules>
<modules compiler="intel" mpilib="openmpi">
<command name="add">+openmpi-1.10.3-intel-16.0.3-acme</command>
<command name="add">+pnetcdf-1.7.0-intel-16.0.3-openmpi-1.10.3-acme</command>
</modules>
<modules compiler="gnu">
<command name="add">+gcc-5.3.0</command>
<command name="add">+netcdf-c-4.4.0-f77-4.4.3-gcc-5.3.0-serial</command>
<command name="add">+cmake-2.8.12</command>
<command name="add">+python-2.7</command>
</modules>
<modules compiler="gnu" mpilib="mvapich">
<command name="add">+mvapich2-2.2b-gcc-5.3.0-acme</command>
<command name="add">+pnetcdf-1.6.1-gcc-5.3.0-mvapich2-2.2b-acme</command>
</modules>
<modules compiler="gnu" mpilib="openmpi">
<command name="add">+openmpi-1.10.2-gcc-5.3.0-acme</command>
<command name="add">+pnetcdf-1.6.1-gcc-5.3.0-openmpi-1.10.2-acme</command>
</modules>
<modules compiler="pgi">
<command name="add">+pgi-16.3</command>
<command name="add">+netcdf-c-4.4.0-f77-4.4.3-pgi-16.3-serial</command>
<command name="add">+cmake-2.8.12</command>
<command name="add">+python-2.7</command>
</modules>
<modules compiler="pgi" mpilib="mvapich">
<command name="add">+mvapich2-2.2b-pgi-16.3-acme</command>
<command name="add">+pnetcdf-1.6.1-pgi-16.3-mvapich2-2.2b-acme</command>
</modules>
<modules compiler="pgi" mpilib="openmpi">
<command name="add">+openmpi-1.10.2-pgi-16.3-acme</command>
<command name="add">+pnetcdf-1.6.1-pgi-16.3-openmpi-1.10.2-acme</command>
</modules>
</module_system>
<environment_variables compiler="intel">
<env name="NETCDFROOT">/soft/spack/opt/spack/linux-centos6-x86_64/intel-16.0.3/netcdf-4.4.0-ds5gqquzjytf4efz3d5njsvpkfmmrwq7</env>
</environment_variables>
<environment_variables compiler="intel" mpilib="mvapich">
<env name="PNETCDFROOT">/soft/spack/opt/spack/linux-centos6-x86_64/intel-16.0.3/parallel-netcdf-1.7.0-gabvxdumr3chrvldpid5sqwtkijq6xkr</env>
</environment_variables>
<environment_variables compiler="intel" mpilib="openmpi">
<env name="PNETCDFROOT">/soft/spack/opt/spack/linux-centos6-x86_64/intel-16.0.3/parallel-netcdf-1.7.0-3gxxcl2twfmvvq7rvs7z3yykaw4t2trr</env>
</environment_variables>
<environment_variables compiler="gnu">
<env name="NETCDFROOT">/soft/spack/opt/spack/linux-x86_64/gcc-5.3.0/netcdf-4.4.0-xezev3j3kyay6obokfjvwfzqc2l6uwh5</env>
</environment_variables>
<environment_variables compiler="gnu" mpilib="mvapich">
<env name="PNETCDFROOT">/soft/spack/opt/spack/linux-x86_64/gcc-5.3.0/parallel-netcdf-1.6.1-njywxqs3qjc75ah5mrziqln3qk2nkzg6</env>
</environment_variables>
<environment_variables compiler="gnu" mpilib="openmpi">
<env name="PNETCDFROOT">/soft/spack/opt/spack/linux-x86_64/gcc-5.3.0/parallel-netcdf-1.6.1-vdvnv2sfg53mug2rcyalhb47nfxk2ki7</env>
</environment_variables>
<environment_variables compiler="pgi">
<env name="NETCDFROOT">/soft/spack/opt/spack/linux-x86_64/pgi-16.3-0/netcdf-4.4.0-3jeeee43ip5lcnvdgxssbqhnvylxw5gp</env>
</environment_variables>
<environment_variables compiler="pgi" mpilib="mvapich">
<env name="PNETCDFROOT">/soft/spack/opt/spack/linux-x86_64/pgi-16.3-0/parallel-netcdf-1.6.1-4uarepsh5u2am2bbv2cbct4fl7jombz4</env>
</environment_variables>
<environment_variables compiler="pgi" mpilib="openmpi">
<env name="PNETCDFROOT">/soft/spack/opt/spack/linux-x86_64/pgi-16.3-0/parallel-netcdf-1.6.1-5lmcmthdcd2ewbvtpncpnryzocj5b3bd</env>
</environment_variables>
</machine>
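
The softenv module_system entries above correspond to plain "soft" commands; for an intel + mvapich build they amount to roughly (a sketch):

  soft add +intel-16.0.3
  soft add +netcdf-c-4.4.0-f77-4.4.4-intel-16.0.3-serial
  soft add +cmake-2.8.12
  soft add +python-2.7
  soft add +mvapich2-2.2b-intel-16.0.3-acme
  soft add +pnetcdf-1.7.0-intel-16.0.3-mvapich2-2.2b-acme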

<machine MACH="cetus">
<DESC>ANL IBM BG/Q, os is BGP, 16 pes/node, batch system is cobalt</DESC>
<NODENAME_REGEX>cetus</NODENAME_REGEX>
