From 457d8e292e4f2ba2cb7d31ac3796f541279b0753 Mon Sep 17 00:00:00 2001 From: Vicente Bolea Date: Wed, 17 May 2023 12:55:00 -0400 Subject: [PATCH 1/9] Merge pull request #3623 from vicentebolea/kokkos-use-nvcc-wrapper ci: use nvcc_wrapper in adiosKokkos (cherry picked from commit 189f3842cdafd20235821c33eb911b70b877e9a0) --- CODEOWNERS | 20 +++++++++---- cmake/DetectOptions.cmake | 4 +-- source/adios2/CMakeLists.txt | 28 +++++-------------- source/adios2/helper/adiosGPUFunctions.h | 2 +- source/adios2/helper/kokkos/CMakeLists.txt | 25 +++++++++++++++++ .../helper/{ => kokkos}/adiosKokkos.cpp | 0 .../adios2/helper/{ => kokkos}/adiosKokkos.h | 0 7 files changed, 49 insertions(+), 30 deletions(-) create mode 100644 source/adios2/helper/kokkos/CMakeLists.txt rename source/adios2/helper/{ => kokkos}/adiosKokkos.cpp (100%) rename source/adios2/helper/{ => kokkos}/adiosKokkos.h (100%) diff --git a/CODEOWNERS b/CODEOWNERS index ef9d75102a..2d2c4cd680 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1,17 +1,25 @@ # KW's stuff -**/CMakeLists.txt @vicentebolea @caitlinross -*.cmake @vicentebolea @caitlinross +CMakeLists.txt @vicentebolea @caitlinross + +# Caitlin's stuff +plugins/ @caitlinross # Vicente's stuff -*.sh @vicentebolea *.bash @vicentebolea -*.in @vicentebolea -*.yml @vicentebolea -*.yaml @vicentebolea +*.cmake @vicentebolea *.in @vicentebolea *.json @vicentebolea +*.sh @vicentebolea *.txt @vicentebolea +*.yaml @vicentebolea +*.yml @vicentebolea +cmake/ @vicentebolea scripts/ @vicentebolea .github/ @vicentebolea .circleci/ @vicentebolea source/adios2/toolkit/sst/dp/mpi_dp.c @vicentebolea + +# GPU-aware specific files +source/adios2/helper/kokkos/ @anagainaru +source/adios2/helper/adiosCUDA.* @anagainaru +source/adios2/helper/adiosGPUFunctions.h @anagainaru diff --git a/cmake/DetectOptions.cmake b/cmake/DetectOptions.cmake index 9876538043..871ad0c42f 100644 --- a/cmake/DetectOptions.cmake +++ b/cmake/DetectOptions.cmake @@ -177,9 +177,9 @@ endif() # Kokkos 
if(ADIOS2_USE_Kokkos) if(ADIOS2_USE_Kokkos STREQUAL AUTO) - find_package(Kokkos 3.7...<4.0 QUIET) + find_package(Kokkos 3.7 QUIET) else() - find_package(Kokkos 3.7...<4.0 REQUIRED) + find_package(Kokkos 3.7 REQUIRED) endif() if(Kokkos_FOUND) set(ADIOS2_HAVE_Kokkos TRUE) diff --git a/source/adios2/CMakeLists.txt b/source/adios2/CMakeLists.txt index 77899e8ecc..f7f6643bf7 100644 --- a/source/adios2/CMakeLists.txt +++ b/source/adios2/CMakeLists.txt @@ -126,28 +126,14 @@ endif() set(maybe_adios2_core_kokkos) if(ADIOS2_HAVE_Kokkos) - add_library(adios2_core_kokkos helper/adiosKokkos.h helper/adiosKokkos.cpp) - - set_target_properties(adios2_core_kokkos PROPERTIES - VISIBILITY_INLINES_HIDDEN ON - INCLUDE_DIRECTORIES "$;$" - EXPORT_NAME core_kokkos - OUTPUT_NAME adios2${ADIOS2_LIBRARY_SUFFIX}_core_kokkos - ) - - kokkos_compilation(SOURCE helper/adiosKokkos.cpp) - if(Kokkos_ENABLE_CUDA) - set_property(SOURCE helper/adiosKokkos.cpp PROPERTY LANGUAGE CUDA) - set_property(SOURCE helper/adiosKokkos.cpp APPEND PROPERTY COMPILE_FLAGS "--extended-lambda") - set_target_properties(adios2_core_kokkos PROPERTIES - CUDA_VISIBILITY_PRESET hidden - ) - target_compile_features(adios2_core_kokkos PRIVATE cuda_std_17) - endif() - - target_link_libraries(adios2_core_kokkos PRIVATE Kokkos::kokkos) + # Kokkos imposes us to set our CMAKE_CXX_COMPILER to Kokkos_CXX_COMPILER. + # The problem is that we do not want this for the whole project and with + # CMake we cannot set the CXX_COMPILER for a single target. The solution is + # to move the adios2 module that uses Kokkos to its independent subdir and + # set there CMAKE_CXX_COMPILER, which is possible (and scoped to that subdir) + # in cmake. 
+ add_subdirectory(helper/kokkos) target_link_libraries(adios2_core PRIVATE adios2_core_kokkos) - set(maybe_adios2_core_kokkos adios2_core_kokkos) endif() diff --git a/source/adios2/helper/adiosGPUFunctions.h b/source/adios2/helper/adiosGPUFunctions.h index 8069a41b1e..78a2f323a8 100644 --- a/source/adios2/helper/adiosGPUFunctions.h +++ b/source/adios2/helper/adiosGPUFunctions.h @@ -6,7 +6,7 @@ #endif #ifdef ADIOS2_HAVE_KOKKOS -#include "adios2/helper/adiosKokkos.h" +#include "adios2/helper/kokkos/adiosKokkos.h" #endif #endif /* ADIOS2_HELPER_ADIOSGPUFUNCTIONS_H_ */ diff --git a/source/adios2/helper/kokkos/CMakeLists.txt b/source/adios2/helper/kokkos/CMakeLists.txt new file mode 100644 index 0000000000..c37a3c9417 --- /dev/null +++ b/source/adios2/helper/kokkos/CMakeLists.txt @@ -0,0 +1,25 @@ +#------------------------------------------------------------------------------# +# Distributed under the OSI-approved Apache License, Version 2.0. See +# accompanying file Copyright.txt for details. 
+#------------------------------------------------------------------------------# + +if (NOT DEFINED Kokkos_CXX_COMPILER) + message(FATAL_ERROR "ADIOS: Kokkos module requires the Kokkos_CXX_COMPILER variable") +endif() + +# CXX Compiler settings only in for this subdir +set(CMAKE_CXX_STANDARD 17) +set(CMAKE_CXX_STANDARD_REQUIRED ON) +set(CMAKE_CXX_COMPILER "${Kokkos_CXX_COMPILER}") + +add_library(adios2_core_kokkos adiosKokkos.h adiosKokkos.cpp) + +set_target_properties(adios2_core_kokkos PROPERTIES + VISIBILITY_INLINES_HIDDEN ON + INCLUDE_DIRECTORIES "$;$" + EXPORT_NAME core_kokkos + OUTPUT_NAME adios2${ADIOS2_LIBRARY_SUFFIX}_core_kokkos + ) + +kokkos_compilation(SOURCE adiosKokkos.cpp) +target_link_libraries(adios2_core_kokkos PRIVATE Kokkos::kokkos) diff --git a/source/adios2/helper/adiosKokkos.cpp b/source/adios2/helper/kokkos/adiosKokkos.cpp similarity index 100% rename from source/adios2/helper/adiosKokkos.cpp rename to source/adios2/helper/kokkos/adiosKokkos.cpp diff --git a/source/adios2/helper/adiosKokkos.h b/source/adios2/helper/kokkos/adiosKokkos.h similarity index 100% rename from source/adios2/helper/adiosKokkos.h rename to source/adios2/helper/kokkos/adiosKokkos.h From 72e430ce6eb5c5cd3cc2fb2916097429114496ca Mon Sep 17 00:00:00 2001 From: Vicente Adolfo Bolea Sanchez Date: Fri, 5 May 2023 17:10:02 -0400 Subject: [PATCH 2/9] ci,ascent: enable ascent builds (cherry picked from commit cf7a879f54290a019228e2397e72a8e5e3d05fe1) --- .gitlab/gitlab-ci-ascent.yml | 130 ++++++++++++++++++ .gitlab/gitlab-ci-gitlabdotcom.yml | 6 - .gitlab/gitlab-ci-olcf.yml | 28 ---- .gitlab/kokkos.sh | 30 ++++ CTestCustom.cmake.in | 2 + scripts/ci/cmake-v2/ci-ascent-cuda.cmake | 27 ++++ .../ci/cmake-v2/ci-ascent-kokkos-cuda.cmake | 32 +++++ scripts/ci/cmake-v2/ci-ascent-nvhpc.cmake | 30 ++++ .../ci/{cmake => cmake-v2}/ci-ascent-xl.cmake | 30 ++-- scripts/ci/cmake/ci-ascent-gcc.cmake | 38 ----- scripts/ci/gitlab-ci/run.sh | 73 +++++----- 11 files changed, 304 insertions(+), 122 
deletions(-) create mode 100644 .gitlab/gitlab-ci-ascent.yml delete mode 100644 .gitlab/gitlab-ci-gitlabdotcom.yml delete mode 100644 .gitlab/gitlab-ci-olcf.yml create mode 100755 .gitlab/kokkos.sh create mode 100644 scripts/ci/cmake-v2/ci-ascent-cuda.cmake create mode 100644 scripts/ci/cmake-v2/ci-ascent-kokkos-cuda.cmake create mode 100644 scripts/ci/cmake-v2/ci-ascent-nvhpc.cmake rename scripts/ci/{cmake => cmake-v2}/ci-ascent-xl.cmake (53%) delete mode 100644 scripts/ci/cmake/ci-ascent-gcc.cmake diff --git a/.gitlab/gitlab-ci-ascent.yml b/.gitlab/gitlab-ci-ascent.yml new file mode 100644 index 0000000000..1d1e5c0a95 --- /dev/null +++ b/.gitlab/gitlab-ci-ascent.yml @@ -0,0 +1,130 @@ +# Ad-hoc build that runs in the ECP Hardware, concretely in OLCF Ascent. +.setup_env_ecpci: &setup_env_ecpci | + module purge + module load ${JOB_MODULES} + module list + export PATH="/gpfs/wolf/csc303/scratch/vbolea/ci/utils:$PATH" + +.ascent-common: + except: + - schedules + tags: + - batch + interruptible: true + variables: + CCACHE_BASEDIR: "/gpfs/wolf/" + CCACHE_DIR: "/gpfs/wolf/csc303/scratch/vbolea/ci/ccache" + # -isystem= is not affected by CCACHE_BASEDIR, thus we must ignore it + CCACHE_IGNOREOPTIONS: "-isystem=*" + CCACHE_NOHASHDIR: "true" + + CUDAHOSTCXX: "g++" + CUSTOM_CI_BUILDS_DIR: "/gpfs/wolf/csc303/scratch/vbolea/ci/adios2" + GITLAB_SITE: "OLCF Ascent" + SCHEDULER_PARAMETERS: -P CSC303 -W 1:00 -nnodes 1 -alloc_flags gpudefault + before_script: + - *setup_env_ecpci + - ccache -z + script: + - bash scripts/ci/gitlab-ci/run.sh update + - bash scripts/ci/gitlab-ci/run.sh configure + - jsrun -n1 -a1 -g1 -c40 -bpacked:40 bash scripts/ci/gitlab-ci/run.sh build + - jsrun -n1 -a1 -g1 -c2 bash scripts/ci/gitlab-ci/run.sh test + after_script: + - *setup_env_ecpci + - bash scripts/ci/gitlab-ci/run.sh submit + - ccache -s + +ascent-cuda: + variables: + # Order matters + JOB_MODULES: >- + DefApps + zstd + cuda/11.4.2 + git + gcc/10.2.0 + ninja + spectrum-mpi + lsf-tools + libffi + 
hdf5 + cmake + extends: + - .ascent-common + +ascent-kokkos-cuda: + variables: + # Order matters + JOB_MODULES: >- + DefApps + zstd + cuda/11.4.2 + git + gcc/10.2.0 + ninja + spectrum-mpi + lsf-tools + libffi + hdf5 + cmake + KOKKOS_OPTS: >- + -DKokkos_ARCH_POWER9=ON + -DKokkos_ARCH_VOLTA70=ON + -DKokkos_ENABLE_CUDA=ON + -DKokkos_ENABLE_CUDA_LAMBDA=ON + -DCMAKE_INSTALL_PREFIX:PATH=$CI_BUILDS_DIR/kokkos_install + + Kokkos_DIR: $CI_BUILDS_DIR/kokkos_install + before_script: + - *setup_env_ecpci + - ccache -z + - .gitlab/kokkos.sh "$CI_BUILDS_DIR" "3.7.01" $KOKKOS_OPTS + extends: + - .ascent-common + +ascent-nvhpc: + variables: + # Order matters + JOB_MODULES: >- + DefApps + zstd + nvhpc + git + spectrum-mpi + lsf-tools + libffi + hdf5 + cmake + extends: + - .ascent-common + +ascent-xl: + variables: + # Order matters + JOB_MODULES: >- + DefApps + zstd + cuda/11.4.2 + git + xl + ninja + spectrum-mpi + lsf-tools + libffi + hdf5 + cmake + extends: + - .ascent-common + +sync-github-prs: + tags: + - nobatch + only: + - schedules + variables: + GIT_STRATEGY: none + CUSTOM_CI_BUILDS_DIR: "/gpfs/wolf/csc303/scratch/vbolea/ci/adios2" + script: + - export PATH="/gpfs/wolf/csc303/scratch/vbolea/ci/utils:$PATH" + - SpackCIBridge.py ornladios/ADIOS2 git@code.ornl.gov:ecpcitest/adios2.git https://code.ornl.gov/ ecpcitest/adios2 --prereq-check=format --prereq-check=git_checks diff --git a/.gitlab/gitlab-ci-gitlabdotcom.yml b/.gitlab/gitlab-ci-gitlabdotcom.yml deleted file mode 100644 index eba4958546..0000000000 --- a/.gitlab/gitlab-ci-gitlabdotcom.yml +++ /dev/null @@ -1,6 +0,0 @@ -sync-github-prs: - tags: linux - only: - - schedules - script: - - scripts/ci/scripts/github-prs-to-gitlab.sh ornladios/adios2 code.ornl.gov ecpcitest/adios2 diff --git a/.gitlab/gitlab-ci-olcf.yml b/.gitlab/gitlab-ci-olcf.yml deleted file mode 100644 index ac593185a1..0000000000 --- a/.gitlab/gitlab-ci-olcf.yml +++ /dev/null @@ -1,28 +0,0 @@ -.all-steps: - except: - - schedules - variables: - 
GITLAB_SITE: "OLCF GitLab" - CMAKE_ENV_MODULE: "cmake" - script: - - bash scripts/ci/gitlab-ci/run.sh update - - bash scripts/ci/gitlab-ci/run.sh configure - - bash scripts/ci/gitlab-ci/run.sh build - - bash scripts/ci/gitlab-ci/run.sh test - -ascent-xl: - extends: - - .all-steps - tags: [nobatch] - -ascent-gcc: - extends: - - .all-steps - tags: [nobatch] - -#ascent-xl-smpi: -# extends: -# - .all-steps -# tags: [batch] -# variables: -# SCHEDULER_PARAMETERS: "-P CSC303 -W 1:00 -nnodes 1" diff --git a/.gitlab/kokkos.sh b/.gitlab/kokkos.sh new file mode 100755 index 0000000000..698fe84ac7 --- /dev/null +++ b/.gitlab/kokkos.sh @@ -0,0 +1,30 @@ +#!/usr/bin/env bash +set -x + +WORKDIR="$1" +VERSION="$2" + +shift 2 + +if [ ! -d "$WORKDIR" ] || [ -z "$VERSION" ] +then + echo "[E] missing args: Invoke as .gitlab/ci/config/kokkos.sh [extra_args]" + exit 1 +fi + +# Build and install Kokkos +curl -L "https://github.com/kokkos/kokkos/archive/refs/tags/$VERSION.tar.gz" \ + | tar -C "$WORKDIR" -xzf - + +cmake -S "$WORKDIR/kokkos-$VERSION" -B "$WORKDIR/kokkos_build" \ + "-DBUILD_SHARED_LIBS=ON" \ + "-DCMAKE_BUILD_TYPE:STRING=release" \ + "-DCMAKE_CXX_COMPILER_LAUNCHER=ccache" \ + "-DCMAKE_CXX_STANDARD:STRING=17" \ + "-DCMAKE_CXX_EXTENSIONS:BOOL=OFF" \ + "-DCMAKE_POSITION_INDEPENDENT_CODE:BOOL=ON" \ + "-DCMAKE_CXX_COMPILER:STRING=$WORKDIR/kokkos-$VERSION/bin/nvcc_wrapper" \ + $* + +cmake --build "$WORKDIR/kokkos_build" +cmake --install "$WORKDIR/kokkos_build" diff --git a/CTestCustom.cmake.in b/CTestCustom.cmake.in index 5dcb601175..834a6a202e 100644 --- a/CTestCustom.cmake.in +++ b/CTestCustom.cmake.in @@ -22,6 +22,8 @@ list(APPEND CTEST_CUSTOM_WARNING_EXCEPTION "warning: template parameter ... 
is not used in declaring the parameter types of function template" "warning: command-line option '.*' is valid for Fortran but not for C" "Warning #20208-D: '.*' is treated as '.*' in device code" + "Warning: '.*' is treated as '.*' in device code" + ".*was specified as both a system and non-system include directory.*" ) list(APPEND CTEST_CUSTOM_COVERAGE_EXCLUDE ".*/thirdparty/.*" diff --git a/scripts/ci/cmake-v2/ci-ascent-cuda.cmake b/scripts/ci/cmake-v2/ci-ascent-cuda.cmake new file mode 100644 index 0000000000..93febd6614 --- /dev/null +++ b/scripts/ci/cmake-v2/ci-ascent-cuda.cmake @@ -0,0 +1,27 @@ +# Client maintainer: vicente.bolea@kitware.com + +set(dashboard_cache " +ADIOS2_USE_BZip2:BOOL=OFF +ADIOS2_USE_CUDA:BOOL=ON +ADIOS2_USE_DataMan:BOOL=ON +ADIOS2_USE_Fortran:BOOL=ON +ADIOS2_USE_MPI:BOOL=OFF +ADIOS2_USE_PNG:BOOL=OFF +ADIOS2_USE_Python:BOOL=OFF +ADIOS2_USE_SST:BOOL=ON + +CMAKE_C_COMPILER_LAUNCHER=ccache +CMAKE_CXX_COMPILER_LAUNCHER=ccache +CMAKE_CUDA_COMPILER_LAUNCHER=ccache +CMAKE_DISABLE_FIND_PACKAGE_BISON=ON +CMAKE_DISABLE_FIND_PACKAGE_FLEX=ON +CMAKE_Fortran_FLAGS:STRING=-Wall +") + +set(CTEST_TEST_ARGS + PARALLEL_LEVEL 8 + EXCLUDE ".*/BPWRCUDA.ADIOS2BPCUDAWrong/.*BP4.Serial|.*/BPWRCUDA.ADIOS2BPCUDAMemSel/.*BP4.Serial" + ) +set(CTEST_CMAKE_GENERATOR "Ninja") +list(APPEND CTEST_UPDATE_NOTES_FILES "${CMAKE_CURRENT_LIST_FILE}") +include(${CMAKE_CURRENT_LIST_DIR}/ci-common.cmake) diff --git a/scripts/ci/cmake-v2/ci-ascent-kokkos-cuda.cmake b/scripts/ci/cmake-v2/ci-ascent-kokkos-cuda.cmake new file mode 100644 index 0000000000..b906bed2e6 --- /dev/null +++ b/scripts/ci/cmake-v2/ci-ascent-kokkos-cuda.cmake @@ -0,0 +1,32 @@ +# Client maintainer: vicente.bolea@kitware.com + +set(kokkos_install_path $ENV{Kokkos_DIR}) + +set(ENV{CC} gcc) +set(ENV{CXX} g++) +set(ENV{FC} gfortran) + +set(dashboard_cache " +ADIOS2_USE_BZip2:BOOL=OFF +ADIOS2_USE_DataMan:BOOL=ON +ADIOS2_USE_Fortran:BOOL=OFF +ADIOS2_USE_MPI:BOOL=OFF +ADIOS2_USE_PNG:BOOL=OFF 
+ADIOS2_USE_Python:BOOL=OFF +ADIOS2_USE_SST:BOOL=ON +ADIOS2_USE_Kokkos=ON + +CMAKE_C_COMPILER_LAUNCHER=ccache +CMAKE_CXX_COMPILER_LAUNCHER=ccache +CMAKE_CUDA_COMPILER_LAUNCHER=ccache +CMAKE_DISABLE_FIND_PACKAGE_BISON=ON +CMAKE_DISABLE_FIND_PACKAGE_FLEX=ON +") + +set(CTEST_TEST_ARGS + PARALLEL_LEVEL 8 + EXCLUDE ".*/BPWRCUDA.ADIOS2BPCUDAWrong/.*BP4.Serial|.*/BPWRCUDA.ADIOS2BPCUDAMemSel/.*BP4.Serial|Engine.Staging.TestThreads.*" + ) +set(CTEST_CMAKE_GENERATOR "Ninja") +list(APPEND CTEST_UPDATE_NOTES_FILES "${CMAKE_CURRENT_LIST_FILE}") +include(${CMAKE_CURRENT_LIST_DIR}/ci-common.cmake) diff --git a/scripts/ci/cmake-v2/ci-ascent-nvhpc.cmake b/scripts/ci/cmake-v2/ci-ascent-nvhpc.cmake new file mode 100644 index 0000000000..c6aa75d3b9 --- /dev/null +++ b/scripts/ci/cmake-v2/ci-ascent-nvhpc.cmake @@ -0,0 +1,30 @@ +# Client maintainer: vicente.bolea@kitware.com + +set(ENV{CC} nvc) +set(ENV{CXX} nvc++) +set(ENV{FC} nvfortran) + +set(dashboard_cache " +ADIOS2_USE_BZip2:BOOL=OFF +ADIOS2_USE_CUDA:BOOL=ON +ADIOS2_USE_DataMan:BOOL=ON +ADIOS2_USE_Fortran:BOOL=ON +ADIOS2_USE_MPI:BOOL=OFF +ADIOS2_USE_PNG:BOOL=OFF +ADIOS2_USE_Python:BOOL=OFF +ADIOS2_USE_SST:BOOL=ON + +CMAKE_C_COMPILER_LAUNCHER=ccache +CMAKE_CXX_COMPILER_LAUNCHER=ccache +CMAKE_DISABLE_FIND_PACKAGE_BISON=ON +CMAKE_DISABLE_FIND_PACKAGE_FLEX=ON +CMAKE_NINJA_FORCE_RESPONSE_FILE=OFF +") + +set(CTEST_TEST_ARGS + PARALLEL_LEVEL 8 + EXCLUDE ".*/BPWRCUDA.ADIOS2BPCUDAWrong/.*BP4.Serial|.*/BPWRCUDA.ADIOS2BPCUDAMemSel/.*BP4.Serial|Install.*Fortran" + ) +set(CTEST_CMAKE_GENERATOR "Unix Makefiles") +list(APPEND CTEST_UPDATE_NOTES_FILES "${CMAKE_CURRENT_LIST_FILE}") +include(${CMAKE_CURRENT_LIST_DIR}/ci-common.cmake) diff --git a/scripts/ci/cmake/ci-ascent-xl.cmake b/scripts/ci/cmake-v2/ci-ascent-xl.cmake similarity index 53% rename from scripts/ci/cmake/ci-ascent-xl.cmake rename to scripts/ci/cmake-v2/ci-ascent-xl.cmake index fec3729755..9f73bab0de 100644 --- a/scripts/ci/cmake/ci-ascent-xl.cmake +++ 
b/scripts/ci/cmake-v2/ci-ascent-xl.cmake @@ -1,15 +1,4 @@ -# Client maintainer: chuck.atkins@kitware.com - -find_package(EnvModules REQUIRED) - -env_module(purge) -env_module(load git) -env_module(load xl) -env_module(load hdf5) -env_module(load libfabric) -env_module(load python/3.7.0) -env_module(load zfp) -env_module(load zeromq) +# Client maintainer: vicente.bolea@kitware.com set(ENV{CC} xlc) set(ENV{CXX} xlc++) @@ -17,20 +6,25 @@ set(ENV{FC} xlf) set(dashboard_cache " ADIOS2_USE_BZip2:BOOL=OFF -ADIOS2_USE_Blosc:BOOL=OFF ADIOS2_USE_DataMan:BOOL=ON -ADIOS2_USE_Fortran:BOOL=ON -ADIOS2_USE_HDF5:BOOL=ON +ADIOS2_USE_Fortran:BOOL=OFF ADIOS2_USE_MPI:BOOL=OFF +ADIOS2_USE_PNG:BOOL=OFF ADIOS2_USE_Python:BOOL=OFF ADIOS2_USE_SST:BOOL=ON -ADIOS2_USE_SZ:BOOL=OFF -ADIOS2_USE_ZeroMQ:STRING=ON +ADIOS2_USE_ZeroMQ:STRING=OFF ADIOS2_USE_ZFP:BOOL=OFF +ADIOS2_USE_SZ:BOOL=OFF +ADIOS2_USE_Blosc:BOOL=OFF + +CMAKE_C_COMPILER_LAUNCHER=ccache +CMAKE_CXX_COMPILER_LAUNCHER=ccache +CMAKE_DISABLE_FIND_PACKAGE_BISON=ON +CMAKE_DISABLE_FIND_PACKAGE_FLEX=ON ") set(NCPUS 4) set(CTEST_TEST_ARGS PARALLEL_LEVEL 8) -set(CTEST_CMAKE_GENERATOR "Unix Makefiles") +set(CTEST_CMAKE_GENERATOR "Ninja") list(APPEND CTEST_UPDATE_NOTES_FILES "${CMAKE_CURRENT_LIST_FILE}") include(${CMAKE_CURRENT_LIST_DIR}/ci-common.cmake) diff --git a/scripts/ci/cmake/ci-ascent-gcc.cmake b/scripts/ci/cmake/ci-ascent-gcc.cmake deleted file mode 100644 index ebf1fd0743..0000000000 --- a/scripts/ci/cmake/ci-ascent-gcc.cmake +++ /dev/null @@ -1,38 +0,0 @@ -# Client maintainer: chuck.atkins@kitware.com - -find_package(EnvModules REQUIRED) - -env_module(purge) -env_module(load git) -env_module(load gcc/8.1.1) -env_module(load hdf5) -env_module(load libfabric) -env_module(load python/3.7.0) -env_module(load zfp) -env_module(load zeromq) - -set(ENV{CC} gcc) -set(ENV{CXX} g++) -set(ENV{FC} gfortran) - -set(dashboard_cache " -ADIOS2_USE_BZip2:BOOL=OFF -ADIOS2_USE_Blosc:BOOL=OFF -ADIOS2_USE_DataMan:BOOL=ON -ADIOS2_USE_Fortran:BOOL=ON 
-ADIOS2_USE_HDF5:BOOL=ON -ADIOS2_USE_MPI:BOOL=OFF -ADIOS2_USE_Python:BOOL=OFF -ADIOS2_USE_SST:BOOL=ON -ADIOS2_USE_SZ:BOOL=OFF -ADIOS2_USE_ZeroMQ:STRING=ON -ADIOS2_USE_ZFP:BOOL=OFF - -CMAKE_Fortran_FLAGS:STRING=-Wall -") - -set(NCPUS 4) -set(CTEST_TEST_ARGS PARALLEL_LEVEL 8) -set(CTEST_CMAKE_GENERATOR "Unix Makefiles") -list(APPEND CTEST_UPDATE_NOTES_FILES "${CMAKE_CURRENT_LIST_FILE}") -include(${CMAKE_CURRENT_LIST_DIR}/ci-common.cmake) diff --git a/scripts/ci/gitlab-ci/run.sh b/scripts/ci/gitlab-ci/run.sh index 05ccc39c9a..02ee8c630f 100755 --- a/scripts/ci/gitlab-ci/run.sh +++ b/scripts/ci/gitlab-ci/run.sh @@ -1,48 +1,57 @@ #!/bin/bash --login - -if [ -n "${GITLAB_SITE}" ] -then - export CI_SITE_NAME="${GITLAB_SITE}" -else - export CI_SITE_NAME="GitLab CI" -fi +set -e export CI_BUILD_NAME="${CI_COMMIT_BRANCH#github/}_${CI_JOB_NAME}" -export CI_SOURCE_DIR="${CI_PROJECT_DIR}" -export CI_ROOT_DIR="${CI_PROJECT_DIR}/.." -export CI_BIN_DIR="${CI_ROOT_DIR}/${CI_BUILD_NAME}" export CI_COMMIT_REF=${CI_COMMIT_SHA} +export CI_ROOT_DIR="${CI_PROJECT_DIR}/.." +export CI_SITE_NAME="${GITLAB_SITE}" +export CI_SOURCE_DIR="${CI_PROJECT_DIR}" -STEP=$1 -CTEST_SCRIPT=scripts/ci/cmake/ci-${CI_JOB_NAME}.cmake - -# Update and Test steps enable an extra step -CTEST_STEP_ARGS="" -case ${STEP} in - test) CTEST_STEP_ARGS="${CTEST_STEP_ARGS} -Ddashboard_do_end=ON" ;; -esac -CTEST_STEP_ARGS="${CTEST_STEP_ARGS} -Ddashboard_do_${STEP}=ON" +export CI_BIN_DIR="${CI_ROOT_DIR}/${CI_BUILD_NAME}" -if [ -n "${CMAKE_ENV_MODULE}" ] +readonly CTEST_SCRIPT=scripts/ci/cmake-v2/ci-${CI_JOB_NAME}.cmake +if [ ! 
-f "$CTEST_SCRIPT" ] then - module load ${CMAKE_ENV_MODULE} + echo "[E] Variable files does not exits: $CTEST_SCRIPT" + exit 1 +fi - echo "**********module avail Begin************" - module avail - echo "**********module avail End**************" +readonly STEP=$1 +if [ -z "$STEP" ] +then + echo "[E] No argument given: $*" + exit 2 fi -CTEST=ctest +# In OLCF Gitlab our PRs branches tip commit is not the head commit of the PR, +# it is instead the so called merged_commit_sha as described in the GitHub Rest +# API for pull requests. We need to report to the CDASH the original commit +# thus, we set it here using the CTEST_UPDATE_VERSION_OVERRIDE CMake variable +if [[ ${CI_COMMIT_BRANCH} =~ ^pr[0-9]+_.*$ ]] +then + # Original commit it is always its 2nd parent + original_sha=$(git rev-parse "${CI_COMMIT_REF}^2") + readonly UPDATE_ARGS="-DCTEST_UPDATE_VERSION_OVERRIDE=${original_sha}" +fi -echo "**********Env Begin**********" -env | sort -echo "**********Env End************" +declare -a CTEST_STEP_ARGS=("-Ddashboard_full=OFF") +case ${STEP} in + update) CTEST_STEP_ARGS+=("${UPDATE_ARGS}") ;; + build) CTEST_STEP_ARGS+=("-Ddashboard_do_submit=OFF") ;; + test) CTEST_STEP_ARGS+=("-Ddashboard_do_submit=OFF") ;; + submit) CTEST_STEP_ARGS+=("-Ddashboard_do_submit_only=ON" "-Ddashboard_do_build=ON" "-Ddashboard_do_test=ON") ;; +esac +CTEST_STEP_ARGS+=("-Ddashboard_do_${STEP}=ON") echo "**********CTest Begin**********" -${CTEST} --version -echo ${CTEST} -VV -S ${CTEST_SCRIPT} -Ddashboard_full=OFF ${CTEST_STEP_ARGS} -${CTEST} -VV -S ${CTEST_SCRIPT} -Ddashboard_full=OFF ${CTEST_STEP_ARGS} +echo "ctest -VV -S ${CTEST_SCRIPT} ${CTEST_STEP_ARGS[*]}" +ctest -VV -S "${CTEST_SCRIPT}" "${CTEST_STEP_ARGS[@]}" RET=$? 
echo "**********CTest End************" -exit ${RET} +# EC: 0-127 this script errors, 128-INF ctest errors +if [ $RET -ne 0 ] +then + (( RET += 127 )) +fi +exit $RET From 6bbf4333e2daa54c35588f2aa792d8f6399034cf Mon Sep 17 00:00:00 2001 From: Vicente Adolfo Bolea Sanchez Date: Fri, 12 May 2023 12:45:24 -0400 Subject: [PATCH 3/9] ci,olcf,crusher: enable Crusher CI (cherry picked from commit e09518946caa1b94deeba6f9543f982302ab8f4a) --- .gitlab/config/SpackCIBridge.py | 692 ++++++++++++++++++ .gitlab/config/ccache.cmake | 61 ++ .gitlab/config/dynamic_pipeline.yml.in | 9 + .gitlab/config/generate_pipelines.py | 90 +++ .gitlab/{ => config}/kokkos.sh | 1 - .gitlab/gitlab-ci-ascent.yml | 15 +- .gitlab/gitlab-ci-crusher.yml | 223 ++++++ flake8.cfg | 2 +- scripts/ci/cmake-v2/ci-crusher-cray.cmake | 32 + .../ci/cmake-v2/ci-crusher-kokkos-hip.cmake | 31 + scripts/ci/gitlab-ci/run.sh | 29 +- scripts/ci/gitlab-ci/setup-vars.sh | 41 ++ 12 files changed, 1197 insertions(+), 29 deletions(-) create mode 100755 .gitlab/config/SpackCIBridge.py create mode 100644 .gitlab/config/ccache.cmake create mode 100644 .gitlab/config/dynamic_pipeline.yml.in create mode 100755 .gitlab/config/generate_pipelines.py rename .gitlab/{ => config}/kokkos.sh (90%) create mode 100644 .gitlab/gitlab-ci-crusher.yml create mode 100644 scripts/ci/cmake-v2/ci-crusher-cray.cmake create mode 100644 scripts/ci/cmake-v2/ci-crusher-kokkos-hip.cmake create mode 100755 scripts/ci/gitlab-ci/setup-vars.sh diff --git a/.gitlab/config/SpackCIBridge.py b/.gitlab/config/SpackCIBridge.py new file mode 100755 index 0000000000..72c189e480 --- /dev/null +++ b/.gitlab/config/SpackCIBridge.py @@ -0,0 +1,692 @@ +#!/usr/bin/env python3 + +import argparse +import atexit +import base64 +from datetime import datetime, timedelta, timezone +import dateutil.parser +from github import Github +import json +import os +import re +import subprocess +import sys +import tempfile +import urllib.parse +import urllib.request + + +class 
SpackCIBridge(object): + + def __init__(self, gitlab_repo="", gitlab_host="", gitlab_project="", github_project="", + disable_status_post=True, sync_draft_prs=False, + main_branch=None, prereq_checks=[]): + self.gitlab_repo = gitlab_repo + self.github_project = github_project + github_token = os.environ.get('GITHUB_TOKEN') + self.github_repo = "https://{0}@github.com/{1}.git".format(github_token, self.github_project) + self.py_github = Github(github_token) + self.py_gh_repo = self.py_github.get_repo(self.github_project, lazy=True) + + self.merge_msg_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)") + self.unmergeable_shas = [] + + self.post_status = not disable_status_post + self.sync_draft_prs = sync_draft_prs + self.main_branch = main_branch + self.currently_running_sha = None + self.latest_tested_main_commit = None + + self.prereq_checks = prereq_checks + + dt = datetime.now(timezone.utc) + timedelta(minutes=-60) + self.time_threshold_brief = urllib.parse.quote_plus(dt.isoformat(timespec="seconds")) + + self.pipeline_api_template = gitlab_host + self.pipeline_api_template += "/api/v4/projects/" + self.pipeline_api_template += urllib.parse.quote_plus(gitlab_project) + self.pipeline_api_template += "/pipelines?ref={0}" + + self.commit_api_template = gitlab_host + self.commit_api_template += "/api/v4/projects/" + self.commit_api_template += urllib.parse.quote_plus(gitlab_project) + self.commit_api_template += "/repository/commits/{0}" + + self.cached_commits = {} + + @atexit.register + def cleanup(): + """Shutdown ssh-agent upon program termination.""" + if "SSH_AGENT_PID" in os.environ: + print(" Shutting down ssh-agent({0})".format(os.environ["SSH_AGENT_PID"])) + subprocess.run(["ssh-agent", "-k"], check=True) + + def setup_ssh(self, ssh_key_base64): + """Start the ssh agent.""" + print("Starting ssh-agent") + output = subprocess.run(["ssh-agent", "-s"], check=True, stdout=subprocess.PIPE).stdout + + # Search for PID in output. 
+ pid_regexp = re.compile(r"SSH_AGENT_PID=([0-9]+)") + match = pid_regexp.search(output.decode("utf-8")) + if match is None: + print("WARNING: could not detect ssh-agent PID.", file=sys.stderr) + print("ssh-agent will not be killed upon program termination", file=sys.stderr) + else: + pid = match.group(1) + os.environ["SSH_AGENT_PID"] = pid + self.cleanup_ssh_agent = True + + # Search for socket in output. + socket_regexp = re.compile(r"SSH_AUTH_SOCK=([^;]+);") + match = socket_regexp.search(output.decode("utf-8")) + if match is None: + print("WARNING: could not detect ssh-agent socket.", file=sys.stderr) + print("Key will be added to caller's ssh-agent (if any)", file=sys.stderr) + else: + socket = match.group(1) + os.environ["SSH_AUTH_SOCK"] = socket + + # Add the key. + ssh_key = base64.b64decode(ssh_key_base64) + ssh_key = ssh_key.replace(b"\r", b"") + with tempfile.NamedTemporaryFile() as fp: + fp.write(ssh_key) + fp.seek(0) + subprocess.run(["ssh-add", fp.name], check=True) + + def get_commit(self, commit): + """ Check our cache for a commit on GitHub. 
+ If we don't have it yet, use the GitHub API to retrieve it.""" + if commit not in self.cached_commits: + self.cached_commits[commit] = self.py_gh_repo.get_commit(sha=commit) + return self.cached_commits[commit] + + def list_github_prs(self): + """ Return two dicts of data about open PRs on GitHub: + one for all open PRs, and one for open PRs that are not up-to-date on GitLab.""" + pr_dict = {} + pulls = self.py_gh_repo.get_pulls(state="open") + print("Rate limit after get_pulls(): {}".format(self.py_github.rate_limiting[0])) + for pull in pulls: + backlogged = False + push = True + if pull.draft and not self.sync_draft_prs: + print("Skipping draft PR {0} ({1})".format(pull.number, pull.head.ref)) + backlogged = "draft" + push = False + + pr_string = "pr{0}_{1}".format(pull.number, pull.head.ref) + + if push and pull.updated_at < datetime.now() + timedelta(minutes=-2880): + # Skip further analysis of this PR if it hasn't been updated in 48 hours. + # This helps us avoid wasting our rate limit on PRs with merge conflicts. + print("Skip pushing stale PR {0}".format(pr_string)) + backlogged = "stale" + push = False + + if push: + # Determine if this PR still needs to be pushed to GitLab. This happens in one of two cases: + # 1) we have never pushed it before + # 2) we have pushed it before, but the HEAD sha has changed since we pushed it last + log_args = ["git", "log", "--pretty=%s", "gitlab/{0}".format(pr_string)] + try: + merge_commit_msg = subprocess.run( + log_args, check=True, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL).stdout + match = self.merge_msg_regex.match(merge_commit_msg.decode("utf-8")) + if match and (match.group(1) == pull.head.sha or match.group(2) == pull.head.sha): + print("Skip pushing {0} because GitLab already has HEAD {1}".format(pr_string, pull.head.sha)) + push = False + except subprocess.CalledProcessError: + # This occurs when it's a new PR that hasn't been pushed to GitLab yet. 
+ pass + + if push: + # Check the PRs-to-be-pushed to see if any of them should be considered "backlogged". + # We currently recognize three types of backlogged PRs: + # 1) Some required "prerequisite checks" have not yet completed successfully. + # 2) The PR is based on a version of the "main branch" that has not yet been tested + # 3) Draft PRs. Handled earlier in this function. + if not backlogged and self.prereq_checks: + checks_desc = "waiting for {} check to succeed" + checks_to_verify = self.prereq_checks.copy() + pr_check_runs = self.get_commit(pull.head.sha).get_check_runs() + for check in pr_check_runs: + if check.name in checks_to_verify: + checks_to_verify.remove(check.name) + if check.conclusion != "success": + backlogged = checks_desc.format(check.name) + push = False + break + if not backlogged and checks_to_verify: + backlogged = checks_desc.format(checks_to_verify[0]) + push = False + if backlogged: + print("Skip pushing {0} because of {1}".format(pr_string, backlogged)) + + if not backlogged: + if self.main_branch and pull.base.ref == self.main_branch: + # Check if we should defer pushing/testing this PR because it is based on "too new" of a commit + # of the main branch. + tmp_pr_branch = f"temporary_{pr_string}" + subprocess.run(["git", "fetch", "--unshallow", "github", + f"refs/pull/{pull.number}/head:{tmp_pr_branch}"], check=True) + # Get the merge base between this PR and the main branch. + try: + merge_base_sha = subprocess.run( + ["git", "merge-base", tmp_pr_branch, f"github/{self.main_branch}"], + check=True, stdout=subprocess.PIPE).stdout.strip() + except subprocess.CalledProcessError: + print(f"'git merge-base {tmp_pr_branch} github/{self.main_branch}' " + "returned non-zero. 
Skipping") + self.unmergeable_shas.append(pull.head.sha) + continue + + repo_head_sha = subprocess.run( + ["git", "rev-parse", tmp_pr_branch], + check=True, stdout=subprocess.PIPE).stdout.decode("utf-8").strip() + + if pull.head.sha != repo_head_sha: + # If gh repo and api don't agree on what the head sha is, don't + # push. Instead log an error message and backlog the PR. + a_sha, r_sha = pull.head.sha[:7], repo_head_sha[:7] + print(f"Skip pushing {pr_string} because api says HEAD is {a_sha}, " + f"while repo says HEAD is {r_sha}") + backlogged = f"GitHub HEAD shas out of sync (repo={r_sha}, API={a_sha})" + push = False + # Check if our PR's merge base is an ancestor of the latest tested main branch commit. + elif subprocess.run( + ["git", "merge-base", "--is-ancestor", merge_base_sha, self.latest_tested_main_commit] + ).returncode == 0: + print(f"{tmp_pr_branch}'s merge base IS an ancestor of latest_tested_main " + f"{merge_base_sha} vs. {self.latest_tested_main_commit}") + try: + subprocess.run(["git", "checkout", self.latest_tested_main_commit], check=True) + subprocess.run(["git", "checkout", "-b", pr_string], check=True) + commit_msg = f"Merge {pull.head.sha} into {self.latest_tested_main_commit}" + subprocess.run( + ["git", "merge", "--no-ff", "-m", commit_msg, tmp_pr_branch], + check=True) + print(f"Merge succeeded, ready to push {pr_string} to GitLab for CI pipeline testing") + except subprocess.CalledProcessError: + print(f"Failed to merge PR {pull.number} ({pull.head.ref}) with latest tested " + f"{self.main_branch} ({self.latest_tested_main_commit}). Skipping") + self.unmergeable_shas.append(pull.head.sha) + subprocess.run(["git", "merge", "--abort"]) + backlogged = "merge conflicts with {}".format(self.main_branch) + push = False + continue + else: + print(f"Skip pushing {pr_string} because its merge base is NOT an ancestor of " + f"latest_tested_main {merge_base_sha} vs. 
{self.latest_tested_main_commit}") + backlogged = "base" + push = False + else: + # If the --main-branch CLI argument wasn't passed, or if this PR doesn't target that branch, + # then we will push the merge commit that was automatically created by GitHub to GitLab + # where it will kick off a CI pipeline. + try: + subprocess.run(["git", "fetch", "--unshallow", "github", + f"{pull.merge_commit_sha}:{pr_string}"], check=True) + except subprocess.CalledProcessError: + print("Failed to locally checkout PR {0} ({1}). Skipping" + .format(pull.number, pull.merge_commit_sha)) + backlogged = "GitLab failed to checkout this branch" + push = False + continue + + pr_dict[pr_string] = { + 'base_sha': pull.base.sha, + 'head_sha': pull.head.sha, + 'push': push, + 'backlogged': backlogged, + } + + def listify_dict(d): + pr_strings = sorted(d.keys()) + base_shas = [d[s]['base_sha'] for s in pr_strings] + head_shas = [d[s]['head_sha'] for s in pr_strings] + b_logged = [d[s]['backlogged'] for s in pr_strings] + return { + "pr_strings": pr_strings, + "base_shas": base_shas, + "head_shas": head_shas, + "backlogged": b_logged, + } + all_open_prs = listify_dict(pr_dict) + filtered_pr_dict = {k: v for (k, v) in pr_dict.items() if v['push']} + filtered_open_prs = listify_dict(filtered_pr_dict) + print("All Open PRs:") + for pr_string in all_open_prs['pr_strings']: + print(" {0}".format(pr_string)) + print("Filtered Open PRs:") + for pr_string in filtered_open_prs['pr_strings']: + print(" {0}".format(pr_string)) + print("Rate limit at the end of list_github_prs(): {}".format(self.py_github.rate_limiting[0])) + return [all_open_prs, filtered_open_prs] + + def list_github_protected_branches(self): + """ Return a list of protected branch names from GitHub.""" + branches = self.py_gh_repo.get_branches() + print("Rate limit after get_branches(): {}".format(self.py_github.rate_limiting[0])) + protected_branches = [br.name for br in branches if br.protected] + protected_branches = 
sorted(protected_branches) + if self.currently_running_sha: + print("Skip pushing {0} because it already has a pipeline running ({1})" + .format(self.main_branch, self.currently_running_sha)) + protected_branches.remove(self.main_branch) + print("Protected branches:") + for protected_branch in protected_branches: + print(" {0}".format(protected_branch)) + return protected_branches + + def list_github_tags(self): + """ Return a list of tag names from GitHub.""" + tag_list = self.py_gh_repo.get_tags() + print("Rate limit after get_tags(): {}".format(self.py_github.rate_limiting[0])) + tags = sorted([tag.name for tag in tag_list]) + print("Tags:") + for tag in tags: + print(" {0}".format(tag)) + return tags + + def setup_git_repo(self): + """Initialize a bare git repository with two remotes: + one for GitHub and one for GitLab. + If main_branch was specified, we also fetch that branch from GitHub. + """ + subprocess.run(["git", "init"], check=True) + subprocess.run(["git", "config", "user.email", "noreply@spack.io"], check=True) + subprocess.run(["git", "config", "user.name", "spackbot"], check=True) + subprocess.run(["git", "config", "advice.detachedHead", "false"], check=True) + subprocess.run(["git", "remote", "add", "github", self.github_repo], check=True) + subprocess.run(["git", "remote", "add", "gitlab", self.gitlab_repo], check=True) + + # Shallow fetch from GitLab. + self.gitlab_shallow_fetch() + + if self.main_branch: + subprocess.run(["git", "fetch", "--unshallow", "github", self.main_branch], check=True) + + def get_gitlab_pr_branches(self): + """Query GitLab for branches that have already been copied over from GitHub PRs. + Return the string output of `git branch --remotes --list gitlab/pr*`. 
+ """ + branch_args = ["git", "branch", "--remotes", "--list", "gitlab/pr*"] + self.gitlab_pr_output = \ + subprocess.run(branch_args, check=True, stdout=subprocess.PIPE).stdout + + def gitlab_shallow_fetch(self): + """Perform a shallow fetch from GitLab""" + fetch_args = ["git", "fetch", "-q", "--depth=1", "gitlab"] + subprocess.run(fetch_args, check=True, stdout=subprocess.PIPE).stdout + + def get_open_refspecs(self, open_prs): + """Return a list of refspecs to push given a list of open PRs.""" + print("Building initial lists of refspecs to fetch and push") + pr_strings = open_prs["pr_strings"] + base_shas = open_prs["base_shas"] + backlogged = open_prs["backlogged"] + open_refspecs = [] + for open_pr, base_sha, backlog in zip(pr_strings, base_shas, backlogged): + open_refspecs.append("{0}:{0}".format(open_pr)) + print(" pushing {0} (based on {1})".format(open_pr, base_sha)) + return open_refspecs + + def update_refspecs_for_protected_branches(self, protected_branches, open_refspecs, fetch_refspecs): + """Update our refspecs lists for protected branches from GitHub.""" + for protected_branch in protected_branches: + fetch_refspecs.append("+refs/heads/{0}:refs/remotes/{0}".format(protected_branch)) + open_refspecs.append("refs/heads/{0}:refs/heads/{0}".format(protected_branch)) + return open_refspecs, fetch_refspecs + + def update_refspecs_for_tags(self, tags, open_refspecs, fetch_refspecs): + """Update our refspecs lists for tags from GitHub.""" + for tag in tags: + fetch_refspecs.append("+refs/tags/{0}:refs/tags/{0}".format(tag)) + open_refspecs.append("refs/tags/{0}:refs/tags/{0}".format(tag)) + return open_refspecs, fetch_refspecs + + def fetch_github_branches(self, fetch_refspecs): + """Perform `git fetch` for a given list of refspecs.""" + print("Fetching GitHub refs for open PRs") + fetch_args = ["git", "fetch", "-q", "--unshallow", "github"] + fetch_refspecs + subprocess.run(fetch_args, check=True) + + def build_local_branches(self, protected_branches): + 
"""Create local branches for a list of protected branches.""" + print("Building local branches for protected branches") + for branch in protected_branches: + local_branch_name = "{0}".format(branch) + remote_branch_name = "refs/remotes/{0}".format(branch) + subprocess.run(["git", "branch", "-q", local_branch_name, remote_branch_name], check=True) + + def make_status_for_pipeline(self, pipeline): + """Generate POST data to create a GitHub status from a GitLab pipeline + API response + """ + post_data = {} + if "status" not in pipeline: + return post_data + + if pipeline["status"] == "created": + post_data["state"] = "pending" + post_data["description"] = "Pipeline has been created" + + elif pipeline["status"] == "waiting_for_resource": + post_data["state"] = "pending" + post_data["description"] = "Pipeline is waiting for resources" + + elif pipeline["status"] == "preparing": + post_data["state"] = "pending" + post_data["description"] = "Pipeline is preparing" + + elif pipeline["status"] == "pending": + post_data["state"] = "pending" + post_data["description"] = "Pipeline is pending" + + elif pipeline["status"] == "running": + post_data["state"] = "pending" + post_data["description"] = "Pipeline is running" + + elif pipeline["status"] == "manual": + post_data["state"] = "pending" + post_data["description"] = "Pipeline is running manually" + + elif pipeline["status"] == "scheduled": + post_data["state"] = "pending" + post_data["description"] = "Pipeline is scheduled" + + elif pipeline["status"] == "failed": + post_data["state"] = "error" + post_data["description"] = "Pipeline failed" + + elif pipeline["status"] == "canceled": + # Do not post canceled pipeline status to GitHub, it's confusing to our users. + # This usually happens when a PR gets force-pushed. The next time the sync script runs + # it will post a status for the newly force-pushed commit. 
+ return {} + + elif pipeline["status"] == "skipped": + post_data["state"] = "failure" + post_data["description"] = "Pipeline was skipped" + + elif pipeline["status"] == "success": + post_data["state"] = "success" + post_data["description"] = "Pipeline succeeded" + + post_data["target_url"] = pipeline["web_url"] + return post_data + + def dedupe_pipelines(self, api_response): + """Prune pipelines API response to only include the most recent result for each SHA""" + pipelines = {} + for response in api_response: + sha = response['sha'] + if sha not in pipelines: + pipelines[sha] = response + else: + existing_datetime = dateutil.parser.parse(pipelines[sha]['updated_at']) + current_datetime = dateutil.parser.parse(response['updated_at']) + if current_datetime > existing_datetime: + pipelines[sha] = response + return pipelines + + def find_pr_sha(self, tested_sha): + api_url = self.commit_api_template.format(tested_sha) + + try: + request = urllib.request.Request(api_url) + if "GITLAB_TOKEN" in os.environ: + request.add_header("Authorization", "Bearer %s" % os.environ["GITLAB_TOKEN"]) + response = urllib.request.urlopen(request) + except OSError: + print('Failed to fetch commit for tested sha {0}'.format(tested_sha)) + return None + + response_data = response.read() + + try: + tested_commit_info = json.loads(response_data) + except json.decoder.JSONDecodeError: + print('Failed to parse response as json ({0})'.format(response_data)) + return None + + if 'title' not in tested_commit_info: + print('Returned commit object missing "Title" field') + return None + + merge_commit_msg = tested_commit_info['title'] + m = self.merge_msg_regex.match(merge_commit_msg) + + if m is None: + print('Failed to find pr_sha in merge commit message') + return None + + return m.group(1) + + def get_pipelines_for_branch(self, branch, time_threshold=None): + # Use gitlab's API to get pipeline results for the corresponding ref. 
+ api_url = self.pipeline_api_template.format( + urllib.parse.quote_plus(branch) + ) + + # Optionally constrain the query with the provided time_threshold + if time_threshold: + api_url = "{0}&updated_after={1}".format(api_url, time_threshold) + + try: + request = urllib.request.Request(api_url) + if "GITLAB_TOKEN" in os.environ: + request.add_header("Authorization", "Bearer %s" % os.environ["GITLAB_TOKEN"]) + response = urllib.request.urlopen(request) + except OSError as inst: + print("GitLab API request error accessing {0}".format(api_url)) + print(inst) + return None + try: + pipelines = json.loads(response.read()) + except json.decoder.JSONDecodeError as inst: + print("Error parsing response to {0}".format(api_url)) + print(inst) + return None + + return self.dedupe_pipelines(pipelines) + + def post_pipeline_status(self, open_prs, protected_branches): + print("Rate limit at the beginning of post_pipeline_status(): {}".format(self.py_github.rate_limiting[0])) + pipeline_branches = [] + backlog_branches = [] + # Split up the open_prs branches into two piles: branches we force-pushed to gitlab + # and branches we deferred pushing. + for pr_branch, base_sha, head_sha, backlog in zip(open_prs["pr_strings"], + open_prs["base_shas"], + open_prs["head_shas"], + open_prs["backlogged"]): + if not backlog: + pipeline_branches.append(pr_branch) + else: + backlog_branches.append((pr_branch, head_sha, backlog)) + + pipeline_branches.extend(protected_branches) + + print('Querying pipelines to post status for:') + for branch in pipeline_branches: + # Post status to GitHub for each pipeline found. 
+ pipelines = self.get_pipelines_for_branch(branch, self.time_threshold_brief) + if not pipelines: + continue + for sha, pipeline in pipelines.items(): + post_data = self.make_status_for_pipeline(pipeline) + if not post_data: + continue + # TODO: associate shas with protected branches, so we do not have to + # hit an endpoint here, but just use the sha we already know just like + # we do below for backlogged PR statuses. + pr_sha = self.find_pr_sha(sha) + if not pr_sha: + print('Could not find github PR sha for tested commit: {0}'.format(sha)) + print('Using tested commit to post status') + pr_sha = sha + self.create_status_for_commit(pr_sha, + branch, + post_data["state"], + post_data["target_url"], + post_data["description"]) + + # Post a status of pending/backlogged for branches we deferred pushing + print("Posting backlogged status to the following:") + base_backlog_desc = \ + "This branch's merge-base with {} is newer than the latest commit tested by GitLab".format(self.main_branch) + for branch, head_sha, reason in backlog_branches: + if reason == "stale": + print("Skip posting status for {} because it has not been updated recently".format(branch)) + continue + elif reason == "base": + desc = base_backlog_desc + url = "https://github.com/spack/spack-infrastructure/blob/main/docs/deferred_pipelines.md" + elif reason == "draft": + desc = "GitLab CI is disabled for draft PRs" + url = "" + else: + desc = reason + url = "" + self.create_status_for_commit(head_sha, branch, "pending", url, desc) + + # Post errors to any PRs that we couldn't merge to latest_tested_main_commit. 
+ print('Posting unmergeable status to the following:') + for sha in self.unmergeable_shas: + print(' {0}'.format(sha)) + self.create_status_for_commit(sha, "", "error", "", f"PR could not be merged with {self.main_branch}") + print("Rate limit at the end of post_pipeline_status(): {}".format(self.py_github.rate_limiting[0])) + + def create_status_for_commit(self, sha, branch, state, target_url, description): + context = "OLCF Ascent (Summit)" + commit = self.get_commit(sha) + existing_statuses = commit.get_combined_status() + for status in existing_statuses.statuses: + if (status.context == context and + status.state == state and + status.description == description and + status.target_url == target_url): + print("Not posting duplicate status to {} / {}".format(branch, sha)) + return + try: + status_response = self.get_commit(sha).create_status( + state=state, + target_url=target_url, + description=description, + context=context + ) + if status_response.state != state: + print("Expected CommitStatus state {0}, got {1}".format( + state, status_response.state)) + except Exception as e_inst: + print('Caught exception posting status for {0}/{1}'.format(branch, sha)) + print(e_inst) + print(" {0} -> {1}".format(branch, sha)) + + def sync(self): + """Synchronize pull requests from GitHub as branches on GitLab.""" + + print("Initial rate limit: {}".format(self.py_github.rate_limiting[0])) + reset_time = datetime.utcfromtimestamp(self.py_github.rate_limiting_resettime).strftime('%Y-%m-%d %H:%M:%S') + print("Rate limit will refresh at: {} UTC".format(reset_time)) + + # Setup SSH command for communicating with GitLab. + os.environ["GIT_SSH_COMMAND"] = "ssh -F /dev/null -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no" + + # Work inside a temporary directory that will be deleted when this script terminates. + with tempfile.TemporaryDirectory() as tmpdirname: + os.chdir(tmpdirname) + + # Setup the local repo with two remotes. 
+ self.setup_git_repo() + + if self.main_branch: + # Find the currently running main branch pipeline, if any, and get the sha. + # Also get the latest commit on the main branch that has a completed pipeline. + main_branch_pipelines = self.get_pipelines_for_branch(self.main_branch) + for sha, pipeline in main_branch_pipelines.items(): + if self.latest_tested_main_commit is None and \ + (pipeline['status'] == "success" or pipeline['status'] == "failed"): + self.latest_tested_main_commit = sha + + if self.currently_running_sha is None and pipeline['status'] == "running": + self.currently_running_sha = sha + + if self.latest_tested_main_commit and self.currently_running_sha: + break + + print("Latest completed {0} pipeline: {1}".format(self.main_branch, self.latest_tested_main_commit)) + print("Currently running {0} pipeline: {1}".format(self.main_branch, self.currently_running_sha)) + + # Retrieve open PRs from GitHub. + all_open_prs, open_prs = self.list_github_prs() + + # Get protected branches on GitHub. + protected_branches = self.list_github_protected_branches() + + # Get tags on GitHub. + tags = self.list_github_tags() + + # Get refspecs for open PRs and protected branches. + open_refspecs = self.get_open_refspecs(open_prs) + fetch_refspecs = [] + self.update_refspecs_for_protected_branches(protected_branches, open_refspecs, fetch_refspecs) + self.update_refspecs_for_tags(tags, open_refspecs, fetch_refspecs) + + # Sync open GitHub PRs and protected branches to GitLab. 
+ self.fetch_github_branches(fetch_refspecs) + self.build_local_branches(protected_branches) + if open_refspecs: + print("Syncing to GitLab") + push_args = ["git", "push", "--porcelain", "-f", "gitlab"] + open_refspecs + subprocess.run(push_args, check=True) + + # Post pipeline status to GitHub for each open PR, if enabled + if self.post_status: + print('Posting pipeline status for open PRs and protected branches') + self.post_pipeline_status(all_open_prs, protected_branches) + + +if __name__ == "__main__": + # Parse command-line arguments. + parser = argparse.ArgumentParser(description="Sync GitHub PRs to GitLab") + parser.add_argument("github_project", help="GitHub project (org/repo or user/repo)") + parser.add_argument("gitlab_repo", help="Full clone URL for GitLab") + parser.add_argument("gitlab_host", help="GitLab web host") + parser.add_argument("gitlab_project", help="GitLab project (org/repo or user/repo)") + parser.add_argument("--disable-status-post", action="store_true", default=False, + help="Do not post pipeline status to each GitHub PR") + parser.add_argument("--sync-draft-prs", action="store_true", default=False, + help="Copy draft PRs from GitHub to GitLab") + parser.add_argument("--pr-mirror-bucket", default=None, + help="Delete mirrors for closed PRs from the specified S3 bucket") + parser.add_argument("--main-branch", default=None, + help="""If provided, we check if there is a currently running +pipeline for this branch. If so, we defer syncing any subsequent commits in an effort +to not interrupt this pipeline. 
We also defer pushing any PR branches that are based +on a commit of the main branch that is newer than the latest commit tested by GitLab.""") + parser.add_argument("--prereq-check", nargs="+", default=False, + help="Only push branches that have already passed this GitHub check") + + args = parser.parse_args() + + ssh_key_base64 = os.getenv("GITLAB_SSH_KEY_BASE64") + if ssh_key_base64 is None: + raise Exception("GITLAB_SSH_KEY_BASE64 environment is not set") + + if "GITHUB_TOKEN" not in os.environ: + raise Exception("GITHUB_TOKEN environment is not set") + + bridge = SpackCIBridge(gitlab_repo=args.gitlab_repo, + gitlab_host=args.gitlab_host, + gitlab_project=args.gitlab_project, + github_project=args.github_project, + disable_status_post=args.disable_status_post, + sync_draft_prs=args.sync_draft_prs, + main_branch=args.main_branch, + prereq_checks=args.prereq_check) + bridge.setup_ssh(ssh_key_base64) + bridge.sync() diff --git a/.gitlab/config/ccache.cmake b/.gitlab/config/ccache.cmake new file mode 100644 index 0000000000..5e5f90c66b --- /dev/null +++ b/.gitlab/config/ccache.cmake @@ -0,0 +1,61 @@ +##============================================================================ +## Copyright (c) Kitware, Inc. +## All rights reserved. +## See LICENSE.txt for details. +## +## This software is distributed WITHOUT ANY WARRANTY; without even +## the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR +## PURPOSE. See the above copyright notice for more information. 
+##============================================================================
+
+cmake_minimum_required(VERSION 3.0 FATAL_ERROR)
+
+set(version 4.6.1)
+set(arch x86_64)
+
+if(CMAKE_HOST_SYSTEM_NAME STREQUAL "Linux")
+  set(sha256sum da1e1781bc1c4b019216fa16391af3e1daaee7e7f49a8ec9b0cdc8a1d05c50e2)
+  set(base_url https://github.com/ccache/ccache/releases/download)
+  set(platform linux)
+  set(extension tar.xz)
+elseif(CMAKE_HOST_SYSTEM_NAME STREQUAL "Darwin")
+  set(sha256sum 3e36ba8c80fbf7f2b95fe0227b9dd1ca6143d721aab052caf0d5729769138059)
+  set(full_url https://gitlab.kitware.com/utils/ci-utilities/-/package_files/534/download)
+  set(filename ccache)
+  set(extension tar.gz)
+elseif(CMAKE_HOST_SYSTEM_NAME STREQUAL "Windows")
+  set(sha256sum a6c6311973aa3d2aae22424895f2f968e5d661be003b25f1bd854a5c0cd57563)
+  set(base_url https://github.com/ccache/ccache/releases/download)
+  set(platform windows)
+  set(extension zip)
+else()
+  message(FATAL_ERROR "Unrecognized platform ${CMAKE_HOST_SYSTEM_NAME}")
+endif()
+
+if(NOT DEFINED filename)
+  set(filename "ccache-${version}-${platform}-${arch}")
+endif()
+
+set(tarball "${filename}.${extension}")
+
+if(NOT DEFINED full_url)
+  set(full_url "${base_url}/v${version}/${tarball}")
+endif()
+
+file(DOWNLOAD
+  "${full_url}" $ENV{CCACHE_INSTALL_DIR}/${tarball}
+  EXPECTED_HASH SHA256=${sha256sum}
+  SHOW_PROGRESS
+  )
+
+execute_process(
+  COMMAND ${CMAKE_COMMAND} -E tar xf ${tarball}
+  WORKING_DIRECTORY $ENV{CCACHE_INSTALL_DIR}
+  RESULT_VARIABLE extract_results
+  )
+
+if(extract_results)
+  message(FATAL_ERROR "Extracting `${tarball}` failed: ${extract_results}.")
+endif()
+
+file(RENAME $ENV{CCACHE_INSTALL_DIR}/${filename} $ENV{CCACHE_INSTALL_DIR}/ccache)
diff --git a/.gitlab/config/dynamic_pipeline.yml.in b/.gitlab/config/dynamic_pipeline.yml.in
new file mode 100644
index 0000000000..b1a1ba01c9
--- /dev/null
+++ b/.gitlab/config/dynamic_pipeline.yml.in
@@ -0,0 +1,9 @@
+child_pipeline_{branch}:
+  variables:
+    
DOWNSTREAM_COMMIT_SHA: '{commit}' + DOWNSTREAM_BRANCH_REF: '{branch}' + trigger: + include: + - project: 'ci/csc303_crusher/dev/adios2' + ref: '{branch}' + file: '.gitlab/gitlab-ci-crusher.yml' diff --git a/.gitlab/config/generate_pipelines.py b/.gitlab/config/generate_pipelines.py new file mode 100755 index 0000000000..4d75b55bd3 --- /dev/null +++ b/.gitlab/config/generate_pipelines.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python3 + +# Distributed under the OSI-approved Apache License, Version 2.0. See +# accompanying file Copyright.txt for details. +# +# generate_pipeline.py +# +# Created: May 19, 2023 +# Author: Vicente Adolfo Bolea Sanchez + +from datetime import datetime +import argparse +import requests +import time +import re +import urllib3 +# Remove annoying warning about insecure connection (self-signed cert). +urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) + + +def is_date_after(date, days): + deadline_sec = int(time.time()) - (days * 86400) + utc_dt = datetime.strptime(date, '%Y-%m-%dT%H:%M:%SZ') + timestamp_sec = (utc_dt - datetime(1970, 1, 1)).total_seconds() + return timestamp_sec > deadline_sec + + +def request_dict(url): + r = requests.get(url + '?per_page=100', verify=False) + return r.json() + + +parser = argparse.ArgumentParser( + prog='generate_pipeline.py', + description='Generate Dynamic pipelines for Gitlab') +parser.add_argument( + '-u', '--gl-url', required=True, + help='Base URL for Gitlab remote. Ex: https://code.olcf.ornl.gov/') +parser.add_argument( + '-n', '--gh-name', required=True, + help='Full name of the GitHub project. 
Ex: ornladios/ADIOS2') +parser.add_argument( + '-c', '--gh-context', default='OLCF Crusher (Frontier)', + help='Name of the status in GitHub (A.K.A context)') +parser.add_argument( + '-p', '--project_id', required=True, + help='Gitlab internal project ID of the project.') +parser.add_argument( + '-d', '--days', type=int, default=1, + help='How many days back to search for commits') +parser.add_argument( + '-m', '--max', type=int, default=3, + help='Maximum amount of pipelines computed') +parser.add_argument( + '-f', '--template_file', required=True, + help='Template file of the pipeline `{branch}` will be substituted') +args = parser.parse_args() + + +with open(args.template_file, "r") as fd: + template_str = fd.read() + gl_url = args.gl_url + "/api/v4/projects/" + str(args.project_id) + gh_url = 'https://api.github.com/repos/' + args.gh_name + branches = request_dict(gl_url + "/repository/branches") + num_pipeline = 0 + for branch in branches: + # Convert to ISO 8601 date format. + date_stamp = branch['commit']['committed_date'].split('.')[0] + "Z" + if num_pipeline < args.max and is_date_after(date_stamp, args.days): + commit_sha = branch['commit']['id'] + # Backported branches use the merge head + gh_commit_sha = commit_sha + if re.fullmatch(r'^pr\d+_.*$', branch['name']): + gh_commit_sha = branch['commit']['parent_ids'][1] + + # Quit if GitHub does not have the commit + if 'sha' not in request_dict(gh_url + "/commits/" + gh_commit_sha): + continue + + # Query GitHub for the status of this commit + commit = request_dict(gh_url + "/commits/" + + gh_commit_sha + "/status") + status_found = False + for status in commit['statuses']: + if status['context'] == args.gh_context: + status_found = True + if not status_found: + num_pipeline += 1 + print(template_str.format( + branch=branch['name'], commit=commit_sha)) diff --git a/.gitlab/kokkos.sh b/.gitlab/config/kokkos.sh similarity index 90% rename from .gitlab/kokkos.sh rename to .gitlab/config/kokkos.sh index 
698fe84ac7..2504b90637 100755 --- a/.gitlab/kokkos.sh +++ b/.gitlab/config/kokkos.sh @@ -23,7 +23,6 @@ cmake -S "$WORKDIR/kokkos-$VERSION" -B "$WORKDIR/kokkos_build" \ "-DCMAKE_CXX_STANDARD:STRING=17" \ "-DCMAKE_CXX_EXTENSIONS:BOOL=OFF" \ "-DCMAKE_POSITION_INDEPENDENT_CODE:BOOL=ON" \ - "-DCMAKE_CXX_COMPILER:STRING=$WORKDIR/kokkos-$VERSION/bin/nvcc_wrapper" \ $* cmake --build "$WORKDIR/kokkos_build" diff --git a/.gitlab/gitlab-ci-ascent.yml b/.gitlab/gitlab-ci-ascent.yml index 1d1e5c0a95..02e50f4584 100644 --- a/.gitlab/gitlab-ci-ascent.yml +++ b/.gitlab/gitlab-ci-ascent.yml @@ -21,6 +21,7 @@ CUDAHOSTCXX: "g++" CUSTOM_CI_BUILDS_DIR: "/gpfs/wolf/csc303/scratch/vbolea/ci/adios2" GITLAB_SITE: "OLCF Ascent" + CI_BIN_DIR: "$CI_PROJECT_DIR/build" SCHEDULER_PARAMETERS: -P CSC303 -W 1:00 -nnodes 1 -alloc_flags gpudefault before_script: - *setup_env_ecpci @@ -68,18 +69,22 @@ ascent-kokkos-cuda: libffi hdf5 cmake + KOKKOS_VER: 3.7.01 + Kokkos_DIR: "$CI_PROJECT_DIR/deps/kokkos_install" + # Cmake would not install a RPATH inside the source dir + LD_LIBRARY_PATH: "$Kokkos_DIR/lib64/:$LD_LIBRARY_PATH" KOKKOS_OPTS: >- -DKokkos_ARCH_POWER9=ON -DKokkos_ARCH_VOLTA70=ON -DKokkos_ENABLE_CUDA=ON -DKokkos_ENABLE_CUDA_LAMBDA=ON - -DCMAKE_INSTALL_PREFIX:PATH=$CI_BUILDS_DIR/kokkos_install - - Kokkos_DIR: $CI_BUILDS_DIR/kokkos_install + -DCMAKE_INSTALL_PREFIX:PATH=$Kokkos_DIR + -DCMAKE_CXX_COMPILER:STRING=$CI_PROJECT_DIR/deps/kokkos-$KOKKOS_VER/bin/nvcc_wrapper before_script: - *setup_env_ecpci + - mkdir -p "$CI_PROJECT_DIR/deps" - ccache -z - - .gitlab/kokkos.sh "$CI_BUILDS_DIR" "3.7.01" $KOKKOS_OPTS + - .gitlab/config/kokkos.sh "$CI_PROJECT_DIR/deps" "$KOKKOS_VER" $KOKKOS_OPTS extends: - .ascent-common @@ -127,4 +132,4 @@ sync-github-prs: CUSTOM_CI_BUILDS_DIR: "/gpfs/wolf/csc303/scratch/vbolea/ci/adios2" script: - export PATH="/gpfs/wolf/csc303/scratch/vbolea/ci/utils:$PATH" - - SpackCIBridge.py ornladios/ADIOS2 git@code.ornl.gov:ecpcitest/adios2.git https://code.ornl.gov/ ecpcitest/adios2 
--prereq-check=format --prereq-check=git_checks + - .gitlab/config/SpackCIBridge.py ornladios/ADIOS2 git@code.ornl.gov:ecpcitest/adios2.git https://code.ornl.gov/ ecpcitest/adios2 --prereq-check=format --prereq-check=git_checks diff --git a/.gitlab/gitlab-ci-crusher.yml b/.gitlab/gitlab-ci-crusher.yml new file mode 100644 index 0000000000..a78a3db90d --- /dev/null +++ b/.gitlab/gitlab-ci-crusher.yml @@ -0,0 +1,223 @@ +# Ad-hoc build that runs in the ECP Hardware, concretely in OLCF Crusher. + +stages: + - pre + - setup + - build + - post + +.setup_env_ecpci: &setup_env_ecpci | + git fetch + source scripts/ci/gitlab-ci/setup-vars.sh + git checkout "$CI_COMMIT_REF" + module purge + module load ${JOB_MODULES} + module list + export PATH="${CCACHE_INSTALL_DIR}/ccache:$PATH" + +.install_ccache: &install_ccache | + mkdir -p "$CCACHE_INSTALL_DIR" + cmake --version + cmake -VV -P .gitlab/config/ccache.cmake + ccache -z + ccache -s + +.crusher-common: + rules: + - if: $CI_PIPELINE_SOURCE =~ /parent_pipeline|web/ + interruptible: true + variables: + CCACHE_BASEDIR: "/lustre/orion/csc303/scratch/" + CCACHE_DIR: "/lustre/orion/csc303/scratch/vbolea/ci/adios2/ccache" + CUSTOM_CI_BUILDS_DIR: "/lustre/orion/csc303/scratch/vbolea/ci/adios2/runtime" + + # -isystem= is not affected by CCACHE_BASEDIR, thus we must ignore it + CCACHE_IGNOREOPTIONS: "-isystem=*" + CCACHE_NOHASHDIR: "true" + CCACHE_INSTALL_DIR: "$CI_PROJECT_DIR/deps/ccache_install" + + CMAKE_BUILD_TYPE: "RelWithDebInfo" + CMAKE_GENERATOR: "Ninja" + CMAKE_PREFIX_PATH: "$CI_PROJECT_DIR/deps/kokkos_install" + + # We do not want to use the user's ~/.gitconfig + GIT_CONFIG_GLOBAL: "true" + GITLAB_SITE: "OLCF Crusher" + CI_BIN_DIR: "$CI_PROJECT_DIR/build" + +.setup-common: + stage: setup + tags: [ shell ] + before_script: + - *setup_env_ecpci + - *install_ccache + script: + - bash scripts/ci/gitlab-ci/run.sh update + artifacts: + expire_in: 24 hours + when: always + paths: + - deps/*install/ + - build/ + +.build-common: + 
stage: build + tags: [ slurm ] + variables: + SCHEDULER_PARAMETERS: "-ACSC303_crusher -t30 --nice=0 -c32 --gpus=4 -N 1" + before_script: + - *setup_env_ecpci + script: + - bash scripts/ci/gitlab-ci/run.sh configure + - bash scripts/ci/gitlab-ci/run.sh build + - bash scripts/ci/gitlab-ci/run.sh test + after_script: + - *setup_env_ecpci + - bash scripts/ci/gitlab-ci/run.sh submit + - ccache -s + +.kokkos-hip-common: + variables: + Kokkos_DIR: "$CI_PROJECT_DIR/deps/kokkos_install" + # Cmake would not install a RPATH inside the source dir + LD_LIBRARY_PATH: "$Kokkos_DIR/lib64/:$LD_LIBRARY_PATH" + # Order matters + JOB_MODULES: >- + craype-accel-amd-gfx90a + gcc/12 + cmake + rocm/5.4.3 + git + ninja + libffi + hdf5 + zstd + +setup:crusher-kokkos-hip: + variables: + KOKKOS_VER: 3.7.01 + KOKKOS_OPTS: >- + -DCMAKE_INSTALL_PREFIX:PATH=$Kokkos_DIR + -DCMAKE_CXX_COMPILER:FILEPATH=/opt/rocm-5.4.3/hip/bin/hipcc + -DKokkos_ARCH_VEGA90A:BOOL=ON + -DKokkos_ENABLE_HIP:BOOL=ON + -DKokkos_ENABLE_HIP_RELOCATABLE_DEVICE_CODE:BOOL=OFF + -DKokkos_ENABLE_SERIAL:BOOL=ON + extends: + - .crusher-common + - .setup-common + - .kokkos-hip-common + before_script: + - *setup_env_ecpci + - *install_ccache + - .gitlab/config/kokkos.sh "$CI_PROJECT_DIR/deps" "$KOKKOS_VER" $KOKKOS_OPTS + +build:crusher-kokkos-hip: + extends: + - .crusher-common + - .build-common + - .kokkos-hip-common + before_script: + - *setup_env_ecpci + needs: + - setup:crusher-kokkos-hip + dependencies: + - setup:crusher-kokkos-hip + +.cray-common: + variables: + # Order matters + JOB_MODULES: >- + PrgEnv-cray + cmake + git + ninja + libffi + zstd + DefApps + extends: + - .crusher-common + +setup:crusher-cray: + extends: + - .setup-common + - .cray-common + +build:crusher-cray: + extends: + - .build-common + - .cray-common + needs: + - setup:crusher-cray + dependencies: + - setup:crusher-cray + +.report-status: + rules: + - if: $CI_PIPELINE_SOURCE =~ /parent_pipeline|web/ + tags: [ shell ] + variables: + STATUS_PROJECT: 
ornladios/ADIOS2 + STATUS_NAME: OLCF Crusher (Frontier) + before_script: | + git fetch + source scripts/ci/gitlab-ci/setup-vars.sh + git checkout "$CI_COMMIT_REF" + script: > + curl -X POST -H @${GITHUB_CURL_HEADERS} + "https://api.github.com/repos/${STATUS_PROJECT}/statuses/${CI_ORIGINAL_SHA}" + -d "{\"state\":\"${CI_JOB_NAME}\", \"context\":\"${STATUS_NAME}\",\"target_url\":\"${CI_PIPELINE_URL}\",\"description\":\"${STATUS_DESC}\"}" + environment: + name: reporting-github + +pending: + stage: pre + variables: + STATUS_DESC: Pipeline is running + extends: + - .report-status +success: + stage: post + variables: + STATUS_DESC: Pipeline succeeded + extends: + - .report-status + dependencies: + - build:crusher-kokkos-hip + - build:crusher-cray +failure: + stage: post + rules: + - if: $CI_PIPELINE_SOURCE =~ /parent_pipeline|web/ + when: on_failure + variables: + STATUS_DESC: Pipeline failed + extends: + - .report-status + dependencies: + - build:crusher-kokkos-hip + - build:crusher-cray + +generate_pipelines: + stage: setup + tags: [ shell ] + rules: + - if: $CI_PIPELINE_SOURCE == "schedule" + variables: + CUSTOM_CI_BUILDS_DIR: "/lustre/orion/csc303/scratch/vbolea/ci/adios2/runtime" + script: + - .gitlab/config/generate_pipelines.py -u "https://code.olcf.ornl.gov/" -p 78 -n ornladios/ADIOS2 -f .gitlab/config/dynamic_pipeline.yml.in > generated_pipelines.yml + artifacts: + paths: + - generated_pipelines.yml + +launch_pipelines: + stage: build + rules: + - if: $CI_PIPELINE_SOURCE == "schedule" + variables: + CUSTOM_CI_BUILDS_DIR: "/lustre/orion/csc303/scratch/vbolea/ci/adios2/runtime" + trigger: + include: + - artifact: generated_pipelines.yml + job: generate_pipelines diff --git a/flake8.cfg b/flake8.cfg index 5525546057..39b0361140 100644 --- a/flake8.cfg +++ b/flake8.cfg @@ -3,4 +3,4 @@ max-line-length = 80 max-complexity = 1000 format = pylint ignore = E302,F401,F403,F405,F999,W504 -exclude = thirdparty/ +exclude = thirdparty/,.gitlab/config/SpackCIBridge.py diff 
--git a/scripts/ci/cmake-v2/ci-crusher-cray.cmake b/scripts/ci/cmake-v2/ci-crusher-cray.cmake new file mode 100644 index 0000000000..a148430830 --- /dev/null +++ b/scripts/ci/cmake-v2/ci-crusher-cray.cmake @@ -0,0 +1,32 @@ +# Client maintainer: vicente.bolea@kitware.com + +set(ENV{CC} craycc) +set(ENV{CXX} craycxx) + +set(dashboard_cache " +ADIOS2_USE_BZip2:BOOL=OFF +ADIOS2_USE_DataMan:BOOL=ON +ADIOS2_USE_Fortran:BOOL=OFF +ADIOS2_USE_MPI:BOOL=OFF +ADIOS2_USE_HDF5:BOOL=OFF +ADIOS2_USE_PNG:BOOL=OFF +ADIOS2_USE_Python:BOOL=OFF +ADIOS2_USE_SST:BOOL=ON +ADIOS2_USE_ZeroMQ:STRING=OFF +ADIOS2_USE_ZFP:BOOL=OFF +ADIOS2_USE_SZ:BOOL=OFF +ADIOS2_USE_Blosc:BOOL=OFF + +CMAKE_C_COMPILER_LAUNCHER=ccache +CMAKE_CXX_COMPILER_LAUNCHER=ccache +CMAKE_DISABLE_FIND_PACKAGE_BISON=ON +CMAKE_DISABLE_FIND_PACKAGE_FLEX=ON +") + +set(CTEST_TEST_ARGS + PARALLEL_LEVEL 8 + EXCLUDE "Engine.Staging.TestThreads.*" + ) +set(CTEST_CMAKE_GENERATOR "Ninja") +list(APPEND CTEST_UPDATE_NOTES_FILES "${CMAKE_CURRENT_LIST_FILE}") +include(${CMAKE_CURRENT_LIST_DIR}/ci-common.cmake) diff --git a/scripts/ci/cmake-v2/ci-crusher-kokkos-hip.cmake b/scripts/ci/cmake-v2/ci-crusher-kokkos-hip.cmake new file mode 100644 index 0000000000..afe52c92e6 --- /dev/null +++ b/scripts/ci/cmake-v2/ci-crusher-kokkos-hip.cmake @@ -0,0 +1,31 @@ +# Client maintainer: vicente.bolea@kitware.com + +set(ENV{CC} gcc) +set(ENV{CXX} g++) +set(ENV{FC} gfortran) + +set(dashboard_cache " +ADIOS2_USE_BZip2:BOOL=OFF +ADIOS2_USE_DataMan:BOOL=ON +ADIOS2_USE_Fortran:BOOL=OFF +ADIOS2_USE_MPI:BOOL=OFF +ADIOS2_USE_HDF5:BOOL=OFF +ADIOS2_USE_PNG:BOOL=OFF +ADIOS2_USE_Python:BOOL=OFF +ADIOS2_USE_SST:BOOL=ON +ADIOS2_USE_Kokkos=ON + +CMAKE_C_COMPILER_LAUNCHER=ccache +CMAKE_CXX_COMPILER_LAUNCHER=ccache +CMAKE_CUDA_COMPILER_LAUNCHER=ccache +CMAKE_DISABLE_FIND_PACKAGE_BISON=ON +CMAKE_DISABLE_FIND_PACKAGE_FLEX=ON +") + +set(CTEST_TEST_ARGS + PARALLEL_LEVEL 8 + EXCLUDE "Engine.Staging.TestThreads.*" + ) +set(CTEST_CMAKE_GENERATOR "Ninja") +list(APPEND 
CTEST_UPDATE_NOTES_FILES "${CMAKE_CURRENT_LIST_FILE}") +include(${CMAKE_CURRENT_LIST_DIR}/ci-common.cmake) diff --git a/scripts/ci/gitlab-ci/run.sh b/scripts/ci/gitlab-ci/run.sh index 02ee8c630f..1ddfb0c9dc 100755 --- a/scripts/ci/gitlab-ci/run.sh +++ b/scripts/ci/gitlab-ci/run.sh @@ -1,13 +1,8 @@ #!/bin/bash --login +# shellcheck disable=SC1091 set -e -export CI_BUILD_NAME="${CI_COMMIT_BRANCH#github/}_${CI_JOB_NAME}" -export CI_COMMIT_REF=${CI_COMMIT_SHA} -export CI_ROOT_DIR="${CI_PROJECT_DIR}/.." -export CI_SITE_NAME="${GITLAB_SITE}" -export CI_SOURCE_DIR="${CI_PROJECT_DIR}" - -export CI_BIN_DIR="${CI_ROOT_DIR}/${CI_BUILD_NAME}" +source scripts/ci/gitlab-ci/setup-vars.sh readonly CTEST_SCRIPT=scripts/ci/cmake-v2/ci-${CI_JOB_NAME}.cmake if [ ! -f "$CTEST_SCRIPT" ] @@ -23,23 +18,13 @@ then exit 2 fi -# In OLCF Gitlab our PRs branches tip commit is not the head commit of the PR, -# it is instead the so called merged_commit_sha as described in the GitHub Rest -# API for pull requests. We need to report to the CDASH the original commit -# thus, we set it here using the CTEST_UPDATE_VERSION_OVERRIDE CMake variable -if [[ ${CI_COMMIT_BRANCH} =~ ^pr[0-9]+_.*$ ]] -then - # Original commit it is always its 2nd parent - original_sha=$(git rev-parse "${CI_COMMIT_REF}^2") - readonly UPDATE_ARGS="-DCTEST_UPDATE_VERSION_OVERRIDE=${original_sha}" -fi - declare -a CTEST_STEP_ARGS=("-Ddashboard_full=OFF") case ${STEP} in - update) CTEST_STEP_ARGS+=("${UPDATE_ARGS}") ;; - build) CTEST_STEP_ARGS+=("-Ddashboard_do_submit=OFF") ;; - test) CTEST_STEP_ARGS+=("-Ddashboard_do_submit=OFF") ;; - submit) CTEST_STEP_ARGS+=("-Ddashboard_do_submit_only=ON" "-Ddashboard_do_build=ON" "-Ddashboard_do_test=ON") ;; + update) CTEST_STEP_ARGS+=("${CI_UPDATE_ARGS}") ;; + configure) CTEST_STEP_ARGS+=("-Ddashboard_do_submit=OFF") ;; + build) CTEST_STEP_ARGS+=("-Ddashboard_do_submit=OFF") ;; + test) CTEST_STEP_ARGS+=("-Ddashboard_do_submit=OFF") ;; + submit) 
CTEST_STEP_ARGS+=("-Ddashboard_do_submit_only=ON" "-Ddashboard_do_configure=ON" "-Ddashboard_do_build=ON" "-Ddashboard_do_test=ON") ;; esac CTEST_STEP_ARGS+=("-Ddashboard_do_${STEP}=ON") diff --git a/scripts/ci/gitlab-ci/setup-vars.sh b/scripts/ci/gitlab-ci/setup-vars.sh new file mode 100755 index 0000000000..67d1435ef5 --- /dev/null +++ b/scripts/ci/gitlab-ci/setup-vars.sh @@ -0,0 +1,41 @@ +#!/bin/bash --login +set -e + +# Strip the job name prefix +export CI_JOB_NAME="${CI_JOB_NAME#*:}" +export CI_BUILD_NAME="${CI_COMMIT_BRANCH#github/}_${CI_JOB_NAME}" +export CI_ROOT_DIR="${CI_PROJECT_DIR}/.." +export CI_SITE_NAME="${GITLAB_SITE}" +export CI_SOURCE_DIR="${CI_PROJECT_DIR}" + +if [ -z "$DOWNSTREAM_COMMIT_SHA" ] +then + export CI_COMMIT_REF="${CI_COMMIT_SHA}" +else + export CI_COMMIT_REF="${DOWNSTREAM_COMMIT_SHA}" +fi + +if [ -z "$DOWNSTREAM_BRANCH_REF" ] +then + export CI_BRANCH_REF="${CI_COMMIT_REF_NAME}" +else + export CI_BRANCH_REF="${DOWNSTREAM_BRANCH_REF}" +fi + +# In OLCF Crusher we must fix the build directory in the yml. +if [ -z "$CI_BIN_DIR" ] +then + export CI_BIN_DIR="${CI_ROOT_DIR}/${CI_BUILD_NAME}" +fi + +# In OLCF Gitlab our PRs branches tip commit is not the head commit of the PR, +# it is instead the so called merged_commit_sha as described in the GitHub Rest +# API for pull requests. 
We need to report to the CDASH the original commit +# thus, we set it here using the CTEST_UPDATE_VERSION_OVERRIDE CMake variable +if [[ ${CI_BRANCH_REF} =~ ^pr[0-9]+_.*$ ]] +then + # Original commit it is always its 2nd parent + ci_original_sha=$(git rev-parse "${CI_COMMIT_REF}^2") + export CI_ORIGINAL_SHA="$ci_original_sha" + export CI_UPDATE_ARGS="-DCTEST_UPDATE_VERSION_OVERRIDE=${CI_ORIGINAL_SHA}" +fi From 55fe563fa2941995f67af748308feb331af14a72 Mon Sep 17 00:00:00 2001 From: Vicente Adolfo Bolea Sanchez Date: Thu, 25 May 2023 18:31:52 -0400 Subject: [PATCH 4/9] ci,crusher: minor tweaks (cherry picked from commit 731616ff4922b67bd271cb39ecdb4fa29e2ea97d) --- .gitlab/config/generate_pipelines.py | 2 +- .gitlab/gitlab-ci-ascent.yml | 1 - scripts/ci/gitlab-ci/setup-vars.sh | 2 ++ 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.gitlab/config/generate_pipelines.py b/.gitlab/config/generate_pipelines.py index 4d75b55bd3..17b8068b99 100755 --- a/.gitlab/config/generate_pipelines.py +++ b/.gitlab/config/generate_pipelines.py @@ -49,7 +49,7 @@ def request_dict(url): '-d', '--days', type=int, default=1, help='How many days back to search for commits') parser.add_argument( - '-m', '--max', type=int, default=3, + '-m', '--max', type=int, default=2, help='Maximum amount of pipelines computed') parser.add_argument( '-f', '--template_file', required=True, diff --git a/.gitlab/gitlab-ci-ascent.yml b/.gitlab/gitlab-ci-ascent.yml index 02e50f4584..c4e453df0d 100644 --- a/.gitlab/gitlab-ci-ascent.yml +++ b/.gitlab/gitlab-ci-ascent.yml @@ -128,7 +128,6 @@ sync-github-prs: only: - schedules variables: - GIT_STRATEGY: none CUSTOM_CI_BUILDS_DIR: "/gpfs/wolf/csc303/scratch/vbolea/ci/adios2" script: - export PATH="/gpfs/wolf/csc303/scratch/vbolea/ci/utils:$PATH" diff --git a/scripts/ci/gitlab-ci/setup-vars.sh b/scripts/ci/gitlab-ci/setup-vars.sh index 67d1435ef5..6ca88e616c 100755 --- a/scripts/ci/gitlab-ci/setup-vars.sh +++ b/scripts/ci/gitlab-ci/setup-vars.sh @@ -38,4 
+38,6 @@ then ci_original_sha=$(git rev-parse "${CI_COMMIT_REF}^2") export CI_ORIGINAL_SHA="$ci_original_sha" export CI_UPDATE_ARGS="-DCTEST_UPDATE_VERSION_OVERRIDE=${CI_ORIGINAL_SHA}" +else + export CI_ORIGINAL_SHA="${CI_COMMIT_REF}" fi From 86b792f61367b3f610e5ee34a6c0e33d76722ba1 Mon Sep 17 00:00:00 2001 From: Vicente Adolfo Bolea Sanchez Date: Wed, 24 May 2023 20:34:20 -0400 Subject: [PATCH 5/9] docs: update whatsnew (cherry picked from commit 49e80a17ed4dd68ee6a333a93eb8736d9cda382c) --- .../source/introduction/whatsnew.rst | 46 +++++++++++-------- 1 file changed, 27 insertions(+), 19 deletions(-) diff --git a/docs/user_guide/source/introduction/whatsnew.rst b/docs/user_guide/source/introduction/whatsnew.rst index af8aae7c08..39cefb7509 100644 --- a/docs/user_guide/source/introduction/whatsnew.rst +++ b/docs/user_guide/source/introduction/whatsnew.rst @@ -1,26 +1,34 @@ -################## -What's new in 2.8? -################## +================== +What's new in 2.9? +================== -Important changes to the API +Summary +======= - * **adios2::Mode::ReadRandomAccess** mode is introduced for reading files with access to all steps. - BeginStep/EndStep calls are *NOT allowed*. SetStepSelection() can be used to access specific steps in the file. - * **adios2::Mode::Read** mode now requires using BeginStep/EndStep loop to access steps serially one by one. Variable inquiry - fails outside BeginStep/EndStep sections. You need to modify your Open() statement to use the random-access mode if your - code wants to access all steps in any order in an existing file. - * **adios2::ADIOS::EnterComputationBlock()**, **adios2::ADIOS::ExitComputationBlock()** are hints to ADIOS that a process is in a computing (i.e. non-communicating) phase. BP5 asynchronous I/O operations can schedule writing during such phases to avoid interfering with the application's own communication. 
- * GPU-aware I/O supports passing device-memory data pointers to the ADIOS2 `Put()/Get()` functions, and ADIOS2 will automatically download/upload data from/to the device during I/O. Alternatively, an extra member function of the Variable class, **SetMemorySpace(const adios2::MemorySpace mem)** can explicitly tell ADIOS2 whether the pointer points to device memory or host memory. +This is a major release with new features and lots of bug fixes. -New features +General +------- - * **BP5** data format and engine. This new engine optimizes for many variables and many steps at large scale. - It is also more memory efficient than previous engines, see :ref:`BP5`. - * **Plugin** architecture to support external *engines* and *operators* outside the ADIOS2 installation, see :ref:`Plugins` - * **GPU-Aware I/O** for reading/writing data to/from device memory, using CUDA (NVidia GPUs only), see :ref:`GPU-aware I/O` +- GPU-Aware I/O enabled by using Kokkos. Device pointers can be passed to Put()/Get() calls directly. Kokkos 3.7.x required for this release. Works with CUDA, HIP and Kokkos applications. https://adios2.readthedocs.io/en/latest/advanced/gpu_aware.html#gpu-aware-i-o +- GPU-compression. MGARD and ZFP operators can compress data on GPU if they are built for GPU. MGARD operator can be fed with host/device pointers and will move data automaticaly. ZFP operator requires matching data and compressor location. +- Joined Array concept (besides Global Array and Local Array), which lets writers dump Local Arrays (no offsets no global shape) that are put together into a Global Array by the reader. One dimension of the arrays is selected for this join operation, while other dimensions must be the same for all writers. https://adios2.readthedocs.io/en/latest/components/components.html?highlight=Joined#shapes -Other changes +File I/O +-------- - * SST scales better for large N-to-1 staging, by managing the limits of outstanding remote direct memory access requests. 
- Of course one still introduces a literal bottleneck with such a pattern into an in situ workflow. +- Default File engine is now BP5. If for some reason this causes problems, manually specify using "BP4" for your application. +- BP5 engine supports multithreaded reading to accelerate read performance for low-core counts. +- BP5 Two level metadata aggregation and reduction reduced memory impact of collecting metadata and therefore is more scalable in terms of numbers of variables and writers than BP4. +- Uses Blosc-2 instead of Blosc for lossless compression. The new compression operator is backward compatible with old files compressed with blosc. The name of the operator remains "blosc". +Staging +------- + +- UCX dataplane added for SST staging engine to support networks under the UCX consortium +- MPI dataplane added for SST staging engine. It relies on MPI intercommunicators to connect multiple independent MPI applications for staging purposes. Applications must enable multithreaded MPI for this dataplane. + +Experimental features +--------------------- + +- Preliminary support for data structs. A struct can have single variables of basic types, and 1D fixed size arrays of basic types. Supported by BP5, SST and SSC engines. From 4e640c137be13d6970e10aa001428c58507c5159 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 23 May 2023 05:43:13 +0000 Subject: [PATCH 6/9] Bump requests from 2.28.1 to 2.31.0 in /docs Bumps [requests](https://github.com/psf/requests) from 2.28.1 to 2.31.0. - [Release notes](https://github.com/psf/requests/releases) - [Changelog](https://github.com/psf/requests/blob/main/HISTORY.md) - [Commits](https://github.com/psf/requests/compare/v2.28.1...v2.31.0) --- updated-dependencies: - dependency-name: requests dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] (cherry picked from commit 8385796eeb6d77805e52379a11bec2f7a3e21f10) --- docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index 542cffd5aa..4e74cf0aab 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -25,7 +25,7 @@ Pygments==2.14.0 pyOpenSSL==23.0.0 PySocks==1.7.1 pytz==2022.7 -requests==2.28.1 +requests==2.31.0 setuptools==65.6.3 snowballstemmer==2.2.0 Sphinx==4.5.0 From be4fcbcf07bf658d76e7bf012ef13adbe51a3cd1 Mon Sep 17 00:00:00 2001 From: Vicente Adolfo Bolea Sanchez Date: Mon, 22 May 2023 13:48:55 -0400 Subject: [PATCH 7/9] CI,windows: change MSMPI URL (cherry picked from commit 61073724c148087dbd0e80ff0b291b4e8772b6d6) --- scripts/ci/gh-actions/windows-setup.ps1 | 57 +++++++++++++------------ 1 file changed, 29 insertions(+), 28 deletions(-) diff --git a/scripts/ci/gh-actions/windows-setup.ps1 b/scripts/ci/gh-actions/windows-setup.ps1 index 3f96c7060c..18b9248e58 100644 --- a/scripts/ci/gh-actions/windows-setup.ps1 +++ b/scripts/ci/gh-actions/windows-setup.ps1 @@ -10,38 +10,39 @@ Write-Host "::endgroup::" if($Env:GH_YML_MATRIX_PARALLEL -eq "mpi") { - $rooturl = "https://github.com/microsoft/Microsoft-MPI/releases/download" - $version = "10.1.1" - $baseurl = "$rooturl/v$version" + # This is taken from the MSMPI VCPKG + $baseurl = "https://download.microsoft.com/download/a/5/2/a5207ca5-1203-491a-8fb8-906fd68ae623" + $version = "10.1.12498" - $tempdir = $Env:RUNNER_TEMP - $msmpisdk = Join-Path $tempdir msmpisdk.msi - $msmpisetup = Join-Path $tempdir msmpisetup.exe + $tempdir = $Env:RUNNER_TEMP + $msmpisdk = Join-Path $tempdir msmpisdk.msi + $msmpisetup = Join-Path $tempdir msmpisetup.exe - Write-Host "::group::Downloading Microsoft MPI SDK $version" - Invoke-WebRequest "$baseurl/msmpisdk.msi" -OutFile $msmpisdk - Write-Host "::endgroup::" - Write-Host "::group::Installing Microsoft MPI SDK $version" - Start-Process 
msiexec.exe -ArgumentList "/quiet /passive /qn /i $msmpisdk" -Wait - Write-Host "::endgroup::" + Write-Host "::group::Downloading Microsoft MPI SDK $version" + Invoke-WebRequest "$baseurl/msmpisdk.msi" -OutFile $msmpisdk + Write-Host "::endgroup::" + Write-Host "::group::Installing Microsoft MPI SDK $version" + Start-Process msiexec.exe -ArgumentList "/quiet /passive /qn /i $msmpisdk" -Wait + Write-Host "::endgroup::" - Write-Host "::group::Downloading Microsoft MPI Runtime $version" - Invoke-WebRequest "$baseurl/msmpisetup.exe" -OutFile $msmpisetup - Write-Host "::endgroup::" - Write-Host "::group::Installing Microsoft MPI Runtime $version" - Start-Process $msmpisetup -ArgumentList "-unattend" -Wait - Write-Host "::endgroup::" + Write-Host "::group::Downloading Microsoft MPI Runtime $version" - if ($Env:GITHUB_ENV) { - Write-Host '::group::Adding environment variables to $GITHUB_ENV' - $envlist = @("MSMPI_BIN", "MSMPI_INC", "MSMPI_LIB32", "MSMPI_LIB64") - foreach ($name in $envlist) { - $value = [Environment]::GetEnvironmentVariable($name, "Machine") - Write-Host "$name=$value" - Add-Content $Env:GITHUB_ENV "$name=$value" - } - Write-Host "::endgroup::" - } + Invoke-WebRequest "$baseurl/msmpisetup.exe" -OutFile $msmpisetup + Write-Host "::endgroup::" + Write-Host "::group::Installing Microsoft MPI Runtime $version" + Start-Process $msmpisetup -ArgumentList "-unattend" -Wait + Write-Host "::endgroup::" + + if ($Env:GITHUB_ENV) { + Write-Host '::group::Adding environment variables to $GITHUB_ENV' + $envlist = @("MSMPI_BIN", "MSMPI_INC", "MSMPI_LIB32", "MSMPI_LIB64") + foreach ($name in $envlist) { + $value = [Environment]::GetEnvironmentVariable($name, "Machine") + Write-Host "$name=$value" + Add-Content $Env:GITHUB_ENV "$name=$value" + } + Write-Host "::endgroup::" + } if ($Env:GITHUB_PATH) { Write-Host '::group::Adding $MSMPI_BIN to $GITHUB_PATH' From bc048048cb6745b7716ef04ce7493a512c1ff822 Mon Sep 17 00:00:00 2001 From: Junmin Gu Date: Fri, 26 May 2023 11:43:25 
-0700 Subject: [PATCH 8/9] fixed https://github.com/ornladios/ADIOS2/issues/3638 (cherry picked from commit 59c589e912172bdb2d5867b7a256258b9fd00dbd) --- source/h5vol/H5VolUtil.c | 6 +++--- source/h5vol/H5VolUtil.h | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/source/h5vol/H5VolUtil.c b/source/h5vol/H5VolUtil.c index 6f44a18c70..80f463eaaf 100644 --- a/source/h5vol/H5VolUtil.c +++ b/source/h5vol/H5VolUtil.c @@ -56,16 +56,16 @@ void *safe_ralloc(void *ptr, size_t newsize, unsigned long line) return p; } -void gUtilConvert(hsize_t *fromH5, size_t *to, uint ndims) +void gUtilConvert(hsize_t *fromH5, size_t *to, size_t ndims) { - uint i = 0; + size_t i = 0; for (i = 0; i < ndims; i++) { to[i] = fromH5[i]; } } -int gUtilADIOS2GetShape(hid_t space_id, size_t *shape, uint ndims) +int gUtilADIOS2GetShape(hid_t space_id, size_t *shape, size_t ndims) { if (gUtilADIOS2IsScalar(space_id)) { diff --git a/source/h5vol/H5VolUtil.h b/source/h5vol/H5VolUtil.h index 305e6ab695..4797bd36f9 100644 --- a/source/h5vol/H5VolUtil.h +++ b/source/h5vol/H5VolUtil.h @@ -37,9 +37,9 @@ int gUtilADIOS2GetDim(hid_t space_id); // h5 uses hsize_t for dimensions (unsigned long long) // adios uses size_t // -void gUtilConvert(hsize_t *fromH5, size_t *to, uint ndims); +void gUtilConvert(hsize_t *fromH5, size_t *to, size_t ndims); -int gUtilADIOS2GetShape(hid_t space_id, size_t *shape, uint ndims); +int gUtilADIOS2GetShape(hid_t space_id, size_t *shape, size_t ndims); int gUtilADIOS2GetBlockInfo(hid_t hyperSlab_id, size_t *start, size_t *count, hsize_t ndims); From c55cf751bf7e81f4f7d7e704a27935079b5b699b Mon Sep 17 00:00:00 2001 From: Vicente Adolfo Bolea Sanchez Date: Mon, 29 May 2023 18:49:58 -0400 Subject: [PATCH 9/9] crusher,ci: set unique env per pipeline (cherry picked from commit 583a507fbc6e69ae3df3716065ea9f5e42adda7e) --- .gitlab/gitlab-ci-crusher.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab/gitlab-ci-crusher.yml 
b/.gitlab/gitlab-ci-crusher.yml index a78a3db90d..3e07a59794 100644 --- a/.gitlab/gitlab-ci-crusher.yml +++ b/.gitlab/gitlab-ci-crusher.yml @@ -168,7 +168,7 @@ build:crusher-cray: "https://api.github.com/repos/${STATUS_PROJECT}/statuses/${CI_ORIGINAL_SHA}" -d "{\"state\":\"${CI_JOB_NAME}\", \"context\":\"${STATUS_NAME}\",\"target_url\":\"${CI_PIPELINE_URL}\",\"description\":\"${STATUS_DESC}\"}" environment: - name: reporting-github + name: report-$DOWNSTREAM_COMMIT_SHA pending: stage: pre