diff --git a/.cicd/.helpers b/.cicd/.helpers index 511b50d7456..acc3bfe69b3 100644 --- a/.cicd/.helpers +++ b/.cicd/.helpers @@ -3,12 +3,11 @@ VERBOSE=${VERBOSE:-true} export PROJECT_NAME="eosio" # Set IMAGE_TAG using arg (generate-base-images.sh) or env (travis.yml) -( [[ $(uname) != 'Darwin' ]] && [[ -z $IMAGE_TAG ]] && [[ -z $1 ]] ) && echo "You must provide the distro IMAGE_TAG name (example: ubuntu-18.04) as argument \$1 or set it within your ENV" && exit 1 export IMAGE_TAG=${IMAGE_TAG:-$1} function execute() { - $VERBOSE && echo "--- Executing: $@" - $DRYRUN || "$@" + ${VERBOSE:-true} && echo "--- Executing: $@" + ${DRYRUN:-false} || "$@" } function determine-hash() { @@ -29,10 +28,8 @@ function determine-hash() { function generate_docker_image() { # If we cannot pull the image, we build and push it first. docker login -u $DOCKERHUB_USERNAME -p $DOCKERHUB_PASSWORD - cd ./.cicd - docker build -t $FULL_TAG -f ./${IMAGE_TAG}.dockerfile . + docker build -t $FULL_TAG -f ./docker/${IMAGE_TAG}.dockerfile . docker push $FULL_TAG - cd - } function docker_tag_exists() { @@ -42,8 +39,9 @@ function docker_tag_exists() { ( [[ $EXISTS =~ '404 page not found' ]] || [[ $EXISTS =~ 'manifest unknown' ]] ) && return 1 || return 0 } -if [[ $(uname) != 'Darwin' ]]; then # Darwin doesn't need hash (yet) - determine-hash ".cicd/${IMAGE_TAG}.dockerfile" +# If you're on mac and you want to generate the image, use "force" as $2 +if [[ ! -z $IMAGE_TAG ]]; then # Darwin doesn't need hash (yet) + determine-hash "docker/${IMAGE_TAG}.dockerfile" [[ -z $DETERMINED_HASH ]] && echo "DETERMINED_HASH empty! (check determine-hash function)" && exit 1 export FULL_TAG="eosio/producer:${PROJECT_NAME}-$HASHED_IMAGE_TAG" fi \ No newline at end of file diff --git a/.cicd/docker/.helpers-v33 b/.cicd/docker/.helpers-v33 new file mode 100755 index 00000000000..4f9ff752367 --- /dev/null +++ b/.cicd/docker/.helpers-v33 @@ -0,0 +1,8 @@ +function fold-execute() { + ${TRAVIS:-false} && echo -en "travis_fold:start:$(echo $@)\r" || true + echo "--- Executing: $@" + "$@" + rcode=$? 
+ [ $rcode -eq 0 ] || exit $rcode + ${TRAVIS:-false} && echo -en "travis_fold:end:$(echo $@)\r" || true +} \ No newline at end of file diff --git a/.cicd/amazonlinux-2.dockerfile b/.cicd/docker/amazonlinux-2.dockerfile similarity index 69% rename from .cicd/amazonlinux-2.dockerfile rename to .cicd/docker/amazonlinux-2.dockerfile index 5bdc15d98c5..1c1e8a099b7 100644 --- a/.cicd/amazonlinux-2.dockerfile +++ b/.cicd/docker/amazonlinux-2.dockerfile @@ -72,11 +72,23 @@ ENV PATH=${PATH}:/mongodb-linux-x86_64-amazon-3.6.3/bin RUN curl -LO http://download-ib01.fedoraproject.org/pub/epel/7/x86_64/Packages/c/ccache-3.3.4-1.el7.x86_64.rpm \ && yum install -y ccache-3.3.4-1.el7.x86_64.rpm +# Install Buildkite Agent +RUN echo -e "[buildkite-agent]\nname = Buildkite Pty Ltd\nbaseurl = https://yum.buildkite.com/buildkite-agent/stable/x86_64/\nenabled=1\ngpgcheck=0\npriority=1" > /etc/yum.repos.d/buildkite-agent.repo && \ + yum -y install buildkite-agent + # PRE_COMMANDS: Executed pre-cmake # CMAKE_EXTRAS: Executed right before the cmake path (on the end) -ENV PRE_COMMANDS="export PATH=/usr/lib64/ccache:$PATH &&" +ENV PRE_COMMANDS="export PATH=/usr/lib64/ccache:\$PATH" ENV CMAKE_EXTRAS="-DCMAKE_CXX_COMPILER='clang++' -DCMAKE_C_COMPILER='clang'" -CMD bash -c "$PRE_COMMANDS ccache -s && \ - mkdir /workdir/build && cd /workdir/build && cmake -DCMAKE_BUILD_TYPE='Release' -DCORE_SYMBOL_NAME='SYS' -DOPENSSL_ROOT_DIR='/usr/include/openssl' -DBUILD_MONGO_DB_PLUGIN=true $CMAKE_EXTRAS /workdir && make -j $(getconf _NPROCESSORS_ONLN) && \ - ctest -j$(getconf _NPROCESSORS_ONLN) -LE _tests --output-on-failure -T Test" \ No newline at end of file +# Bring in helpers that provides execute function so we can get better logging in BK and TRAV +COPY ./docker/.helpers-v33 /tmp/.helpers + +CMD bash -c ". /tmp/.helpers && $PRE_COMMANDS && \ + fold-execute ccache -s && \ + mkdir /workdir/build && cd /workdir/build && fold-execute cmake -DCMAKE_BUILD_TYPE='Release' -DCORE_SYMBOL_NAME='SYS' -DOPENSSL_ROOT_DIR='/usr/include/openssl' -DBUILD_MONGO_DB_PLUGIN=true $CMAKE_EXTRAS /workdir && \ + fold-execute make -j $(getconf _NPROCESSORS_ONLN) && \ + if ${ENABLE_PARALLEL_TESTS:-true}; then fold-execute ctest -j$(getconf _NPROCESSORS_ONLN) -LE _tests --output-on-failure -T Test; fi && \ + if ${ENABLE_SERIAL_TESTS:-true}; then mkdir -p ./mongodb && fold-execute mongod --dbpath ./mongodb --fork --logpath mongod.log && fold-execute ctest -L nonparallelizable_tests --output-on-failure -T Test; fi && \ + if ${ENABLE_LR_TESTS:-false}; then fold-execute ctest -L long_running_tests --output-on-failure -T Test; fi && \ + if ! ${TRAVIS:-false}; then cd .. && tar -pczf build.tar.gz build && buildkite-agent artifact upload build.tar.gz --agent-access-token $BUILDKITE_AGENT_ACCESS_TOKEN; fi" diff --git a/.cicd/centos-7.dockerfile b/.cicd/docker/centos-7.dockerfile similarity index 73% rename from .cicd/centos-7.dockerfile rename to .cicd/docker/centos-7.dockerfile index a0a342c8857..b53ca471ef7 100644 --- a/.cicd/centos-7.dockerfile +++ b/.cicd/docker/centos-7.dockerfile @@ -7,7 +7,7 @@ RUN yum update -y \ && yum --enablerepo=extras install -y which git autoconf automake libtool make bzip2 doxygen \ graphviz bzip2-devel openssl-devel gmp-devel ocaml libicu-devel \ python python-devel rh-python36 gettext-devel file libusbx-devel \ - libcurl-devel patch + libcurl-devel patch # Build appropriate version of CMake. 
RUN curl -LO https://cmake.org/files/v3.13/cmake-3.13.2.tar.gz \ @@ -88,10 +88,22 @@ RUN cd /usr/lib64/ccache && ln -s ../../bin/ccache c++ ## We need to tell ccache to actually use devtoolset-8 instead of the default system one (ccache resets anything set in PATH when it launches) ENV CCACHE_PATH="/opt/rh/devtoolset-8/root/usr/bin" +# Install Buildkite Agent +RUN echo -e "[buildkite-agent]\nname = Buildkite Pty Ltd\nbaseurl = https://yum.buildkite.com/buildkite-agent/stable/x86_64/\nenabled=1\ngpgcheck=0\npriority=1" > /etc/yum.repos.d/buildkite-agent.repo && \ + yum -y install buildkite-agent + # PRE_COMMANDS: Executed pre-cmake # CMAKE_EXTRAS: Executed right before the cmake path (on the end) -ENV PRE_COMMANDS="source /opt/rh/devtoolset-8/enable && source /opt/rh/rh-python36/enable && export PATH=/usr/lib64/ccache:$PATH &&" +ENV PRE_COMMANDS="source /opt/rh/devtoolset-8/enable && source /opt/rh/rh-python36/enable && export PATH=/usr/lib64/ccache:\$PATH" + +# Bring in helpers that provides execute function so we can get better logging in BK and TRAV +COPY ./docker/.helpers-v33 /tmp/.helpers -CMD bash -c "$PRE_COMMANDS ccache -s && \ - mkdir /workdir/build && cd /workdir/build && cmake -DCMAKE_BUILD_TYPE='Release' -DCORE_SYMBOL_NAME='SYS' -DOPENSSL_ROOT_DIR='/usr/include/openssl' -DBUILD_MONGO_DB_PLUGIN=true $CMAKE_EXTRAS /workdir && make -j $(getconf _NPROCESSORS_ONLN) && \ - ctest -j$(getconf _NPROCESSORS_ONLN) -LE _tests --output-on-failure -T Test" \ No newline at end of file +CMD bash -c ". /tmp/.helpers && $PRE_COMMANDS && \ + fold-execute ccache -s && \ + mkdir /workdir/build && cd /workdir/build && fold-execute cmake -DCMAKE_BUILD_TYPE='Release' -DCORE_SYMBOL_NAME='SYS' -DOPENSSL_ROOT_DIR='/usr/include/openssl' -DBUILD_MONGO_DB_PLUGIN=true $CMAKE_EXTRAS /workdir && \ + fold-execute make -j $(getconf _NPROCESSORS_ONLN) && \ + if ${ENABLE_PARALLEL_TESTS:-true}; then fold-execute ctest -j$(getconf _NPROCESSORS_ONLN) -LE _tests --output-on-failure -T Test; fi && \ + if ${ENABLE_SERIAL_TESTS:-true}; then mkdir -p ./mongodb && fold-execute mongod --dbpath ./mongodb --fork --logpath mongod.log && fold-execute ctest -L nonparallelizable_tests --output-on-failure -T Test; fi && \ + if ${ENABLE_LR_TESTS:-false}; then fold-execute ctest -L long_running_tests --output-on-failure -T Test; fi && \ + if ! ${TRAVIS:-false}; then cd .. && tar -pczf build.tar.gz build && buildkite-agent artifact upload build.tar.gz --agent-access-token $BUILDKITE_AGENT_ACCESS_TOKEN; fi" diff --git a/.cicd/pinned_toolchain.cmake b/.cicd/docker/pinned_toolchain.cmake old mode 100644 new mode 100755 similarity index 100% rename from .cicd/pinned_toolchain.cmake rename to .cicd/docker/pinned_toolchain.cmake diff --git a/.cicd/ubuntu-16.04.dockerfile b/.cicd/docker/ubuntu-16.04.dockerfile similarity index 77% rename from .cicd/ubuntu-16.04.dockerfile rename to .cicd/docker/ubuntu-16.04.dockerfile index e83384cea65..797cd0fdb12 100644 --- a/.cicd/ubuntu-16.04.dockerfile +++ b/.cicd/docker/ubuntu-16.04.dockerfile @@ -5,7 +5,7 @@ RUN apt-get update && apt-get upgrade -y \ && DEBIAN_FRONTEND=noninteractive apt-get install -y build-essential git automake \ libbz2-dev libssl-dev doxygen graphviz libgmp3-dev autotools-dev libicu-dev \ python2.7 python2.7-dev python3 python3-dev autoconf libtool curl zlib1g-dev \ - sudo ruby libusb-1.0-0-dev libcurl4-gnutls-dev pkg-config + sudo ruby libusb-1.0-0-dev libcurl4-gnutls-dev pkg-config apt-transport-https # Build appropriate version of CMake. 
RUN curl -LO https://cmake.org/files/v3.13/cmake-3.13.2.tar.gz \ @@ -20,10 +20,9 @@ RUN curl -LO https://cmake.org/files/v3.13/cmake-3.13.2.tar.gz \ # Build appropriate version of Clang. RUN mkdir -p /root/tmp && cd /root/tmp && git clone --single-branch --branch release_80 https://git.llvm.org/git/llvm.git clang8 && cd clang8 && git checkout 18e41dc && cd tools && git clone --single-branch --branch release_80 https://git.llvm.org/git/lld.git && cd lld && git checkout d60a035 && cd ../ && git clone --single-branch --branch release_80 https://git.llvm.org/git/polly.git && cd polly && git checkout 1bc06e5 && cd ../ && git clone --single-branch --branch release_80 https://git.llvm.org/git/clang.git clang && cd clang && git checkout a03da8b && cd tools && mkdir extra && cd extra && git clone --single-branch --branch release_80 https://git.llvm.org/git/clang-tools-extra.git && cd clang-tools-extra && git checkout 6b34834 && cd .. && cd ../../../../projects && git clone --single-branch --branch release_80 https://git.llvm.org/git/libcxx.git && cd libcxx && git checkout 1853712 && cd ../ && git clone --single-branch --branch release_80 https://git.llvm.org/git/libcxxabi.git && cd libcxxabi && git checkout d7338a4 && cd ../ && git clone --single-branch --branch release_80 https://git.llvm.org/git/libunwind.git && cd libunwind && git checkout 57f6739 && cd ../ && git clone --single-branch --branch release_80 https://git.llvm.org/git/compiler-rt.git && cd compiler-rt && git checkout 5bc7979 && cd ../ && cd /root/tmp/clang8 && mkdir build && cd build && cmake -G 'Unix Makefiles' -DCMAKE_INSTALL_PREFIX='/usr/local' -DLLVM_BUILD_EXTERNAL_COMPILER_RT=ON -DLLVM_BUILD_LLVM_DYLIB=ON -DLLVM_ENABLE_LIBCXX=ON -DLLVM_ENABLE_RTTI=ON -DLLVM_INCLUDE_DOCS=OFF -DLLVM_OPTIMIZED_TABLEGEN=ON -DLLVM_TARGETS_TO_BUILD=all -DCMAKE_BUILD_TYPE=Release .. && make -j$(nproc) && make install \ && cd / && rm -rf /root/tmp/clang8 +COPY ./docker/pinned_toolchain.cmake /tmp/pinned_toolchain.cmake -COPY ./pinned_toolchain.cmake /tmp/pinned_toolchain.cmake - -# # Build appropriate version of LLVM. +# Build appropriate version of LLVM. RUN git clone --depth 1 --single-branch --branch release_40 https://github.com/llvm-mirror/llvm.git llvm \ && cd llvm \ && mkdir build \ @@ -83,11 +82,23 @@ RUN curl -LO https://github.com/ccache/ccache/releases/download/v3.4.1/ccache-3. 
&& make install \ && cd / && rm -rf ccache-3.4.1/ +RUN echo "deb https://apt.buildkite.com/buildkite-agent stable main" > /etc/apt/sources.list.d/buildkite-agent.list \ + && apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 32A37959C2FA5C3C99EFBC32A79206696452D198 \ + && apt-get update && apt-get install -y buildkite-agent + # PRE_COMMANDS: Executed pre-cmake # CMAKE_EXTRAS: Executed right before the cmake path (on the end) -ENV PRE_COMMANDS="export PATH=/usr/lib/ccache:$PATH &&" +ENV PRE_COMMANDS="export PATH=/usr/lib/ccache:\$PATH" ENV CMAKE_EXTRAS="$CMAKE_EXTRAS -DCMAKE_TOOLCHAIN_FILE='/tmp/pinned_toolchain.cmake' -DCMAKE_CXX_COMPILER_LAUNCHER=ccache" -CMD bash -c "$PRE_COMMANDS ccache -s && \ - mkdir /workdir/build && cd /workdir/build && cmake -DCMAKE_BUILD_TYPE='Release' -DCORE_SYMBOL_NAME='SYS' -DOPENSSL_ROOT_DIR='/usr/include/openssl' -DBUILD_MONGO_DB_PLUGIN=true $CMAKE_EXTRAS /workdir && make -j $(getconf _NPROCESSORS_ONLN) && \ - ctest -j$(getconf _NPROCESSORS_ONLN) -LE _tests --output-on-failure -T Test" \ No newline at end of file +# Bring in helpers that provides execute function so we can get better logging in BK and TRAV +COPY ./docker/.helpers-v33 /tmp/.helpers + +CMD bash -c ". /tmp/.helpers && $PRE_COMMANDS && \ + fold-execute ccache -s && \ + mkdir /workdir/build && cd /workdir/build && fold-execute cmake -DCMAKE_BUILD_TYPE='Release' -DCORE_SYMBOL_NAME='SYS' -DOPENSSL_ROOT_DIR='/usr/include/openssl' -DBUILD_MONGO_DB_PLUGIN=true $CMAKE_EXTRAS /workdir && \ + fold-execute make -j $(getconf _NPROCESSORS_ONLN) && \ + if ${ENABLE_PARALLEL_TESTS:-true}; then fold-execute ctest -j$(getconf _NPROCESSORS_ONLN) -LE _tests --output-on-failure -T Test; fi && \ + if ${ENABLE_SERIAL_TESTS:-true}; then mkdir -p ./mongodb && fold-execute mongod --dbpath ./mongodb --fork --logpath mongod.log && fold-execute ctest -L nonparallelizable_tests --output-on-failure -T Test; fi && \ + if ${ENABLE_LR_TESTS:-false}; then fold-execute ctest -L long_running_tests --output-on-failure -T Test; fi && \ + if ! ${TRAVIS:-false}; then cd .. 
&& tar -pczf build.tar.gz build && buildkite-agent artifact upload build.tar.gz --agent-access-token $BUILDKITE_AGENT_ACCESS_TOKEN; fi" \ No newline at end of file diff --git a/.cicd/ubuntu-18.04.dockerfile b/.cicd/docker/ubuntu-18.04.dockerfile similarity index 65% rename from .cicd/ubuntu-18.04.dockerfile rename to .cicd/docker/ubuntu-18.04.dockerfile index 1561eccfc2b..bd440d86f7b 100644 --- a/.cicd/ubuntu-18.04.dockerfile +++ b/.cicd/docker/ubuntu-18.04.dockerfile @@ -59,11 +59,23 @@ RUN curl -L https://github.com/mongodb/mongo-cxx-driver/archive/r3.4.0.tar.gz -o ENV PATH=${PATH}:/mongodb-linux-x86_64-ubuntu1804-4.1.1/bin +RUN echo "deb https://apt.buildkite.com/buildkite-agent stable main" > /etc/apt/sources.list.d/buildkite-agent.list \ + && apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 32A37959C2FA5C3C99EFBC32A79206696452D198 \ + && apt-get update && apt-get install -y apt-transport-https && apt-get install -y buildkite-agent + # PRE_COMMANDS: Executed pre-cmake # CMAKE_EXTRAS: Executed right before the cmake path (on the end) -ENV PRE_COMMANDS="export PATH=/usr/lib/ccache:$PATH &&" +ENV PRE_COMMANDS="export PATH=/usr/lib/ccache:\$PATH" ENV CMAKE_EXTRAS="-DCMAKE_CXX_COMPILER='clang++' -DCMAKE_C_COMPILER='clang'" -CMD bash -c "$PRE_COMMANDS ccache -s && \ - mkdir /workdir/build && cd /workdir/build && cmake -DCMAKE_BUILD_TYPE='Release' -DCORE_SYMBOL_NAME='SYS' -DOPENSSL_ROOT_DIR='/usr/include/openssl' -DBUILD_MONGO_DB_PLUGIN=true $CMAKE_EXTRAS /workdir && make -j $(getconf _NPROCESSORS_ONLN) && \ - ctest -j$(getconf _NPROCESSORS_ONLN) -LE _tests --output-on-failure -T Test" +# Bring in helpers that provides execute function so we can get better logging in BK and TRAV +COPY ./docker/.helpers-v33 /tmp/.helpers + +CMD bash -c ". /tmp/.helpers && $PRE_COMMANDS && \ + fold-execute ccache -s && \ + mkdir /workdir/build && cd /workdir/build && fold-execute cmake -DCMAKE_BUILD_TYPE='Release' -DCORE_SYMBOL_NAME='SYS' -DOPENSSL_ROOT_DIR='/usr/include/openssl' -DBUILD_MONGO_DB_PLUGIN=true $CMAKE_EXTRAS /workdir && \ + fold-execute make -j $(getconf _NPROCESSORS_ONLN) && \ + if ${ENABLE_PARALLEL_TESTS:-true}; then fold-execute ctest -j$(getconf _NPROCESSORS_ONLN) -LE _tests --output-on-failure -T Test; fi && \ + if ${ENABLE_SERIAL_TESTS:-true}; then mkdir -p ./mongodb && fold-execute mongod --dbpath ./mongodb --fork --logpath mongod.log && fold-execute ctest -L nonparallelizable_tests --output-on-failure -T Test; fi && \ + if ${ENABLE_LR_TESTS:-false}; then fold-execute ctest -L long_running_tests --output-on-failure -T Test; fi && \ + if ! ${TRAVIS:-false}; then cd .. && tar -pczf build.tar.gz build && buildkite-agent artifact upload build.tar.gz --agent-access-token $BUILDKITE_AGENT_ACCESS_TOKEN; fi" diff --git a/.cicd/generate-base-images.sh b/.cicd/generate-base-images.sh index 86fff371438..71cd07f1878 100755 --- a/.cicd/generate-base-images.sh +++ b/.cicd/generate-base-images.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash set -eo pipefail -cd $( dirname "${BASH_SOURCE[0]}" )/.. # Ensure we're in the repo root and not inside of scripts -. ./.cicd/.helpers +cd $( dirname "${BASH_SOURCE[0]}" ) # Ensure we're in the .cicd dir +. 
./.helpers +( [[ -z $IMAGE_TAG ]] && [[ -z $1 ]] ) && echo "You must provide the distro IMAGE_TAG name (example: ubuntu-18.04) as argument \$1 or set it within your ENV" && exit 1 echo "Looking for $FULL_TAG" docker_tag_exists $FULL_TAG && echo "$FULL_TAG already exists" || generate_docker_image diff --git a/.cicd/metrics/test-metrics.js b/.cicd/metrics/test-metrics.js new file mode 100755 index 00000000000..b995134d9b1 --- /dev/null +++ b/.cicd/metrics/test-metrics.js @@ -0,0 +1,431 @@ +#!/usr/bin/env node +/* includes */ +const execSync = require('child_process').execSync; // run shell commands +const fetch = require('node-fetch'); // downloading +const fs = require('fs'); // file stream +const XML = require('xml2js'); // parse xml + +/* globals */ +const buildkiteAccessToken = `?access_token=${process.env.BUILDKITE_API_KEY}`; // import buildkite access token from environment +const debug = (process.env.DEBUG === 'true') ? true : false; +let errorCount = 0; // count number of jobs which caused an error +const EXIT_SUCCESS = 0; +const inBuildkite = (process.env.BUILDKITE === 'true') ? true : false; +const outputFile = 'test-metrics.json'; +const pipelineWhitelist = // the pipelines for which we run diagnostics +[ + 'eosio', + 'eosio-base-images', + 'eosio-beta', + 'eosio-build-unpinned', + 'eosio-debug', + 'eosio-lrt', + 'eosio-security' +]; + +/* functions */ +// given a url string, download a text document +async function download(url) +{ + if (debug) console.log(`download(${url.replace(buildkiteAccessToken, '')})`); // DEBUG + const httpResponse = await fetch(url); + const body = await httpResponse.text(); + if (isNullOrEmpty(body)) + { + console.log(`ERROR: URL returned nothing! URL: ${url.replace(buildkiteAccessToken, '')}`); + const error = + { + http: { body, response: httpResponse, url}, + message: 'http body is null or empty', + origin: 'download()', + } + throw error; + } + if (debug) console.log('Download complete.'); // DEBUG + return body; +} + +// given a pipeline and a build number, get a build object +async function getBuild(pipeline, buildNumber) +{ + if (debug) console.log(`getBuild(${pipeline}, ${buildNumber})`); // DEBUG + const httpResponse = await fetch(`https://api.buildkite.com/v2/organizations/EOSIO/pipelines/${pipeline}/builds/${buildNumber}${buildkiteAccessToken}`); + return httpResponse.json(); +} + +// given a buildkite job, return the environmental variables +async function getEnvironment(job) +{ + if (debug) console.log('getEnvironment()'); // DEBUG + const httpResponse = await fetch(`${job.build_url}/jobs/${job.id}/env${buildkiteAccessToken}`); + const environment = await httpResponse.json(); + return environment.env; +} + +// given a string to search, a key as regex or a string, and optionally a start index, return the lowest line number containing the key +function getLineNumber(text, key, startIndex) +{ + if (debug) console.log('getLineNumber()'); // DEBUG + const begin = (isNullOrEmpty(startIndex) || !Number.isInteger(startIndex) || startIndex < 1) ? 0 : startIndex; + let found = false; + let lineNumber = 0; + const regex = (key instanceof RegExp); + text.split('\n').some((line) => + { + if (lineNumber >= begin && ((regex && key.test(line)) || (!regex && line.includes(key)))) + { + found = true; + return true; // c-style break + } + lineNumber += 1; + return false; // for the linter, plz delete when linter is fixed + }); + return (found) ? 
lineNumber : -1; +} + +// given a buildkite job, return a sanitized log file +async function getLog(job) +{ + if (debug) console.log(`getLog(${job.raw_log_url})`); // DEBUG + const logText = await download(job.raw_log_url + buildkiteAccessToken); + // returns log lowercase, with single spaces and '\n' only, and only ascii-printable characters + return sanitize(logText); // made this a separate function for unit testing purposes +} + +// given a Buildkite environment, return the operating system used +function getOS(environment) +{ + if (debug) console.log(`getOS(${environment.BUILDKITE_LABEL})`); // DEBUG + if (isNullOrEmpty(environment) || isNullOrEmpty(environment.BUILDKITE_LABEL)) + { + console.log('ERROR: getOS() called with empty environment.BUILDKITE_LABEL!'); + console.log(JSON.stringify(environment)); + return null; + } + const label = environment.BUILDKITE_LABEL.toLowerCase(); + if ((/aws(?!.*[23])/.test(label) || /amazon(?!.*[23])/.test(label))) + return 'Amazon Linux 1'; + if (/aws.*2/.test(label) || /amazon.*2/.test(label)) + return 'Amazon Linux 2'; + if (/centos(?!.*[89])/.test(label)) + return 'CentOS 7'; + if (/fedora(?!.*2[89])/.test(label) && /fedora(?!.*3\d)/.test(label)) + return 'Fedora 27'; + if (/high.*sierra/.test(label)) + return 'High Sierra'; + if (/mojave/.test(label)) + return 'Mojave'; + if (/ubuntu.*16.*04/.test(label) || /ubuntu.*16(?!.*10)/.test(label)) + return 'Ubuntu 16.04'; + if (/ubuntu.*18.*04/.test(label) || /ubuntu.*18(?!.*10)/.test(label)) + return 'Ubuntu 18.04'; + if (/docker/.test(label)) + return 'Docker'; + return 'Unknown'; +} + +// given a Buildkite job, return the test-results.xml file as JSON +async function getXML(job) +{ + if (debug) console.log('getXML()'); // DEBUG + const xmlFilename = 'test-results.xml'; + const artifacts = await download(job.artifacts_url + buildkiteAccessToken); + const testResultsArtifact = JSON.parse(artifacts).filter(artifact => artifact.filename === xmlFilename); + if (isNullOrEmpty(testResultsArtifact)) + { + console.log(`WARNING: No ${xmlFilename} found for "${job.name}"! Link: ${job.web_url}`); + return null; + } + const urlBuildkite = testResultsArtifact[0].download_url; + const rawXML = await download(urlBuildkite + buildkiteAccessToken); + const xmlOptions = + { + attrNameProcessors: [function lower(name) { return name.toLowerCase(); }], + explicitArray: false, // do not put single strings in single-element arrays + mergeAttrs: true, // make attributes children of their node + normalizeTags: true, // convert all tag names to lowercase + }; + let xmlError, xmlTestResults; + await XML.parseString(rawXML, xmlOptions, (err, result) => {xmlTestResults = result; xmlError = err;}); + if (isNullOrEmpty(xmlError)) + return xmlTestResults; + console.log(`WARNING: Failed to parse xml for "${job.name}" job! 
Link: ${job.web_url}`);
+    console.log(JSON.stringify(xmlError));
+    return null;
+}
+
+// test if variable is empty
+function isNullOrEmpty(str)
+{
+    return (str === null || str === undefined || str.length === 0 || /^\s*$/.test(str));
+}
+
+// return array of test results from a buildkite job log
+function parseLog(logText)
+{
+    if (debug) console.log('parseLog()'); // DEBUG
+    const lines = logText.split('\n');
+    const resultLines = lines.filter(line => /test\s+#\d+/.test(line)); // 'grep' for the test result lines
+    // parse the strings and make test records
+    return resultLines.map((line) =>
+    {
+        const y = line.trim().split(/test\s+#\d+/).pop(); // remove everything before the test declaration
+        const parts = y.split(/\s+/).slice(1, -1); // split the line and remove the test number and time unit
+        const testName = parts[0];
+        const testTime = parts[(parts.length - 1)];
+        const rawResult = parts.slice(1, -1).join();
+        let testResult;
+        if (rawResult.includes('failed'))
+            testResult = 'Failed';
+        else if (rawResult.includes('passed'))
+            testResult = 'Passed';
+        else
+            testResult = 'Exception';
+        return { testName, testResult, testTime }; // create a test record
+    });
+}
+
+// return array of test results from an xUnit-formatted JSON object
+function parseXunit(xUnit)
+{
+    if (debug) console.log('parseXunit()'); // DEBUG
+    if (isNullOrEmpty(xUnit))
+    {
+        console.log('WARNING: xUnit is empty!');
+        return null;
+    }
+    return xUnit.site.testing.test.map((test) =>
+    {
+        const testName = test.name;
+        const testTime = test.results.namedmeasurement.filter(x => /execution\s+time/.test(x.name.toLowerCase()))[0].value;
+        let testResult;
+        if (test.status.includes('failed'))
+            testResult = 'Failed';
+        else if (test.status.includes('passed'))
+            testResult = 'Passed';
+        else
+            testResult = 'Exception';
+        return { testName, testResult, testTime };
+    });
+}
+
+// returns text lowercase, with single spaces and '\n' only, and only ascii-printable characters
+function sanitize(text)
+{
+    if (debug) console.log(`sanitize(text) where text.length = ${text.length} bytes`); // DEBUG
+    const chunkSize = 131072; // process text in 128 kB chunks
+    if (text.length > chunkSize)
+        return sanitize(text.slice(0, chunkSize)).concat(sanitize(text.slice(chunkSize)));
+    return text
+        .replace(/(?!\n)\r(?!\n)/g, '\n').replace(/\r/g, '') // convert all line endings to '\n'
+        .replace(/[^\S\n]+/g, ' ') // convert all whitespace to ' '
+        .replace(/[^ -~\n]+/g, '') // remove non-printable characters
+        .toLowerCase();
+}
+
+// input is array of whole lines containing "test #" and ("failed" or "exception")
+function testDiagnostics(test, logText)
+{
+    if (debug)
+    {
+        console.log(`testDiagnostics(test, logText) where logText.length = ${logText.length} bytes and test is`); // DEBUG
+        console.log(JSON.stringify(test));
+    }
+    // get basic information
+    const testResultLine = new RegExp(`test\\s+#\\d+.*${test.testName}`, 'g'); // regex defining "test #" line
+    const startIndex = getLineNumber(logText, testResultLine);
+    const output = { errorMsg: null, lineNumber: startIndex + 1, stackTrace: null }; // default output
+    // filter tests
+    if (test.testResult.toLowerCase() === 'passed')
+        return output;
+    output.errorMsg = 'test diagnostics are not enabled for this pipeline';
+    if (!pipelineWhitelist.includes(test.pipeline))
+        return output;
+    // diagnostics
+    if (debug) console.log('Running diagnostics...'); // DEBUG
+    output.errorMsg = 'uncategorized';
+    const testLog = logText.split(testResultLine)[1].split(/test\s*#/)[0].split('\n');
// get log output from this test only, as array of lines + let errorLine = testLog[0]; // first line, from "test ## name" to '\n' exclusive + if (/\.+ *\** *not run\s+0+\.0+ sec$/.test(errorLine)) // not run + output.errorMsg = 'test not run'; + else if (/\.+ *\** *time *out\s+\d+\.\d+ sec$/.test(errorLine)) // timeout + output.errorMsg = 'test timeout'; + else if (/exception/.test(errorLine)) // test exception + output.errorMsg = errorLine.split('exception')[1].replace(/[: \d.]/g, '').replace(/sec$/, ''); // isolate the error message after exception + else if (/fc::.*exception/.test(testLog.filter(line => !isNullOrEmpty(line))[1])) // fc exception + { + [, errorLine] = testLog.filter(line => !isNullOrEmpty(line)); // get first line + output.errorMsg = `fc::${errorLine.split('::')[1].replace(/['",]/g, '').split(' ')[0]}`; // isolate fx exception body + } + else if (testLog.join('\n').includes('ctest:')) // ctest exception + { + [errorLine] = testLog.filter(line => line.includes('ctest:')); + output.errorMsg = `ctest:${errorLine.split('ctest:')[1]}`; + } + else if (!isNullOrEmpty(testLog.filter(line => /boost.+exception/.test(line)))) // boost exception + { + [errorLine] = testLog.filter(line => /boost.+exception/.test(line)); + output.errorMsg = `boost: ${errorLine.replace(/[()]/g, '').split(/: (.+)/)[1]}`; // capturing parenthesis, split only at first ' :' + output.stackTrace = testLog.filter(line => /thread-\d+/.test(line))[0].split('thread-')[1].replace(/^\d+/, '').trim().replace(/[[]\d+m$/, ''); // get the bottom of the stack trace + } + else if (/unit[-_. ]+test/.test(test.testName) || /plugin[-_. ]+test/.test(test.testName)) // unit test, application exception + { + if (!isNullOrEmpty(testLog.filter(line => line.includes('exception: ')))) + { + [errorLine] = testLog.filter(line => line.includes('exception: ')); + [, output.errorMsg] = errorLine.replace(/[()]/g, '').split(/: (.+)/); // capturing parenthesis, split only at first ' :' + output.stackTrace = testLog.filter(line => /thread-\d+/.test(line))[0].split('thread-')[1].replace(/^\d+/, '').trim().replace(/[[]\d+m$/, ''); // get the bottom of the stack trace + } + // else uncategorized unit test + } + // else integration test, add cross-referencing code here (or uncategorized) + if (errorLine !== testLog[0]) // get real line number from log file + output.lineNumber = getLineNumber(logText, errorLine, startIndex) + 1; + return output; +} + +// return test metrics given a buildkite job or build +async function testMetrics(buildkiteObject) +{ + if (!isNullOrEmpty(buildkiteObject.type)) // input is a Buildkite job object + { + const job = buildkiteObject; + console.log(`Processing test metrics for "${job.name}"${(inBuildkite) ? '' : ` at ${job.web_url}`}...`); + if (isNullOrEmpty(job.exit_status)) + { + console.log(`${(inBuildkite) ? '+++ :warning: ' : ''}WARNING: "${job.name}" was skipped!`); + return null; + } + // get test results + const logText = await getLog(job); + let testResults; + let xUnit; + try + { + xUnit = await getXML(job); + testResults = parseXunit(xUnit); + } + catch (error) + { + console.log(`XML processing failed for "${job.name}"! 
Link: ${job.web_url}`);
+            console.log(JSON.stringify(error));
+            testResults = null;
+        }
+        finally
+        {
+            if (isNullOrEmpty(testResults))
+                testResults = parseLog(logText);
+        }
+        // get test metrics
+        const env = await getEnvironment(job);
+        env.BUILDKITE_REPO = env.BUILDKITE_REPO.replace(new RegExp('^git@github.com:(EOSIO/)?'), '').replace(new RegExp('.git$'), '');
+        const metrics = [];
+        const os = getOS(env);
+        testResults.forEach((result) =>
+        {
+            // add test properties
+            const test =
+            {
+                ...result, // add testName, testResult, testTime
+                agentName: env.BUILDKITE_AGENT_NAME,
+                agentRole: env.BUILDKITE_AGENT_META_DATA_QUEUE || env.BUILDKITE_AGENT_META_DATA_ROLE,
+                branch: env.BUILDKITE_BRANCH,
+                buildNumber: env.BUILDKITE_BUILD_NUMBER,
+                commit: env.BUILDKITE_COMMIT,
+                job: env.BUILDKITE_LABEL,
+                os,
+                pipeline: env.BUILDKITE_PIPELINE_SLUG,
+                repo: env.BUILDKITE_REPO,
+                testTime: parseFloat(result.testTime),
+                url: job.web_url,
+            };
+            metrics.push({ ...test, ...testDiagnostics(test, logText) });
+        });
+        return metrics;
+    }
+    else if (!isNullOrEmpty(buildkiteObject.number)) // input is a Buildkite build object
+    {
+        const build = buildkiteObject;
+        console.log(`Processing test metrics for ${build.pipeline.slug} build ${build.number}${(inBuildkite) ? '' : ` at ${build.web_url}`}...`);
+        let metrics = [], promises = [];
+        // process test metrics
+        build.jobs.filter(job => job.type === 'script' && /test/.test(job.name.toLowerCase()) && ! /test metrics/.test(job.name.toLowerCase())).forEach((job) =>
+        {
+            promises.push(
+                testMetrics(job)
+                    .then((moreMetrics) => {
+                        if (!isNullOrEmpty(moreMetrics))
+                            metrics = metrics.concat(moreMetrics);
+                        else
+                            console.log(`${(inBuildkite) ? '+++ :warning: ' : ''}WARNING: "${job.name}" metrics are empty!\nmetrics = ${JSON.stringify(moreMetrics)}`);
+                    }).catch((error) => {
+                        console.log(`${(inBuildkite) ? '+++ :no_entry: ' : ''}ERROR: Failed to process test metrics for "${job.name}"! Link: ${job.web_url}`);
+                        console.log(JSON.stringify(error));
+                        errorCount++;
+                    })
+            );
+        });
+        await Promise.all(promises);
+        return metrics;
+    }
+    else // something else
+    {
+        console.log(`${(inBuildkite) ? '+++ :no_entry: ' : ''}ERROR: Buildkite object not recognized or not a test step!`);
+        console.log(JSON.stringify({buildkiteObject}));
+        return null;
+    }
+}
+
+/* main */
+async function main()
+{
+    if (debug) console.log(`$ ${process.argv.join(' ')}`);
+    let build, metrics = null;
+    console.log(`${(inBuildkite) ? '+++ :evergreen_tree: ' : ''}Getting information from environment...`);
+    const buildNumber = process.env.BUILDKITE_BUILD_NUMBER || process.argv[2];
+    const pipeline = process.env.BUILDKITE_PIPELINE_SLUG || process.argv[3];
+    if (debug)
+    {
+        console.log(`BUILDKITE=${process.env.BUILDKITE}`);
+        console.log(`BUILDKITE_BUILD_NUMBER=${process.env.BUILDKITE_BUILD_NUMBER}`);
+        console.log(`BUILDKITE_PIPELINE_SLUG=${process.env.BUILDKITE_PIPELINE_SLUG}`);
+        console.log(' State:');
+        console.log(`inBuildkite = "${inBuildkite}"`);
+        console.log(`buildNumber = "${buildNumber}"`);
+        console.log(`pipeline = "${pipeline}"`);
+    }
+    if (isNullOrEmpty(buildNumber) || isNullOrEmpty(pipeline) || isNullOrEmpty(process.env.BUILDKITE_API_KEY))
+    {
+        console.log(`${(inBuildkite) ?
'+++ :no_entry: ' : ''}ERROR: Missing required inputs!`); + if (isNullOrEmpty(process.env.BUILDKITE_API_KEY)) console.log('- Buildkite API key, as BUILDKITE_API_KEY environment variable'); + if (isNullOrEmpty(buildNumber)) console.log('- Build Number, as BUILDKITE_BUILD_NUMBER or argument 1'); + if (isNullOrEmpty(pipeline)) console.log('- Pipeline Slug, as BUILDKITE_PIPELINE_SLUG or argument 2'); + errorCount = -1; + } + else + { + console.log(`${(inBuildkite) ? '+++ :bar_chart: ' : ''}Processing test metrics...`); + build = await getBuild(pipeline, buildNumber); + metrics = await testMetrics(build); + console.log('Done processing test metrics.'); + } + console.log(`${(inBuildkite) ? '+++ :pencil: ' : ''}Writing to file...`); + fs.writeFileSync(outputFile, JSON.stringify({ metrics })); + console.log(`Saved metrics to "${outputFile}" in "${process.cwd()}".`); + if (inBuildkite) + { + console.log('+++ :arrow_up: Uploading artifact...'); + execSync(`buildkite-agent artifact upload ${outputFile}`); + } + if (errorCount === 0) + console.log(`${(inBuildkite) ? '+++ :white_check_mark: ' : ''}Done!`); + else + { + console.log(`${(inBuildkite) ? '+++ :warning: ' : ''}Finished with errors.`); + console.log(`Please send automation a link to this job${(isNullOrEmpty(build)) ? '.' : `: ${build.web_url}`}`); + console.log('@kj4ezj or @zreyn on Telegram'); + } + return (inBuildkite) ? process.exit(EXIT_SUCCESS) : process.exit(errorCount); +}; + +main(); \ No newline at end of file diff --git a/.cicd/metrics/test-metrics.tar.gz b/.cicd/metrics/test-metrics.tar.gz new file mode 100644 index 00000000000..2381787ca06 Binary files /dev/null and b/.cicd/metrics/test-metrics.tar.gz differ diff --git a/.cicd/package-builder.sh b/.cicd/package-builder.sh new file mode 100755 index 00000000000..ed21eb374a4 --- /dev/null +++ b/.cicd/package-builder.sh @@ -0,0 +1,54 @@ +#!/usr/bin/env bash +set -eo pipefail +cd $( dirname "${BASH_SOURCE[0]}" ) # Ensure we're in the .cicd dir +. ./.helpers +if [[ $(uname) == 'Darwin' ]]; then + cd ../ + echo 'Darwin family detected, building for brew.' + [[ -z $ARTIFACT ]] && ARTIFACT='*.rb;*.tar.gz' + PACKAGE_TYPE='brew' +else + cd /workdir + . /etc/os-release + if [[ "$ID_LIKE" == 'debian' || "$ID" == 'debian' ]]; then + echo 'Debian family detected, building for dpkg.' + [[ -z $ARTIFACT ]] && ARTIFACT='*.deb' + PACKAGE_TYPE='deb' + elif [[ "$ID_LIKE" == 'rhel fedora' || "$ID" == 'fedora' ]]; then + echo 'Fedora family detected, building for RPM.' + [[ -z $ARTIFACT ]] && ARTIFACT='*.rpm' + PACKAGE_TYPE='rpm' + mkdir -p ~/rpmbuild/BUILD + mkdir -p ~/rpmbuild/BUILDROOT + mkdir -p ~/rpmbuild/RPMS + mkdir -p ~/rpmbuild/SOURCES + mkdir -p ~/rpmbuild/SPECS + mkdir -p ~/rpmbuild/SRPMS + yum install -y rpm-build + elif [[ $ID == 'amzn' ]]; then + echo "SKIPPED: We do not generate $NAME packages since they use rpms created from Centos." + exit 0 + else + echo 'ERROR: Could not determine which operating system this script is running on!' + uname + echo "ID_LIKE=\"$ID_LIKE\"" + cat /etc/os-release + exit 1 + fi +fi +BASE_COMMIT=$(cat build/programs/nodeos/config.hpp | grep 'version' | awk '{print $5}' | tr -d ';') +BASE_COMMIT="${BASE_COMMIT:2:42}" +echo "Found build against $BASE_COMMIT." 
+cd build/packages +chmod 755 ./*.sh +./generate_package.sh $PACKAGE_TYPE +[[ -d x86_64 ]] && cd 'x86_64' # backwards-compatibility with release/1.6.x +buildkite-agent artifact upload "./$ARTIFACT" --agent-access-token $BUILDKITE_AGENT_ACCESS_TOKEN +for A in $(echo $ARTIFACT | tr ';' ' '); do + if [[ $(ls $A | grep -c '') == 0 ]]; then + echo "+++ :no_entry: ERROR: Expected artifact \"$A\" not found!" + pwd + ls -la + exit 1 + fi +done \ No newline at end of file diff --git a/.cicd/pipeline.yml b/.cicd/pipeline.yml index 457906e6f84..89ad710e52e 100644 --- a/.cicd/pipeline.yml +++ b/.cicd/pipeline.yml @@ -3,77 +3,99 @@ env: TEST_TIMEOUT: 60 TIMEOUT: 120 VERBOSE: true + DEBUG: true steps: - label: ":webhook: Trigger Travis CI Build" command: - - "./.cicd/travis-trigger.sh" + - "./.cicd/travis/travis-trigger.sh" agents: - queue: "automation-eos-builder-fleet" + queue: "automation-basic-builder-fleet" - trigger: "eosio-base-images-beta" - label: ":docker: Ensure base images exist" + label: ":docker: Ensure Base Images" build: commit: "${BUILDKITE_COMMIT}" branch: "${BUILDKITE_BRANCH}" - wait - - label: ":aws: [Amazon] 2 Build" + - label: ":aws: [Amazon] 2 Build & Test" agents: queue: "automation-eos-builder-fleet" plugins: - docker#v3.2.0: debug: $DEBUG - image: "eosio/producer:eosio-amazonlinux-2-1c266e25276ad1f1147a4df5c12921968079c49b" + image: "eosio/producer:eosio-amazonlinux-2-cc8d19ea479663d98189ce2473aacc2a7567a8be" + environment: + - "BUILDKITE_AGENT_ACCESS_TOKEN" # Needed for buildkite-agent upload/download + mount-buildkite-agent: false # Mounting bk-agent doesn't work, so disable it + propagate-environment: true # Need for buildkite-agent upload/download (JOB_ID, etc) timeout: $BUILD_TIMEOUT skip: $SKIP_AMAZON_LINUX_2 - - label: ":centos: [CentOS] 7 Build" + - label: ":centos: [CentOS] 7 Build & Test" command: - "" agents: queue: "automation-eos-builder-fleet" plugins: - docker#v3.2.0: - debug: $DEBUG - image: "eosio/producer:eosio-centos-7-d45789dfb1a5b830a62381afd205b34d3d7c2d63" + debug: true + image: "eosio/producer:eosio-centos-7-61150e3338126b91bd9384821be93de5e9f8b3fe" + environment: + # - "ENABLE_LR_TESTS=true" + - "BUILDKITE_AGENT_ACCESS_TOKEN" # Needed for buildkite-agent upload/download + mount-buildkite-agent: false # Mounting bk-agent doesn't work, so disable it + propagate-environment: true # Need for buildkite-agent upload/download (JOB_ID, etc) timeout: $BUILD_TIMEOUT skip: $SKIP_CENTOS_7 - - label: ":ubuntu: [Ubuntu] 16.04 Build" + - label: ":ubuntu: [Ubuntu] 16.04 Build & Test" agents: queue: "automation-eos-builder-fleet" plugins: - docker#v3.2.0: debug: $DEBUG - image: "eosio/producer:eosio-ubuntu-16.04-4f430ef21e359ef2581c25255c99e5411adfaed9" + image: "eosio/producer:eosio-ubuntu-16.04-8c7529f2357cadbf6dc8d1be8fca379efd25af64" + environment: + - "BUILDKITE_AGENT_ACCESS_TOKEN" # Needed for buildkite-agent upload/download + mount-buildkite-agent: false # Mounting bk-agent doesn't work, so disable it + propagate-environment: true # Need for buildkite-agent upload/download (JOB_ID, etc) timeout: $BUILD_TIMEOUT skip: $SKIP_UBUNTU_16 - - label: ":ubuntu: [Ubuntu] 18.04 Build" + - label: ":ubuntu: [Ubuntu] 18.04 Build & Test" agents: queue: "automation-eos-builder-fleet" plugins: - docker#v3.2.0: debug: $DEBUG - image: "eosio/producer:eosio-ubuntu-18.04-14091ac0e5618b0ea5ce026cd75b648efbde4be7" + image: "eosio/producer:eosio-ubuntu-18.04-45ddde8129587d53014a8cb7ef2214a01ea346da" + environment: + - "BUILDKITE_AGENT_ACCESS_TOKEN" # Needed for buildkite-agent 
upload/download + mount-buildkite-agent: false # Mounting bk-agent doesn't work, so disable it + propagate-environment: true # Need for buildkite-agent upload/download (JOB_ID, etc) timeout: $BUILD_TIMEOUT skip: $SKIP_UBUNTU_18 - - label: ":darwin: [Darwin] 10.14 Build" + - label: ":darwin: [Darwin] 10.14 Build & Test" command: - - "brew install git graphviz libtool gmp llvm@4 pkgconfig python python@2 doxygen libusb openssl boost@1.70 cmake" + - "brew install git graphviz libtool gmp llvm@4 pkgconfig python python@2 doxygen libusb openssl boost@1.70 cmake mongodb" - "git clone $BUILDKITE_REPO eos && cd eos && git checkout $BUILDKITE_COMMIT && git submodule update --init --recursive" - - "cd eos && mkdir build && cd build && cmake .. && make -j$(getconf _NPROCESSORS_ONLN)" - - "cd eos/build && ctest -j$(getconf _NPROCESSORS_ONLN) -LE _tests --output-on-failure -T Test" + - "cd eos && mkdir -p build && cd build && cmake .." + - "cd eos/build && make -j$(getconf _NPROCESSORS_ONLN)" + - "if ${ENABLE_PARALLEL_TESTS:-true}; then cd eos/build && ctest -j$(getconf _NPROCESSORS_ONLN) -LE _tests --output-on-failure -T Test; fi" + - "if ${ENABLE_SERIAL_TESTS:-true}; then cd eos/build && mkdir -p ./mongodb && mongod --dbpath ./mongodb --fork --logpath mongod.log && ctest -L nonparallelizable_tests --output-on-failure -T Test; fi" + - "if ${ENABLE_LR_TESTS:-false}; then cd eos/build && ctest -L long_running_tests --output-on-failure -T Test; fi" + - "cd eos && tar -pczf build.tar.gz build && buildkite-agent artifact upload build.tar.gz" plugins: - chef/anka#v0.5.1: no-volume: true inherit-environment-vars: true vm-name: 10.14.4_6C_14G_40G - vm-registry-tag: "clean::cicd::git-ssh::nas::brew" + vm-registry-tag: "clean::cicd::git-ssh::nas::brew::buildkite-agent" modify-cpu: 12 modify-ram: 24 always-pull: true @@ -83,4 +105,93 @@ steps: - "queue=mac-anka-large-node-fleet" timeout: $BUILD_TIMEOUT skip: $SKIP_MOJAVE - \ No newline at end of file + + - wait: + continue_on_failure: true + + # - command: | + # echo '+++ :compression: Extracting Test Metrics Code' + # tar -zxf .cicd/metrics/test-metrics.tar.gz + # echo '+++ :javascript: Running test-metrics.js' + # node --max-old-space-size=32768 test-metrics.js + # label: ":bar_chart: Test Metrics" + # agents: + # queue: "automation-eos-builder-fleet" + # timeout: 10 + # soft_fail: true + + # - wait + + - label: ":centos: [Centos] 7 PB" + command: + - "cd /workdir && buildkite-agent artifact download build.tar.gz . --step ':centos: [CentOS] 7 Build & Test' --agent-access-token $$BUILDKITE_AGENT_ACCESS_TOKEN && tar -xzf build.tar.gz" + - "./.cicd/package-builder.sh" + plugins: + - docker#v3.2.0: + debug: true + image: "eosio/producer:eosio-centos-7-61150e3338126b91bd9384821be93de5e9f8b3fe" + environment: + # - "ENABLE_LR_TESTS=true" + - "BUILDKITE_AGENT_ACCESS_TOKEN" # Needed for buildkite-agent upload/download + mount-buildkite-agent: false # Mounting bk-agent doesn't work, so disable it + propagate-environment: true # Need for buildkite-agent upload/download (JOB_ID, etc) + agents: + queue: "automation-eos-builder-fleet" + timeout: 10 + + - label: ":ubuntu: [Ubuntu] 16.04 PB" + command: + - "cd /workdir && buildkite-agent artifact download build.tar.gz . 
--step ':ubuntu: [Ubuntu] 16.04 Build & Test' --agent-access-token $$BUILDKITE_AGENT_ACCESS_TOKEN && tar -xzf build.tar.gz" + - "./.cicd/package-builder.sh" + plugins: + - docker#v3.2.0: + debug: $DEBUG + image: "eosio/producer:eosio-ubuntu-16.04-8c7529f2357cadbf6dc8d1be8fca379efd25af64" + environment: + - "BUILDKITE_AGENT_ACCESS_TOKEN" # Needed for buildkite-agent upload/download + mount-buildkite-agent: false # Mounting bk-agent doesn't work, so disable it + propagate-environment: true # Need for buildkite-agent upload/download (JOB_ID, etc) + agents: + queue: "automation-eos-builder-fleet" + timeout: 10 + + - label: ":ubuntu: [Ubuntu] 18.04 PB" + command: + - "cd /workdir && buildkite-agent artifact download build.tar.gz . --step ':ubuntu: [Ubuntu] 18.04 Build & Test' --agent-access-token $$BUILDKITE_AGENT_ACCESS_TOKEN && tar -xzf build.tar.gz" + - "./.cicd/package-builder.sh" + plugins: + - docker#v3.2.0: + debug: $DEBUG + image: "eosio/producer:eosio-ubuntu-18.04-45ddde8129587d53014a8cb7ef2214a01ea346da" + environment: + - "BUILDKITE_AGENT_ACCESS_TOKEN" # Needed for buildkite-agent upload/download + mount-buildkite-agent: false # Mounting bk-agent doesn't work, so disable it + propagate-environment: true # Need for buildkite-agent upload/download (JOB_ID, etc) + agents: + queue: "automation-eos-builder-fleet" + timeout: 10 + + - label: ":darwin: [Mojave] PB" + command: + - "git clone $BUILDKITE_REPO eos && cd eos && git checkout $BUILDKITE_COMMIT" + - "cd eos && buildkite-agent artifact download build.tar.gz . --step ':darwin: [Darwin] 10.14 Build & Test' && tar -xzf build.tar.gz" + - "cd eos && ./.cicd/package-builder.sh" + plugins: + - chef/anka#v0.5.1: + no-volume: true + inherit-environment-vars: true + vm-name: 10.14.4_6C_14G_40G + vm-registry-tag: "clean::cicd::git-ssh::nas::brew::buildkite-agent" + always-pull: true + debug: true + wait-network: true + agents: + - "queue=mac-anka-node-fleet" + timeout: 10 + + - label: ":git: Git Submodule Regression Check" + command: + - "./.cicd/submodule-regression-checker.sh" + agents: + queue: "automation-basic-builder-fleet" + timeout: 5 \ No newline at end of file diff --git a/.cicd/submodule-regression-checker.sh b/.cicd/submodule-regression-checker.sh new file mode 100755 index 00000000000..df15b56f1fb --- /dev/null +++ b/.cicd/submodule-regression-checker.sh @@ -0,0 +1,44 @@ +#!/usr/bin/env bash +set -eo pipefail + +declare -A PR_MAP +declare -A BASE_MAP + +# Support Travis and BK +if ${TRAVIS:-false}; then + BASE_BRANCH=$TRAVIS_BRANCH + CURRENT_BRANCH=${TRAVIS_PULL_REQUEST_BRANCH:-$TRAVIS_BRANCH} # We default to TRAVIS_BRANCH if it's not a PR so it passes on non PR runs +else + BASE_BRANCH=${BUILDKITE_PULL_REQUEST_BASE_BRANCH:-$BUILDKITE_BRANCH} + CURRENT_BRANCH=$BUILDKITE_BRANCH +fi + +echo "getting submodule info for $CURRENT_BRANCH" +while read -r a b; do + PR_MAP[$a]=$b +done < <(git submodule --quiet foreach --recursive 'echo $path `git log -1 --format=%ct`') + +echo "getting submodule info for $BASE_BRANCH" +git checkout $BASE_BRANCH &> /dev/null +git submodule update --init &> /dev/null +while read -r a b; do + BASE_MAP[$a]=$b +done < <(git submodule --quiet foreach --recursive 'echo $path `git log -1 --format=%ct`') + +for k in "${!BASE_MAP[@]}"; do + base_ts=${BASE_MAP[$k]} + pr_ts=${PR_MAP[$k]} + echo "submodule $k" + echo " timestamp on $CURRENT_BRANCH: $pr_ts" + echo " timestamp on $BASE_BRANCH: $base_ts" + if (( $pr_ts < $base_ts)); then + echo "$k is older on $CURRENT_BRANCH than $BASE_BRANCH; investigating..." 
+ + if for c in `git log $CURRENT_BRANCH ^$BASE_BRANCH --pretty=format:"%H"`; do git show --pretty="" --name-only $c; done | grep -q "^$k$"; then + echo "ERROR: $k has regressed" + exit 1 + else + echo "$k was not in the diff; no regression detected" + fi + fi +done diff --git a/.cicd/travis-build.sh b/.cicd/travis-build.sh deleted file mode 100755 index d12ba8ebb05..00000000000 --- a/.cicd/travis-build.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env bash -set -eo pipefail -cd $( dirname "${BASH_SOURCE[0]}" )/.. # Ensure we're in the repo root and not inside of scripts -. ./.cicd/.helpers - -CPU_CORES=$(getconf _NPROCESSORS_ONLN) -if [[ "$(uname)" == Darwin ]]; then - echo 'Detected Darwin, building natively.' - [[ -d eos ]] && cd eos - [[ ! -d build ]] && mkdir build - cd build - echo \$PATH - ccache -s - echo '$ cmake ..' - cmake .. - echo "$ make -j $CPU_CORES" - make -j $CPU_CORES - echo 'Running unit tests.' - echo "$ ctest -j $CPU_CORES -LE _tests --output-on-failure -T Test" - ctest -j $CPU_CORES -LE _tests --output-on-failure -T Test # run unit tests -else # linux - execute docker run --rm -v $(pwd):/workdir -v /usr/lib/ccache -v $HOME/.ccache:/opt/.ccache -e CCACHE_DIR=/opt/.ccache $FULL_TAG -fi \ No newline at end of file diff --git a/.cicd/travis/travis-build.sh b/.cicd/travis/travis-build.sh new file mode 100755 index 00000000000..4f8bc35fbab --- /dev/null +++ b/.cicd/travis/travis-build.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash +set -eo pipefail +ROOT_DIR=$(pwd) +cd $( dirname "${BASH_SOURCE[0]}" )/.. # Ensure we're in the .cicd dir +. ./.helpers +cd $ROOT_DIR +pwd + +CPU_CORES=$(getconf _NPROCESSORS_ONLN) +if [[ "$(uname)" == Darwin ]]; then + . ./.cicd/docker/.helpers-v33 + fold-execute ccache -s + mkdir -p build && cd build && fold-execute cmake .. 
+ fold-execute make -j$(getconf _NPROCESSORS_ONLN) + if ${ENABLE_PARALLEL_TESTS:-true}; then fold-execute ctest -j$(getconf _NPROCESSORS_ONLN) -LE _tests --output-on-failure -T Test; fi + if ${ENABLE_SERIAL_TESTS:-true}; then mkdir -p ./mongodb && fold-execute mongod --dbpath ./mongodb --fork --logpath mongod.log && fold-execute ctest -L nonparallelizable_tests --output-on-failure -T Test; fi + if ${ENABLE_LR_TESTS:-false}; then fold-execute ctest -L long_running_tests --output-on-failure -T Test; fi + if ${ENABLE_SUBMODULE_REGRESSION_TEST:-true}; then cd $ROOT_DIR && fold-execute ./.cicd/submodule-regression-checker.sh; fi +else # linux + DOCKER_RUN_EXTRAS="-e ENABLE_PACKAGE_BUILDER=false" # Travis doesn't need to test or push packages + execute eval docker run --rm -v $(pwd):/workdir -v /usr/lib/ccache -v $HOME/.ccache:/opt/.ccache -e CCACHE_DIR=/opt/.ccache -e TRAVIS $DOCKER_RUN_EXTRAS $FULL_TAG +fi \ No newline at end of file diff --git a/.cicd/travis-trigger.sh b/.cicd/travis/travis-trigger.sh similarity index 100% rename from .cicd/travis-trigger.sh rename to .cicd/travis/travis-trigger.sh diff --git a/.gitignore b/.gitignore index 46a19161c91..6dd6c1ca492 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,7 @@ *.dot *.abi.hpp *.cmake +!.cicd !CMakeModules/*.cmake *.ninja \#* diff --git a/.travis.yml b/.travis.yml index 168d4071571..9bdd9ce61d3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -45,10 +45,11 @@ matrix: - openssl - boost@1.70 - ccache + - mongodb env: - PATH="/usr/local/opt/ccache/libexec:$PATH" script: | - ./.cicd/travis-build.sh + ./.cicd/travis/travis-build.sh notifications: webhooks: secure: gmqODqoFAil2cR7v++ibqRNECBOSD/VJX+2qPa7XptkVWmVMzbII5CNgBQAscjFsp9arHPMXCCzkBi847PCSiHdsnYFQ4T273FLRWr3cDbLjfmR+BJ7dGKvQnlpSi2Ze2TtAPJyRl+iv+cxDj7cWE5zw2c4xbgh1a/cNO+/ayUfFkyMEIfVWRsHkdkra4gOLywou0XRLHr4CX1V60uU7uuqATnIMMi7gQYwiKKtZqjkbf8wcBvZirDhjQ6lDPN5tnZo6L4QHmqjtzNJg/UrD4h+zES53dLVI4uxlXRAwwpw+mJOFA3QE/3FT+bMQjLCffUz4gZaWcdgebPYzrwSWUbJoFdWAOwcTqivQY0FIQzcz/r6uGWcwWTavzkPEbg68BVM2BZId/0110J6feeTkpJ3MPV+UsIoGTvbg50vi/I06icftuZ/cLqDj3+Emifm7Jlr1sRTSdqtYAJj/2ImUfsb46cwgjAVhFOTvc+KuPgJQgvOXV7bZkxEr5qDWo8Al2sV8BWb83j1rMlZ4LfERokImDVqxu2kkcunchzvhtYFTesSpmwegVpwceCtOtO0rEUgATnfTEHzk2rm8nuz4UtidsQnluUKqmKD0QCqHXFfn+3ZRJsDqr+iCYdxv1BAeAVc9q1L7bgrKDMGiJgkxuhZ2v3J2SflWLvjZjFDduuc=