From 94c454134ff4693fa7ec8820f31015e5eae8fb68 Mon Sep 17 00:00:00 2001 From: Zach Butler Date: Thu, 18 Apr 2019 17:36:38 -0400 Subject: [PATCH 01/62] Removed centralized pipelines --- .buildkite/coverage.yml | 29 ---- .buildkite/debug.yml | 230 ------------------------------ .buildkite/docker.yml | 74 ---------- .buildkite/long_running_tests.yml | 212 --------------------------- .buildkite/sanitizers.yml | 131 ----------------- 5 files changed, 676 deletions(-) delete mode 100644 .buildkite/coverage.yml delete mode 100644 .buildkite/debug.yml delete mode 100644 .buildkite/docker.yml delete mode 100644 .buildkite/long_running_tests.yml delete mode 100644 .buildkite/sanitizers.yml diff --git a/.buildkite/coverage.yml b/.buildkite/coverage.yml deleted file mode 100644 index c5a50bc64f4..00000000000 --- a/.buildkite/coverage.yml +++ /dev/null @@ -1,29 +0,0 @@ -steps: - - command: | - echo "--- :hammer: Building" && \ - /usr/bin/cmake -GNinja -DCMAKE_BUILD_TYPE=Debug -DCMAKE_CXX_COMPILER=clang++-4.0 -DCMAKE_C_COMPILER=clang-4.0 -DBOOST_ROOT="${BOOST_ROOT}" -DWASM_ROOT="${WASM_ROOT}" -DOPENSSL_ROOT_DIR="${OPENSSL_ROOT_DIR}" -DBUILD_MONGO_DB_PLUGIN=true -DENABLE_COVERAGE_TESTING=true -DBUILD_DOXYGEN=false && \ - /usr/bin/ninja - echo "--- :spiral_note_pad: Generating Code Coverage Report" && \ - /usr/bin/ninja EOSIO_ut_coverage && \ - echo "--- :arrow_up: Publishing Code Coverage Report" && \ - buildkite-agent artifact upload "EOSIO_ut_coverage/**/*" s3://eos-coverage/$BUILDKITE_JOB_ID && \ - cp /config/.coveralls.yml . && \ - /usr/local/bin/coveralls-lcov EOSIO_ut_coverage_filtered.info && \ - echo "+++ View Report" && \ - printf "\033]1339;url=https://eos-coverage.s3-us-west-2.amazonaws.com/$BUILDKITE_JOB_ID/EOSIO_ut_coverage/index.html;content=View Full Coverage Report\a\n" - label: ":spiral_note_pad: Generate Report" - agents: - queue: "automation-large-builder-fleet" - plugins: - docker#v1.4.0: - image: "eosio/ci:ubuntu18" - workdir: /data/job - mounts: - - /etc/buildkite-agent/config:/config - environment: - - BOOST_ROOT=/root/opt/boost - - OPENSSL_ROOT_DIR=/usr/include/openssl - - WASM_ROOT=/root/opt/wasm - - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/root/opt/wasm/bin - - CI=true - timeout: 60 diff --git a/.buildkite/debug.yml b/.buildkite/debug.yml deleted file mode 100644 index 28576d56195..00000000000 --- a/.buildkite/debug.yml +++ /dev/null @@ -1,230 +0,0 @@ -steps: - - command: | - echo "--- Creating symbolic link to job directory :file_folder:" && \ - sleep 5 && ln -s "$(pwd)" /data/job && cd /data/job && \ - echo "+++ Building :hammer:" && \ - echo 1 | ./eosio_build.sh -o Debug && \ - echo "--- Compressing build directory :compression:" && \ - tar -pczf build.tar.gz build/ - label: ":darwin: Build" - agents: - - "role=macos-builder" - artifact_paths: "build.tar.gz" - timeout: 60 - - - command: | - echo "+++ :hammer: Building" && \ - echo 1 | ./eosio_build.sh -o Debug && \ - echo "--- :compression: Compressing build directory" && \ - tar -pczf build.tar.gz build/ - label: ":ubuntu: Build" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: "build.tar.gz" - plugins: - docker#v2.0.0: - image: "eosio/ci:ubuntu" - workdir: /data/job - timeout: 60 - - - command: | - echo "+++ :hammer: Building" && \ - echo 1 | ./eosio_build.sh -o Debug && \ - echo "--- :compression: Compressing build directory" && \ - tar -pczf build.tar.gz build/ - label: ":ubuntu: 18.04 Build" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: "build.tar.gz" 
- plugins: - docker#v2.0.0: - image: "eosio/ci:ubuntu18" - workdir: /data/job - timeout: 60 - - - command: | - echo "+++ :hammer: Building" && \ - echo 1 | ./eosio_build.sh -o Debug && \ - echo "--- :compression: Compressing build directory" && \ - tar -pczf build.tar.gz build/ - label: ":fedora: Build" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: "build.tar.gz" - plugins: - docker#v2.0.0: - image: "eosio/ci:fedora" - workdir: /data/job - timeout: 60 - - - command: | - echo "+++ :hammer: Building" && \ - echo 1 | ./eosio_build.sh -o Debug && \ - echo "--- :compression: Compressing build directory" && \ - tar -pczf build.tar.gz build/ - label: ":centos: Build" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: "build.tar.gz" - plugins: - docker#v2.0.0: - image: "eosio/ci:centos" - workdir: /data/job - timeout: 60 - - - command: | - echo "+++ :hammer: Building" && \ - echo 1 | ./eosio_build.sh -o Debug && \ - echo "--- :compression: Compressing build directory" && \ - tar -pczf build.tar.gz build/ - label: ":aws: Build" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: "build.tar.gz" - plugins: - docker#v2.0.0: - image: "eosio/ci:amazonlinux" - workdir: /data/job - timeout: 60 - - - wait - - - command: | - echo "--- :arrow_down: Downloading build directory" && \ - buildkite-agent artifact download "build.tar.gz" . --step ":darwin: Build" && \ - tar -zxf build.tar.gz && \ - echo "--- :m: Starting MongoDB" && \ - $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ - echo "+++ :microscope: Running tests" && \ - ln -s "$(pwd)" /data/job && cd /data/job/build && ctest -LE long_running_tests --output-on-failure - retry: - automatic: - limit: 1 - label: ":darwin: Tests" - agents: - - "role=macos-tester" - artifact_paths: - - "mongod.log" - - "build/genesis.json" - - "build/config.ini" - timeout: 60 - - - command: | - echo "--- :arrow_down: Downloading build directory" && \ - buildkite-agent artifact download "build.tar.gz" . --step ":ubuntu: Build" && \ - tar -zxf build.tar.gz && \ - echo "--- :m: Starting MongoDB" && \ - $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ - echo "+++ :microscope: Running tests" && \ - cd /data/job/build && ctest -LE long_running_tests --output-on-failure - retry: - automatic: - limit: 1 - label: ":ubuntu: Tests" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: - - "mongod.log" - - "build/genesis.json" - - "build/config.ini" - plugins: - docker#v2.0.0: - image: "eosio/ci:ubuntu" - workdir: /data/job - timeout: 60 - - - command: | - echo "--- :arrow_down: Downloading build directory" && \ - buildkite-agent artifact download "build.tar.gz" . --step ":ubuntu: 18.04 Build" && \ - tar -zxf build.tar.gz && \ - echo "--- :m: Starting MongoDB" && \ - $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ - echo "+++ :microscope: Running tests" && \ - cd /data/job/build && ctest -LE long_running_tests --output-on-failure - retry: - automatic: - limit: 1 - label: ":ubuntu: 18.04 Tests" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: - - "mongod.log" - - "build/genesis.json" - - "build/config.ini" - plugins: - docker#v2.0.0: - image: "eosio/ci:ubuntu18" - workdir: /data/job - timeout: 60 - - - command: | - echo "--- :arrow_down: Downloading build directory" && \ - buildkite-agent artifact download "build.tar.gz" . 
--step ":fedora: Build" && \ - tar -zxf build.tar.gz && \ - echo "--- :m: Starting MongoDB" && \ - $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ - echo "+++ :microscope: Running tests" && \ - cd /data/job/build && ctest -LE long_running_tests --output-on-failure - retry: - automatic: - limit: 1 - label: ":fedora: Tests" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: - - "mongod.log" - - "build/genesis.json" - - "build/config.ini" - plugins: - docker#v2.0.0: - image: "eosio/ci:fedora" - workdir: /data/job - timeout: 60 - - - command: | - echo "--- :arrow_down: Downloading build directory" && \ - buildkite-agent artifact download "build.tar.gz" . --step ":centos: Build" && \ - tar -zxf build.tar.gz && \ - echo "--- :m: Starting MongoDB" && \ - $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ - echo "+++ :microscope: Running tests" && \ - cd /data/job/build && ctest -LE long_running_tests --output-on-failure - retry: - automatic: - limit: 1 - label: ":centos: Tests" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: - - "mongod.log" - - "build/genesis.json" - - "build/config.ini" - plugins: - docker#v2.0.0: - image: "eosio/ci:centos" - workdir: /data/job - timeout: 60 - - - command: | - echo "--- :arrow_down: Downloading build directory" && \ - buildkite-agent artifact download "build.tar.gz" . --step ":aws: Build" && \ - tar -zxf build.tar.gz && \ - echo "--- :m: Starting MongoDB" && \ - $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ - echo "+++ :microscope: Running tests" && \ - cd /data/job/build && ctest -LE long_running_tests --output-on-failure - retry: - automatic: - limit: 1 - label: ":aws: Tests" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: - - "mongod.log" - - "build/genesis.json" - - "build/config.ini" - plugins: - docker#v2.0.0: - image: "eosio/ci:amazonlinux" - workdir: /data/job - timeout: 60 diff --git a/.buildkite/docker.yml b/.buildkite/docker.yml deleted file mode 100644 index f8f6d8e0a12..00000000000 --- a/.buildkite/docker.yml +++ /dev/null @@ -1,74 +0,0 @@ -steps: - - command: | - echo "AUTHENTICATING GOOGLE SERVICE ACCOUNT" && \ - gcloud --quiet auth activate-service-account b1-automation-svc@b1-automation-dev.iam.gserviceaccount.com --key-file=/etc/gcp-service-account.json && \ - docker-credential-gcr configure-docker && \ - echo "BUILDING BUILD IMAGE" && \ - cd Docker/builder && \ - docker build -t eosio/builder:latest -t eosio/builder:$BUILDKITE_COMMIT . 
--build-arg branch=$BUILDKITE_COMMIT && \ - docker tag eosio/builder:$BUILDKITE_COMMIT gcr.io/b1-automation-dev/eosio/builder:$BUILDKITE_COMMIT && \ - docker tag eosio/builder:latest gcr.io/b1-automation-dev/eosio/builder:latest && \ - echo "PUSHING DOCKER IMAGES" && \ - docker push gcr.io/b1-automation-dev/eosio/builder:$BUILDKITE_COMMIT && \ - docker push gcr.io/b1-automation-dev/eosio/builder:latest && \ - echo "TRASHING OLD IMAGES" && \ - docker rmi eosio/builder:$BUILDKITE_COMMIT && \ - docker rmi eosio/builder:latest && \ - docker rmi gcr.io/b1-automation-dev/eosio/builder:$BUILDKITE_COMMIT && \ - docker rmi gcr.io/b1-automation-dev/eosio/builder:latest - label: "Docker build builder" - agents: - queue: "automation-docker-builder-fleet" - timeout: 300 - - - wait - - - command: | - echo "AUTHENTICATING GOOGLE SERVICE ACCOUNT" && \ - gcloud --quiet auth activate-service-account b1-automation-svc@b1-automation-dev.iam.gserviceaccount.com --key-file=/etc/gcp-service-account.json && \ - docker-credential-gcr configure-docker && \ - echo "BUILDING EOS IMAGE" && \ - docker pull gcr.io/b1-automation-dev/eosio/builder:$BUILDKITE_COMMIT && \ - cd Docker && \ - docker build -t eosio/eos:latest -t eosio/eos:$BUILDKITE_COMMIT . --build-arg branch=$BUILDKITE_BRANCH && \ - docker tag eosio/eos:$BUILDKITE_COMMIT gcr.io/b1-automation-dev/eosio/eos:$BUILDKITE_COMMIT && \ - docker tag eosio/eos:latest gcr.io/b1-automation-dev/eosio/eos:latest && \ - echo "PUSHING DOCKER IMAGES" && \ - docker push gcr.io/b1-automation-dev/eosio/eos:$BUILDKITE_COMMIT && \ - docker push gcr.io/b1-automation-dev/eosio/eos:latest && \ - echo "TRASHING OLD IMAGES" && \ - docker rmi eosio/eos:$BUILDKITE_COMMIT && \ - docker rmi eosio/eos:latest && \ - docker rmi gcr.io/b1-automation-dev/eosio/eos:$BUILDKITE_COMMIT && \ - docker rmi gcr.io/b1-automation-dev/eosio/eos:latest && \ - docker rmi gcr.io/b1-automation-dev/eosio/builder:$BUILDKITE_COMMIT - label: "Docker build eos" - agents: - queue: "automation-docker-builder-fleet" - timeout: 300 - - - command: | - echo "AUTHENTICATING GOOGLE SERVICE ACCOUNT" && \ - gcloud --quiet auth activate-service-account b1-automation-svc@b1-automation-dev.iam.gserviceaccount.com --key-file=/etc/gcp-service-account.json && \ - docker-credential-gcr configure-docker && \ - echo "BUILDING EOS DEV IMAGE" && \ - docker pull gcr.io/b1-automation-dev/eosio/builder:$BUILDKITE_COMMIT && \ - cd Docker/dev && \ - docker build -t eosio/eos-dev:latest -t eosio/eos-dev:$BUILDKITE_COMMIT . 
--build-arg branch=$BUILDKITE_BRANCH && \ - docker tag eosio/eos-dev:$BUILDKITE_COMMIT gcr.io/b1-automation-dev/eosio/eos-dev:$BUILDKITE_COMMIT && \ - docker tag eosio/eos-dev:latest gcr.io/b1-automation-dev/eosio/eos-dev:latest && \ - echo "PUSHING DOCKER IMAGES" && \ - docker push gcr.io/b1-automation-dev/eosio/eos-dev:$BUILDKITE_COMMIT && \ - docker push gcr.io/b1-automation-dev/eosio/eos-dev:latest && \ - echo "TRASHING OLD IMAGES" && \ - docker rmi eosio/eos-dev:$BUILDKITE_COMMIT && \ - docker rmi eosio/eos-dev:latest && \ - docker rmi gcr.io/b1-automation-dev/eosio/eos-dev:$BUILDKITE_COMMIT && \ - docker rmi gcr.io/b1-automation-dev/eosio/eos-dev:latest && \ - docker rmi gcr.io/b1-automation-dev/eosio/builder:$BUILDKITE_COMMIT - label: "Docker build eos-dev" - agents: - queue: "automation-docker-builder-fleet" - timeout: 300 - - - wait diff --git a/.buildkite/long_running_tests.yml b/.buildkite/long_running_tests.yml deleted file mode 100644 index e22016c4de4..00000000000 --- a/.buildkite/long_running_tests.yml +++ /dev/null @@ -1,212 +0,0 @@ -steps: - - command: | - echo "--- Creating symbolic link to job directory :file_folder:" && \ - sleep 5 && ln -s "$(pwd)" /data/job && cd /data/job && \ - echo "+++ Building :hammer:" && \ - echo 1 | ./eosio_build.sh && \ - echo "--- Compressing build directory :compression:" && \ - tar -pczf build.tar.gz build/ - label: ":darwin: Build" - agents: - - "role=macos-builder" - artifact_paths: "build.tar.gz" - timeout: 60 - - - command: | - echo "+++ :hammer: Building" && \ - echo 1 | ./eosio_build.sh && \ - echo "--- :compression: Compressing build directory" && \ - tar -pczf build.tar.gz build/ - label: ":ubuntu: Build" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: "build.tar.gz" - plugins: - docker#v1.4.0: - image: "eosio/ci:ubuntu" - workdir: /data/job - timeout: 60 - - - command: | - echo "+++ :hammer: Building" && \ - echo 1 | ./eosio_build.sh && \ - echo "--- :compression: Compressing build directory" && \ - tar -pczf build.tar.gz build/ - label: ":ubuntu: 18.04 Build" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: "build.tar.gz" - plugins: - docker#v1.4.0: - image: "eosio/ci:ubuntu18" - workdir: /data/job - timeout: 60 - - - command: | - echo "+++ :hammer: Building" && \ - echo 1 | ./eosio_build.sh && \ - echo "--- :compression: Compressing build directory" && \ - tar -pczf build.tar.gz build/ - label: ":fedora: Build" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: "build.tar.gz" - plugins: - docker#v1.4.0: - image: "eosio/ci:fedora" - workdir: /data/job - timeout: 60 - - - command: | - echo "+++ :hammer: Building" && \ - echo 1 | ./eosio_build.sh && \ - echo "--- :compression: Compressing build directory" && \ - tar -pczf build.tar.gz build/ - label: ":centos: Build" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: "build.tar.gz" - plugins: - docker#v1.4.0: - image: "eosio/ci:centos" - workdir: /data/job - timeout: 60 - - - command: | - echo "+++ :hammer: Building" && \ - echo 1 | ./eosio_build.sh && \ - echo "--- :compression: Compressing build directory" && \ - tar -pczf build.tar.gz build/ - label: ":aws: Build" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: "build.tar.gz" - plugins: - docker#v1.4.0: - image: "eosio/ci:amazonlinux" - workdir: /data/job - timeout: 60 - - - wait - - - command: | - echo "--- :arrow_down: Downloading build directory" && \ - buildkite-agent artifact download "build.tar.gz" . 
--step ":darwin: Build" && \ - tar -zxf build.tar.gz && \ - echo "--- :m: Starting MongoDB" && \ - $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ - echo "+++ :microscope: Running tests" && \ - ln -s "$(pwd)" /data/job && cd /data/job/build && ctest -L long_running_tests --output-on-failure - label: ":darwin: Tests" - agents: - - "role=macos-tester" - artifact_paths: - - "mongod.log" - - "build/genesis.json" - - "build/config.ini" - timeout: 100 - - - command: | - echo "--- :arrow_down: Downloading build directory" && \ - buildkite-agent artifact download "build.tar.gz" . --step ":ubuntu: Build" && \ - tar -zxf build.tar.gz && \ - echo "--- :m: Starting MongoDB" && \ - $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ - echo "+++ :microscope: Running tests" && \ - cd /data/job/build && ctest -L long_running_tests --output-on-failure - label: ":ubuntu: Tests" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: - - "mongod.log" - - "build/genesis.json" - - "build/config.ini" - plugins: - docker#v1.4.0: - image: "eosio/ci:ubuntu" - workdir: /data/job - timeout: 100 - - - command: | - echo "--- :arrow_down: Downloading build directory" && \ - buildkite-agent artifact download "build.tar.gz" . --step ":ubuntu: 18.04 Build" && \ - tar -zxf build.tar.gz && \ - echo "--- :m: Starting MongoDB" && \ - $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ - echo "+++ :microscope: Running tests" && \ - cd /data/job/build && ctest -L long_running_tests --output-on-failure - label: ":ubuntu: 18.04 Tests" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: - - "mongod.log" - - "build/genesis.json" - - "build/config.ini" - plugins: - docker#v1.4.0: - image: "eosio/ci:ubuntu18" - workdir: /data/job - timeout: 100 - - - command: | - echo "--- :arrow_down: Downloading build directory" && \ - buildkite-agent artifact download "build.tar.gz" . --step ":fedora: Build" && \ - tar -zxf build.tar.gz && \ - echo "--- :m: Starting MongoDB" && \ - $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ - echo "+++ :microscope: Running tests" && \ - cd /data/job/build && ctest -L long_running_tests --output-on-failure - label: ":fedora: Tests" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: - - "mongod.log" - - "build/genesis.json" - - "build/config.ini" - plugins: - docker#v1.4.0: - image: "eosio/ci:fedora" - workdir: /data/job - timeout: 100 - - - command: | - echo "--- :arrow_down: Downloading build directory" && \ - buildkite-agent artifact download "build.tar.gz" . --step ":centos: Build" && \ - tar -zxf build.tar.gz && \ - echo "--- :m: Starting MongoDB" && \ - $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ - echo "+++ :microscope: Running tests" && \ - cd /data/job/build && ctest -L long_running_tests --output-on-failure - label: ":centos: Tests" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: - - "mongod.log" - - "build/genesis.json" - - "build/config.ini" - plugins: - docker#v1.4.0: - image: "eosio/ci:centos" - workdir: /data/job - timeout: 100 - - - command: | - echo "--- :arrow_down: Downloading build directory" && \ - buildkite-agent artifact download "build.tar.gz" . 
--step ":aws: Build" && \ - tar -zxf build.tar.gz && \ - echo "--- :m: Starting MongoDB" && \ - $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ - echo "+++ :microscope: Running tests" && \ - cd /data/job/build && ctest -L long_running_tests --output-on-failure - label: ":aws: Tests" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: - - "mongod.log" - - "build/genesis.json" - - "build/config.ini" - plugins: - docker#v1.4.0: - image: "eosio/ci:amazonlinux" - workdir: /data/job - timeout: 100 diff --git a/.buildkite/sanitizers.yml b/.buildkite/sanitizers.yml deleted file mode 100644 index b8588135610..00000000000 --- a/.buildkite/sanitizers.yml +++ /dev/null @@ -1,131 +0,0 @@ -steps: - - command: | - echo "--- :hammer: Building with Undefined Sanitizer" && \ - /usr/bin/cmake -GNinja \ - -DCMAKE_BUILD_TYPE=Debug \ - -DCMAKE_CXX_COMPILER=clang++-4.0 \ - -DCMAKE_C_COMPILER=clang-4.0 \ - -DBOOST_ROOT="${BOOST_ROOT}" \ - -DWASM_ROOT="${WASM_ROOT}" \ - -DOPENSSL_ROOT_DIR="${OPENSSL_ROOT_DIR}" \ - -DBUILD_MONGO_DB_PLUGIN=true \ - -DENABLE_COVERAGE_TESTING=true\ - -DBUILD_DOXYGEN=false -DCMAKE_CXX_FLAGS="-fsanitize=undefined -fsanitize-recover=all -g -fno-omit-frame-pointer" \ - -DCMAKE_C_FLAGS="-fsanitize=undefined -fsanitize-recover=all -g -fno-omit-frame-pointer" \ - -DCMAKE_EXE_LINKER_FLAGS="-fsanitize=undefined -fsanitize-recover=all -rtlib=compiler-rt -lgcc_s -pthread" \ - -DCMAKE_MODULE_LINKER_FLAGS="-fsanitize=undefined -fsanitize-recover=all -rtlib=compiler-rt -lgcc_s -pthread" && \ - echo "--- :shinto_shrine: Running ninja" && \ - /usr/bin/ninja | tee ninja.log && \ - echo "--- :compression: Compressing build directory" && \ - tar -pczf build.tar.gz * - echo "--- :beers: Done" - label: ":_: Undefined Sanitizer" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: - - "build.tar.gz" - - "ninja.log" - plugins: - docker#v1.4.0: - image: "eosio/ci:ubuntu18" - command: ["--privileged"] - workdir: /data/job - mounts: - - /etc/buildkite-agent/config:/config - environment: - - BOOST_ROOT=/root/opt/boost - - OPENSSL_ROOT_DIR=/usr/include/openssl - - WASM_ROOT=/root/opt/wasm - - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/root/opt/wasm/bin - - CI=true - - UBSAN_OPTIONS=print_stacktrace=1 - timeout: 60 - - - command: | - echo "--- :hammer: Building with Address Sanitizer" && \ - /usr/bin/cmake -GNinja \ - -DCMAKE_BUILD_TYPE=Debug \ - -DCMAKE_CXX_COMPILER=clang++-4.0 \ - -DCMAKE_C_COMPILER=clang-4.0 \ - -DBOOST_ROOT="${BOOST_ROOT}" \ - -DWASM_ROOT="${WASM_ROOT}" \ - -DOPENSSL_ROOT_DIR="${OPENSSL_ROOT_DIR}" \ - -DBUILD_MONGO_DB_PLUGIN=true \ - -DENABLE_COVERAGE_TESTING=true \ - -DBUILD_DOXYGEN=false \ - -DCMAKE_CXX_FLAGS="-fsanitize=address -fsanitize-recover=all -O1 -g -fno-omit-frame-pointer" \ - -DCMAKE_C_FLAGS="-fsanitize=address -fsanitize-recover=all -O1 -g -fno-omit-frame-pointer" \ - -DCMAKE_EXE_LINKER_FLAGS="-fsanitize=address -fsanitize-recover=all -rtlib=compiler-rt -lgcc_s" \ - -DCMAKE_MODULE_LINKER_FLAGS="-fsanitize=address -fsanitize-recover=all -rtlib=compiler-rt -lgcc_s" - echo "--- :shinto_shrine: Running ninja" && \ - /usr/bin/ninja | tee ninja.log && \ - echo "--- :compression: Compressing build directory" && \ - tar -pczf build.tar.gz * - echo "--- :beers: Done" - label: ":_: Address Sanitizer" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: - - "build.tar.gz" - - "ninja.log" - plugins: - docker#v1.4.0: - image: "eosio/ci:ubuntu18" - command: ["--privileged"] - workdir: /data/job - mounts: - - 
/etc/buildkite-agent/config:/config - environment: - - BOOST_ROOT=/root/opt/boost - - OPENSSL_ROOT_DIR=/usr/include/openssl - - WASM_ROOT=/root/opt/wasm - - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/root/opt/wasm/bin - - CI=true - - ASAN_OPTIONS=fast_unwind_on_malloc=0:halt_on_error=0:detect_odr_violation=0:detect_leaks=0:symbolize=1:verbosity=1 - timeout: 60 - - - wait - - - command: | - echo "--- :arrow_down: Downloading build directory" && \ - buildkite-agent artifact download "build.tar.gz" . --step ":_: Undefined Sanitizer" && \ - tar -zxf build.tar.gz --no-same-owner && \ - echo "--- :m: Starting MongoDB" && \ - $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ - echo "+++ :microscope: Running tests" && \ - ctest -j8 -LE _tests -V -O sanitizer.log || true - label: ":_: Undefined Sanitizer Tests" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: - - "mongod.log" - - "sanitizer.log" - plugins: - docker#v1.4.0: - image: "eosio/ci:ubuntu18" - workdir: /data/job - mounts: - - /etc/buildkite-agent/config:/config - timeout: 120 - - - command: | - echo "--- :arrow_down: Downloading build directory" && \ - buildkite-agent artifact download "build.tar.gz" . --step ":_: Address Sanitizer" && \ - tar -zxf build.tar.gz --no-same-owner && \ - echo "--- :m: Starting MongoDB" && \ - $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ - echo "+++ :microscope: Running tests" && \ - ctest -j8 -LE _tests -V -O sanitizer.log || true - label: ":_: Address Sanitizer Tests" - agents: - queue: "automation-large-builder-fleet" - artifact_paths: - - "mongod.log" - - "sanitizer.log" - plugins: - docker#v1.4.0: - image: "eosio/ci:ubuntu18" - workdir: /data/job - mounts: - - /etc/buildkite-agent/config:/config - timeout: 120 \ No newline at end of file From ad3ae3896769c8c7361773cf4f91e8afc5847c63 Mon Sep 17 00:00:00 2001 From: msmic Date: Fri, 19 Apr 2019 16:15:04 +0800 Subject: [PATCH 02/62] add pipeline_linux_build.yml pipeline_mac_build.yml --- .buildkite/pipeline_linux_build.yml | 383 ++++++++++++++++++++++++++++ .buildkite/pipeline_mac_build.yml | 131 ++++++++++ 2 files changed, 514 insertions(+) create mode 100644 .buildkite/pipeline_linux_build.yml create mode 100644 .buildkite/pipeline_mac_build.yml diff --git a/.buildkite/pipeline_linux_build.yml b/.buildkite/pipeline_linux_build.yml new file mode 100644 index 00000000000..b2eaaf5c4d4 --- /dev/null +++ b/.buildkite/pipeline_linux_build.yml @@ -0,0 +1,383 @@ +steps: + - command: | + echo "+++ :hammer: Building" && \ + echo 1 | ./eosio_build.sh && \ + echo "--- :compression: Compressing build directory" && \ + tar -pczf build.tar.gz build/ + label: ":ubuntu: Build" + agents: + queue: "automation-large-builder-fleet" + artifact_paths: "build.tar.gz" + plugins: + docker#v1.4.0: + image: "eosio/ci:ubuntu" + workdir: /data/job + timeout: 60 + + - command: | + echo "+++ :hammer: Building" && \ + echo 1 | ./eosio_build.sh && \ + echo "--- :compression: Compressing build directory" && \ + tar -pczf build.tar.gz build/ + label: ":ubuntu: 18.04 Build" + agents: + queue: "automation-large-builder-fleet" + artifact_paths: "build.tar.gz" + plugins: + docker#v1.4.0: + image: "eosio/ci:ubuntu18" + workdir: /data/job + timeout: 60 + + - command: | + echo "+++ :hammer: Building" && \ + echo 1 | ./eosio_build.sh && \ + echo "--- :compression: Compressing build directory" && \ + tar -pczf build.tar.gz build/ + label: ":fedora: Build" + agents: + queue: "automation-large-builder-fleet" + artifact_paths: 
"build.tar.gz" + plugins: + docker#v1.4.0: + image: "eosio/ci:fedora" + workdir: /data/job + timeout: 60 + + - command: | + echo "+++ :hammer: Building" && \ + echo 1 | ./eosio_build.sh && \ + echo "--- :compression: Compressing build directory" && \ + tar -pczf build.tar.gz build/ + label: ":centos: Build" + agents: + queue: "automation-large-builder-fleet" + artifact_paths: "build.tar.gz" + plugins: + docker#v1.4.0: + image: "eosio/ci:centos" + workdir: /data/job + timeout: 60 + + - command: | + echo "+++ :hammer: Building" && \ + echo 1 | ./eosio_build.sh && \ + echo "--- :compression: Compressing build directory" && \ + tar -pczf build.tar.gz build/ + label: ":aws: Build" + agents: + queue: "automation-large-builder-fleet" + artifact_paths: "build.tar.gz" + plugins: + docker#v1.4.0: + image: "eosio/ci:amazonlinux" + workdir: /data/job + timeout: 60 + + - wait + + - command: | + echo "--- :arrow_down: Downloading build directory" && \ + buildkite-agent artifact download "build.tar.gz" . --step ":ubuntu: Build" && \ + tar -zxf build.tar.gz && \ + echo "--- :m: Starting MongoDB" && \ + $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ + echo "+++ :microscope: Running tests" && \ + cd /data/job/build && ctest -j8 -LE _tests --output-on-failure + label: ":ubuntu: Tests" + agents: + queue: "automation-large-builder-fleet" + artifact_paths: + - "mongod.log" + - "build/genesis.json" + - "build/config.ini" + plugins: + docker#v1.4.0: + image: "eosio/ci:ubuntu" + workdir: /data/job + timeout: 60 + + - command: | + echo "--- :arrow_down: Downloading build directory" && \ + buildkite-agent artifact download "build.tar.gz" . --step ":ubuntu: Build" && \ + tar -zxf build.tar.gz && \ + echo "--- :m: Starting MongoDB" && \ + $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ + echo "+++ :microscope: Running tests" && \ + cd /data/job/build && ctest -L nonparallelizable_tests --output-on-failure + label: ":ubuntu: NP Tests" + agents: + queue: "automation-large-builder-fleet" + artifact_paths: + - "mongod.log" + - "build/genesis.json" + - "build/config.ini" + plugins: + docker#v1.4.0: + image: "eosio/ci:ubuntu" + workdir: /data/job + timeout: 60 + + - command: | + echo "--- :arrow_down: Downloading build directory" && \ + buildkite-agent artifact download "build.tar.gz" . --step ":ubuntu: 18.04 Build" && \ + tar -zxf build.tar.gz && \ + echo "--- :m: Starting MongoDB" && \ + $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ + echo "+++ :microscope: Running tests" && \ + cd /data/job/build && ctest -j8 -LE _tests --output-on-failure + label: ":ubuntu: 18.04 Tests" + agents: + queue: "automation-large-builder-fleet" + artifact_paths: + - "mongod.log" + - "build/genesis.json" + - "build/config.ini" + plugins: + docker#v1.4.0: + image: "eosio/ci:ubuntu18" + workdir: /data/job + timeout: 60 + + - command: | + echo "--- :arrow_down: Downloading build directory" && \ + buildkite-agent artifact download "build.tar.gz" . 
--step ":ubuntu: 18.04 Build" && \ + tar -zxf build.tar.gz && \ + echo "--- :m: Starting MongoDB" && \ + $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ + echo "+++ :microscope: Running tests" && \ + cd /data/job/build && ctest -L nonparallelizable_tests --output-on-failure + label: ":ubuntu: 18.04 NP Tests" + agents: + queue: "automation-large-builder-fleet" + artifact_paths: + - "mongod.log" + - "build/genesis.json" + - "build/config.ini" + plugins: + docker#v1.4.0: + image: "eosio/ci:ubuntu18" + workdir: /data/job + timeout: 60 + + - command: | + echo "--- :arrow_down: Downloading build directory" && \ + buildkite-agent artifact download "build.tar.gz" . --step ":fedora: Build" && \ + tar -zxf build.tar.gz && \ + echo "--- :m: Starting MongoDB" && \ + $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ + echo "+++ :microscope: Running tests" && \ + cd /data/job/build && ctest -j8 -LE _tests --output-on-failure + label: ":fedora: Tests" + agents: + queue: "automation-large-builder-fleet" + artifact_paths: + - "mongod.log" + - "build/genesis.json" + - "build/config.ini" + plugins: + docker#v1.4.0: + image: "eosio/ci:fedora" + workdir: /data/job + timeout: 60 + + - command: | + echo "--- :arrow_down: Downloading build directory" && \ + buildkite-agent artifact download "build.tar.gz" . --step ":fedora: Build" && \ + tar -zxf build.tar.gz && \ + echo "--- :m: Starting MongoDB" && \ + $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ + echo "+++ :microscope: Running tests" && \ + cd /data/job/build && ctest -L nonparallelizable_tests --output-on-failure + label: ":fedora: NP Tests" + agents: + queue: "automation-large-builder-fleet" + artifact_paths: + - "mongod.log" + - "build/genesis.json" + - "build/config.ini" + plugins: + docker#v1.4.0: + image: "eosio/ci:fedora" + workdir: /data/job + timeout: 60 + + - command: | + echo "--- :arrow_down: Downloading build directory" && \ + buildkite-agent artifact download "build.tar.gz" . --step ":centos: Build" && \ + tar -zxf build.tar.gz && \ + echo "--- :m: Starting MongoDB" && \ + $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ + echo "+++ :microscope: Running tests" && \ + cd /data/job/build && ctest -j8 -LE _tests --output-on-failure + label: ":centos: Tests" + agents: + queue: "automation-large-builder-fleet" + artifact_paths: + - "mongod.log" + - "build/genesis.json" + - "build/config.ini" + plugins: + docker#v1.4.0: + image: "eosio/ci:centos" + workdir: /data/job + timeout: 60 + + - command: | + echo "--- :arrow_down: Downloading build directory" && \ + buildkite-agent artifact download "build.tar.gz" . --step ":centos: Build" && \ + tar -zxf build.tar.gz && \ + echo "--- :m: Starting MongoDB" && \ + $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ + echo "+++ :microscope: Running tests" && \ + cd /data/job/build && ctest -L nonparallelizable_tests --output-on-failure + label: ":centos: NP Tests" + agents: + queue: "automation-large-builder-fleet" + artifact_paths: + - "mongod.log" + - "build/genesis.json" + - "build/config.ini" + plugins: + docker#v1.4.0: + image: "eosio/ci:centos" + workdir: /data/job + timeout: 60 + + - command: | + echo "--- :arrow_down: Downloading build directory" && \ + buildkite-agent artifact download "build.tar.gz" . 
--step ":aws: Build" && \ + tar -zxf build.tar.gz && \ + echo "--- :m: Starting MongoDB" && \ + $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ + echo "+++ :microscope: Running tests" && \ + cd /data/job/build && ctest -j8 -LE _tests --output-on-failure + label: ":aws: Tests" + agents: + queue: "automation-large-builder-fleet" + artifact_paths: + - "mongod.log" + - "build/genesis.json" + - "build/config.ini" + plugins: + docker#v1.4.0: + image: "eosio/ci:amazonlinux" + workdir: /data/job + timeout: 60 + + - command: | + echo "--- :arrow_down: Downloading build directory" && \ + buildkite-agent artifact download "build.tar.gz" . --step ":aws: Build" && \ + tar -zxf build.tar.gz && \ + echo "--- :m: Starting MongoDB" && \ + $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ + echo "+++ :microscope: Running tests" && \ + cd /data/job/build && ctest -L nonparallelizable_tests --output-on-failure + label: ":aws: NP Tests" + agents: + queue: "automation-large-builder-fleet" + artifact_paths: + - "mongod.log" + - "build/genesis.json" + - "build/config.ini" + plugins: + docker#v1.4.0: + image: "eosio/ci:amazonlinux" + workdir: /data/job + timeout: 60 + + - wait + + - command: | + echo "--- :arrow_down: Downloading build directory" && \ + buildkite-agent artifact download "build.tar.gz" . --step ":ubuntu: Build" && \ + tar -zxf build.tar.gz && \ + echo "+++ :microscope: Starting package build" && \ + cd /data/job/build/packages && bash generate_package.sh deb + label: ":ubuntu: Package builder" + agents: + queue: "automation-large-builder-fleet" + artifact_paths: + - "build/packages/*.deb" + plugins: + docker#v1.4.0: + image: "eosio/ci:ubuntu" + workdir: /data/job + env: + OS: "ubuntu-16.04" + PKGTYPE: "deb" + timeout: 60 + + - command: | + echo "--- :arrow_down: Downloading build directory" && \ + buildkite-agent artifact download "build.tar.gz" . --step ":ubuntu: 18.04 Build" && \ + tar -zxf build.tar.gz && \ + echo "+++ :microscope: Starting package build" && \ + cd /data/job/build/packages && bash generate_package.sh deb + label: ":ubuntu: 18.04 Package builder" + agents: + queue: "automation-large-builder-fleet" + artifact_paths: + - "build/packages/*.deb" + plugins: + docker#v1.4.0: + image: "eosio/ci:ubuntu18" + workdir: /data/job + env: + OS: "ubuntu-18.04" + PKGTYPE: "deb" + timeout: 60 + + - command: | + echo "--- :arrow_down: Downloading build directory" && \ + buildkite-agent artifact download "build.tar.gz" . --step ":fedora: Build" && \ + tar -zxf build.tar.gz && \ + echo "+++ :microscope: Starting package build" && \ + yum install -y rpm-build && \ + mkdir -p /root/rpmbuild/BUILD && \ + mkdir -p /root/rpmbuild/BUILDROOT && \ + mkdir -p /root/rpmbuild/RPMS && \ + mkdir -p /root/rpmbuild/SOURCES && \ + mkdir -p /root/rpmbuild/SPECS && \ + mkdir -p /root/rpmbuild/SRPMS && \ + cd /data/job/build/packages && bash generate_package.sh rpm + label: ":fedora: Package builder" + agents: + queue: "automation-large-builder-fleet" + artifact_paths: + - "build/packages/x86_64/*.rpm" + plugins: + docker#v1.4.0: + image: "eosio/ci:fedora" + workdir: /data/job + env: + OS: "fc27" + PKGTYPE: "rpm" + timeout: 60 + + - command: | + echo "--- :arrow_down: Downloading build directory" && \ + buildkite-agent artifact download "build.tar.gz" . 
--step ":centos: Build" && \ + tar -zxf build.tar.gz && \ + echo "+++ :microscope: Starting package build" && \ + yum install -y rpm-build && \ + mkdir -p /root/rpmbuild/BUILD && \ + mkdir -p /root/rpmbuild/BUILDROOT && \ + mkdir -p /root/rpmbuild/RPMS && \ + mkdir -p /root/rpmbuild/SOURCES && \ + mkdir -p /root/rpmbuild/SPECS && \ + mkdir -p /root/rpmbuild/SRPMS && \ + cd /data/job/build/packages && bash generate_package.sh rpm + label: ":centos: Package builder" + agents: + queue: "automation-large-builder-fleet" + artifact_paths: + - "build/packages/x86_64/*.rpm" + plugins: + docker#v1.4.0: + image: "eosio/ci:centos" + workdir: /data/job + env: + OS: "el7" + PKGTYPE: "rpm" + timeout: 60 \ No newline at end of file diff --git a/.buildkite/pipeline_mac_build.yml b/.buildkite/pipeline_mac_build.yml new file mode 100644 index 00000000000..73d0ab98ffd --- /dev/null +++ b/.buildkite/pipeline_mac_build.yml @@ -0,0 +1,131 @@ +steps: + - command: | + echo "--- Creating symbolic link to job directory :file_folder:" && \ + sleep 5 && ln -s "$(pwd)" /data/job && cd /data/job && \ + echo "+++ Building :hammer:" && \ + echo 1 | ./eosio_build.sh && \ + echo "--- Compressing build directory :compression:" && \ + tar -pczf build.tar.gz build/ + label: ":darwin: High Sierra Build" + agents: + - "role=macos-builder" + artifact_paths: "build.tar.gz" + timeout: 60 + + - command: | + echo "--- Creating symbolic link to job directory :file_folder:" && \ + sleep 5 && ln -s "$(pwd)" /data/job && cd /data/job && \ + echo "+++ Building :hammer:" && \ + echo 1 | ./eosio_build.sh && \ + echo "--- Compressing build directory :compression:" && \ + tar -pczf build.tar.gz build/ + label: ":darwin: Mojave Build" + agents: + - "role=builder" + - "os=mojave" + artifact_paths: "build.tar.gz" + timeout: 60 + + - wait + + - command: | + echo "--- :arrow_down: Downloading build directory" && \ + buildkite-agent artifact download "build.tar.gz" . --step ":darwin: High Sierra Build" && \ + tar -zxf build.tar.gz && \ + echo "--- :m: Starting MongoDB" && \ + $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ + echo "+++ :microscope: Running tests" && \ + ln -s "$(pwd)" /data/job && cd /data/job/build && ctest -j8 -LE _tests --output-on-failure + label: ":darwin: High Sierra Tests" + agents: + - "role=macos-builder" + artifact_paths: + - "mongod.log" + - "build/genesis.json" + - "build/config.ini" + timeout: 60 + + - command: | + echo "--- :arrow_down: Downloading build directory" && \ + buildkite-agent artifact download "build.tar.gz" . --step ":darwin: High Sierra Build" && \ + tar -zxf build.tar.gz && \ + echo "--- :m: Starting MongoDB" && \ + $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ + echo "+++ :microscope: Running tests" && \ + ln -s "$(pwd)" /data/job && cd /data/job/build && ctest -L nonparallelizable_tests --output-on-failure + label: ":darwin: High Sierra NP Tests" + agents: + - "role=macos-builder" + artifact_paths: + - "mongod.log" + - "build/genesis.json" + - "build/config.ini" + timeout: 60 + + - command: | + echo "--- :arrow_down: Downloading build directory" && \ + buildkite-agent artifact download "build.tar.gz" . 
--step ":darwin: Mojave Build" && \ + tar -zxf build.tar.gz && \ + echo "--- :m: Starting MongoDB" && \ + $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ + echo "+++ :microscope: Running tests" && \ + ln -s "$(pwd)" /data/job && cd /data/job/build && ctest -j8 -LE _tests --output-on-failure + label: ":darwin: Mojave Tests" + agents: + - "role=builder" + - "os=mojave" + artifact_paths: + - "mongod.log" + - "build/genesis.json" + - "build/config.ini" + timeout: 60 + + - command: | + echo "--- :arrow_down: Downloading build directory" && \ + buildkite-agent artifact download "build.tar.gz" . --step ":darwin: Mojave Build" && \ + tar -zxf build.tar.gz && \ + echo "--- :m: Starting MongoDB" && \ + $(which mongod) --fork --logpath "$(pwd)"/mongod.log && \ + echo "+++ :microscope: Running tests" && \ + ln -s "$(pwd)" /data/job && cd /data/job/build && ctest -L nonparallelizable_tests --output-on-failure + label: ":darwin: Mojave NP Tests" + agents: + - "role=builder" + - "os=mojave" + artifact_paths: + - "mongod.log" + - "build/genesis.json" + - "build/config.ini" + timeout: 60 + + - wait + + - command: | + echo "--- :arrow_down: Downloading build directory" && \ + buildkite-agent artifact download "build.tar.gz" . --step ":darwin: High Sierra Build" && \ + tar -zxf build.tar.gz && \ + echo "+++ :microscope: Starting package build" && \ + ln -s "$(pwd)" /data/job && cd /data/job/build/packages && bash generate_package.sh brew + label: ":darwin: High Sierra Package Builder" + agents: + - "role=macos-builder" + - "os=high-sierra" + artifact_paths: + - "build/packages/*.tar.gz" + - "build/packages/*.rb" + timeout: 60 + + - command: | + echo "--- :arrow_down: Downloading build directory" && \ + buildkite-agent artifact download "build.tar.gz" . --step ":darwin: Mojave Build" && \ + tar -zxf build.tar.gz && \ + echo "+++ :microscope: Starting package build" && \ + ln -s "$(pwd)" /data/job && cd /data/job/build/packages && bash generate_package.sh brew + label: ":darwin: Mojave Package Builder" + agents: + - "role=builder" + - "os=mojave" + artifact_paths: + - "build/packages/*.tar.gz" + - "build/packages/*.rb" + timeout: 60 From c41244250cb331de5e3c1e4bf7b93d4bbabec84d Mon Sep 17 00:00:00 2001 From: Kevin Heifner Date: Fri, 19 Apr 2019 12:20:30 -0500 Subject: [PATCH 03/62] Reduce logging of complete object when unable to serialize --- libraries/chain/include/eosio/chain/abi_serializer.hpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libraries/chain/include/eosio/chain/abi_serializer.hpp b/libraries/chain/include/eosio/chain/abi_serializer.hpp index 8f8fca4cdeb..096e1d4c1c8 100644 --- a/libraries/chain/include/eosio/chain/abi_serializer.hpp +++ b/libraries/chain/include/eosio/chain/abi_serializer.hpp @@ -680,7 +680,7 @@ void abi_serializer::to_variant( const T& o, variant& vo, Resolver resolver, con impl::abi_traverse_context ctx(max_serialization_time); impl::abi_to_variant::add(mvo, "_", o, resolver, ctx); vo = std::move(mvo["_"]); -} FC_RETHROW_EXCEPTIONS(error, "Failed to serialize type", ("object",o)) +} FC_RETHROW_EXCEPTIONS(error, "Failed to serialize type", ("type", typeid(o).name() )) template void abi_serializer::from_variant( const variant& v, T& o, Resolver resolver, const fc::microseconds& max_serialization_time ) try { From af8ed7104bdd2f42a1b7240d300e6bc5fc2f2c23 Mon Sep 17 00:00:00 2001 From: Kevin Heifner Date: Fri, 19 Apr 2019 13:15:32 -0500 Subject: [PATCH 04/62] Add demangle of type --- libraries/chain/include/eosio/chain/abi_serializer.hpp | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-)

diff --git a/libraries/chain/include/eosio/chain/abi_serializer.hpp b/libraries/chain/include/eosio/chain/abi_serializer.hpp
index 096e1d4c1c8..06b9d2bb761 100644
--- a/libraries/chain/include/eosio/chain/abi_serializer.hpp
+++ b/libraries/chain/include/eosio/chain/abi_serializer.hpp
@@ -680,7 +680,7 @@ void abi_serializer::to_variant( const T& o, variant& vo, Resolver resolver, con
    impl::abi_traverse_context ctx(max_serialization_time);
    impl::abi_to_variant::add(mvo, "_", o, resolver, ctx);
    vo = std::move(mvo["_"]);
-} FC_RETHROW_EXCEPTIONS(error, "Failed to serialize type", ("type", typeid(o).name() ))
+} FC_RETHROW_EXCEPTIONS(error, "Failed to serialize: ${type}", ("type", boost::core::demangle( typeid(o).name() ) ))
 
 template<typename T, typename Resolver>
 void abi_serializer::from_variant( const variant& v, T& o, Resolver resolver, const fc::microseconds& max_serialization_time ) try {

From d45f20ce4fac9c844acc0856f167b94d3d2e110d Mon Sep 17 00:00:00 2001
From: Kevin Heifner
Date: Tue, 23 Apr 2019 12:51:38 -0500
Subject: [PATCH 05/62] Keep block log open to minimize open/close of file

---
 libraries/chain/block_log.cpp | 115 ++++++++++++--------------
 1 file changed, 46 insertions(+), 69 deletions(-)

diff --git a/libraries/chain/block_log.cpp b/libraries/chain/block_log.cpp
index 41e9756483e..42505a69909 100644
--- a/libraries/chain/block_log.cpp
+++ b/libraries/chain/block_log.cpp
@@ -9,6 +9,7 @@
 
 #define LOG_READ (std::ios::in | std::ios::binary)
 #define LOG_WRITE (std::ios::out | std::ios::binary | std::ios::app)
+#define LOG_RW ( std::ios::in | std::ios::out | std::ios::binary )
 
 namespace eosio { namespace chain {
 
@@ -31,47 +32,42 @@ namespace eosio { namespace chain {
          std::fstream index_stream;
          fc::path     block_file;
          fc::path     index_file;
-         bool         block_write;
-         bool         index_write;
+         bool         open_files = false;
          bool         genesis_written_to_block_log = false;
          uint32_t     version = 0;
          uint32_t     first_block_num = 0;
 
-         inline void check_block_read() {
-            if (block_write) {
-               block_stream.close();
-               block_stream.open(block_file.generic_string().c_str(), LOG_READ);
-               block_write = false;
+         inline void check_open_files() {
+            if( !open_files ) {
+               reopen();
             }
          }
+         void reopen();
 
-         inline void check_block_write() {
-            if (!block_write) {
+         void close() {
+            if( block_stream.is_open() )
                block_stream.close();
-               block_stream.open(block_file.generic_string().c_str(), LOG_WRITE);
-               block_write = true;
-            }
-         }
-
-         inline void check_index_read() {
-            try {
-               if (index_write) {
-                  index_stream.close();
-                  index_stream.open(index_file.generic_string().c_str(), LOG_READ);
-                  index_write = false;
-               }
-            }
-            FC_LOG_AND_RETHROW()
-         }
-
-         inline void check_index_write() {
-            if (!index_write) {
+            if( index_stream.is_open() )
                index_stream.close();
-               index_stream.open(index_file.generic_string().c_str(), LOG_WRITE);
-               index_write = true;
-            }
+            open_files = false;
          }
       };
+
+      void block_log_impl::reopen() {
+         close();
+
+         // open to create files if they don't exist
+         //ilog("Opening block log at ${path}", ("path", my->block_file.generic_string()));
+         block_stream.open(block_file.generic_string().c_str(), LOG_WRITE);
+         index_stream.open(index_file.generic_string().c_str(), LOG_WRITE);
+
+         close();
+
+         block_stream.open(block_file.generic_string().c_str(), LOG_RW);
+         index_stream.open(index_file.generic_string().c_str(), LOG_RW);
+
+         open_files = true;
+      }
    }
 
    block_log::block_log(const fc::path& data_dir)
@@ -88,26 +84,21 @@ namespace eosio { namespace chain {
    block_log::~block_log() {
       if (my) {
          flush();
+         my->close();
         my.reset();
      }
   }
 
   void block_log::open(const fc::path& data_dir) {
-      if (my->block_stream.is_open())
-         my->block_stream.close();
-      if (my->index_stream.is_open())
-         my->index_stream.close();
+      my->close();
 
      if (!fc::is_directory(data_dir))
         fc::create_directories(data_dir);
+
      my->block_file = data_dir / "blocks.log";
      my->index_file = data_dir / "blocks.index";
 
-      //ilog("Opening block log at ${path}", ("path", my->block_file.generic_string()));
-      my->block_stream.open(my->block_file.generic_string().c_str(), LOG_WRITE);
-      my->index_stream.open(my->index_file.generic_string().c_str(), LOG_WRITE);
-      my->block_write = true;
-      my->index_write = true;
+      my->reopen();
 
      /* On startup of the block log, there are several states the log file and the index file can be
       * in relation to each other.
@@ -132,7 +123,6 @@ namespace eosio { namespace chain {
 
      if (log_size) {
         ilog("Log is nonempty");
-         my->check_block_read();
         my->block_stream.seekg( 0 );
         my->version = 0;
         my->block_stream.read( (char*)&my->version, sizeof(my->version) );
@@ -155,9 +145,6 @@ namespace eosio { namespace chain {
         my->head_id = my->head->id();
 
         if (index_size) {
-            my->check_block_read();
-            my->check_index_read();
-
            ilog("Index is nonempty");
            uint64_t block_pos;
            my->block_stream.seekg(-sizeof(uint64_t), std::ios::end);
@@ -180,10 +167,9 @@ namespace eosio { namespace chain {
         }
      } else if (index_size) {
         ilog("Index is nonempty, remove and recreate it");
-         my->index_stream.close();
+         my->close();
         fc::remove_all(my->index_file);
-         my->index_stream.open(my->index_file.generic_string().c_str(), LOG_WRITE);
-         my->index_write = true;
+         my->reopen();
      }
   }
 
@@ -191,9 +177,10 @@ namespace eosio { namespace chain {
     try {
        EOS_ASSERT( my->genesis_written_to_block_log, block_log_append_fail, "Cannot append to block log until the genesis is first written" );
 
-        my->check_block_write();
-        my->check_index_write();
+        my->check_open_files();
+        my->block_stream.seekp(0, std::ios::end);
+        my->index_stream.seekp(0, std::ios::end);
        uint64_t pos = my->block_stream.tellp();
        EOS_ASSERT(my->index_stream.tellp() == sizeof(uint64_t) * (b->block_num() - my->first_block_num),
                   block_log_append_fail,
@@ -220,22 +207,17 @@ namespace eosio { namespace chain {
   }
 
   void block_log::reset( const genesis_state& gs, const signed_block_ptr& first_block, uint32_t first_block_num ) {
-      if (my->block_stream.is_open())
-         my->block_stream.close();
-      if (my->index_stream.is_open())
-         my->index_stream.close();
+      my->close();
 
      fc::remove_all(my->block_file);
      fc::remove_all(my->index_file);
 
-      my->block_stream.open(my->block_file.generic_string().c_str(), LOG_WRITE);
-      my->index_stream.open(my->index_file.generic_string().c_str(), LOG_WRITE);
-      my->block_write = true;
-      my->index_write = true;
+      my->reopen();
 
      auto data = fc::raw::pack(gs);
      my->version = 0; // version of 0 is invalid; it indicates that the genesis was not properly written to the block log
      my->first_block_num = first_block_num;
+      my->block_stream.seekp(0, std::ios::end);
      my->block_stream.write((char*)&my->version, sizeof(my->version));
      my->block_stream.write((char*)&my->first_block_num, sizeof(my->first_block_num));
      my->block_stream.write(data.data(), data.size());
@@ -251,22 +233,16 @@ namespace eosio { namespace chain {
 
      auto pos = my->block_stream.tellp();
 
-      my->block_stream.close();
-      my->block_stream.open(my->block_file.generic_string().c_str(), std::ios::in | std::ios::out | std::ios::binary ); // Bypass append-only writing just once
-
      static_assert( block_log::max_supported_version > 0, "a version number of zero is not supported" );
      my->version = block_log::max_supported_version;
      my->block_stream.seekp( 0 );
      my->block_stream.write( (char*)&my->version, sizeof(my->version) );
      my->block_stream.seekp( pos );
      flush();
-
-      my->block_write = false;
-      my->check_block_write(); // Reset to append-only writing.
   }
 
   std::pair<signed_block_ptr, uint64_t> block_log::read_block(uint64_t pos)const {
-      my->check_block_read();
+      my->check_open_files();
      my->block_stream.seekg(pos);
      std::pair<signed_block_ptr, uint64_t> result;
@@ -290,7 +266,7 @@ namespace eosio { namespace chain {
   }
 
   uint64_t block_log::get_block_pos(uint32_t block_num) const {
-      my->check_index_read();
+      my->check_open_files();
      if (!(my->head && block_num <= block_header::num_from_id(my->head_id) && block_num >= my->first_block_num))
         return npos;
      my->index_stream.seekg(sizeof(uint64_t) * (block_num - my->first_block_num));
@@ -300,7 +276,7 @@ namespace eosio { namespace chain {
   }
 
   signed_block_ptr block_log::read_head()const {
-      my->check_block_read();
+      my->check_open_files();
 
      uint64_t pos;
@@ -328,13 +304,13 @@ namespace eosio { namespace chain {
 
   void block_log::construct_index() {
      ilog("Reconstructing Block Log Index...");
-      my->index_stream.close();
+      my->close();
+
      fc::remove_all(my->index_file);
-      my->index_stream.open(my->index_file.generic_string().c_str(), LOG_WRITE);
-      my->index_write = true;
+
+      my->reopen();
 
      uint64_t end_pos;
-      my->check_block_read();
 
      my->block_stream.seekg(-sizeof( uint64_t), std::ios::end);
      my->block_stream.read((char*)&end_pos, sizeof(end_pos));
@@ -357,6 +333,7 @@ namespace eosio { namespace chain {
         my->block_stream.read((char*) &totem, sizeof(totem));
      }
 
+      my->index_stream.seekp(0, std::ios::end);
      while( pos < end_pos ) {
         fc::raw::unpack(my->block_stream, tmp);
         my->block_stream.read((char*)&pos, sizeof(pos));

From 0c085d23075c07785c2e99e7786d910d4f048adf Mon Sep 17 00:00:00 2001
From: arhag
Date: Tue, 23 Apr 2019 16:50:42 -0400
Subject: [PATCH 06/62] allow opening block log with no blocks (fixes
 undefined behavior bug); construct_index should leave index file empty if
 block log contains no blocks

---
 libraries/chain/block_log.cpp | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/libraries/chain/block_log.cpp b/libraries/chain/block_log.cpp
index 42505a69909..1c06422c0a7 100644
--- a/libraries/chain/block_log.cpp
+++ b/libraries/chain/block_log.cpp
@@ -142,7 +142,11 @@ namespace eosio { namespace chain {
      }
 
      my->head = read_head();
-      my->head_id = my->head->id();
+      if( my->head ) {
+         my->head_id = my->head->id();
+      } else {
+         my->head_id = {};
+      }
 
      if (index_size) {
         ilog("Index is nonempty");
         uint64_t block_pos;
         my->block_stream.seekg(-sizeof(uint64_t), std::ios::end);
@@ -314,6 +318,12 @@ namespace eosio { namespace chain {
 
      my->block_stream.seekg(-sizeof( uint64_t), std::ios::end);
      my->block_stream.read((char*)&end_pos, sizeof(end_pos));
+
+      if( end_pos == npos ) {
+         ilog( "Block log contains no blocks. No need to construct index." );
+         return;
+      }
+
      signed_block tmp;
      uint64_t pos = 0;

From 12f934cade9fe9d60bc38d77995eb9073104190a Mon Sep 17 00:00:00 2001
From: Kevin Heifner
Date: Thu, 25 Apr 2019 08:00:04 -0500
Subject: [PATCH 07/62] Bump to 1.6.5

---
 CMakeLists.txt   |  2 +-
 Docker/README.md |  4 ++--
 README.md        | 16 ++++++++--------
 3 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index c73c0eff83c..a4111a579c0 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -35,7 +35,7 @@ set( CXX_STANDARD_REQUIRED ON)
 
 set(VERSION_MAJOR 1)
 set(VERSION_MINOR 6)
-set(VERSION_PATCH 4)
+set(VERSION_PATCH 5)
 
 if(VERSION_SUFFIX)
    set(VERSION_FULL "${VERSION_MAJOR}.${VERSION_MINOR}.${VERSION_PATCH}-${VERSION_SUFFIX}")
diff --git a/Docker/README.md b/Docker/README.md
index 38fc82cf43b..ca0de7e3e06 100644
--- a/Docker/README.md
+++ b/Docker/README.md
@@ -20,10 +20,10 @@ cd eos/Docker
 docker build . -t eosio/eos
 ```
 
-The above will build off the most recent commit to the master branch by default. If you would like to target a specific branch/tag, you may use a build argument. For example, if you wished to generate a docker image based off of the 1.6.4 tag, you could do the following:
+The above will build off the most recent commit to the master branch by default. If you would like to target a specific branch/tag, you may use a build argument. For example, if you wished to generate a docker image based off of the 1.6.5 tag, you could do the following:
 
 ```bash
-docker build -t eosio/eos:v1.6.4 --build-arg branch=v1.6.4 .
+docker build -t eosio/eos:v1.6.5 --build-arg branch=v1.6.5 .
 ```
 
 By default, the symbol in eosio.system is set to SYS. You can override this using the symbol argument while building the docker image.
diff --git a/README.md b/README.md
index 49b8a7554b0..2516e98c890 100644
--- a/README.md
+++ b/README.md
@@ -39,13 +39,13 @@
 $ brew remove eosio
 ```
 #### Ubuntu 18.04 Debian Package Install
 ```sh
-$ wget https://github.com/eosio/eos/releases/download/v1.6.4/eosio_1.6.4-1-ubuntu-18.04_amd64.deb
-$ sudo apt install ./eosio_1.6.4-1-ubuntu-18.04_amd64.deb
+$ wget https://github.com/eosio/eos/releases/download/v1.6.5/eosio_1.6.5-1-ubuntu-18.04_amd64.deb
+$ sudo apt install ./eosio_1.6.5-1-ubuntu-18.04_amd64.deb
 ```
 #### Ubuntu 16.04 Debian Package Install
 ```sh
-$ wget https://github.com/eosio/eos/releases/download/v1.6.4/eosio_1.6.4-1-ubuntu-16.04_amd64.deb
-$ sudo apt install ./eosio_1.6.4-1-ubuntu-16.04_amd64.deb
+$ wget https://github.com/eosio/eos/releases/download/v1.6.5/eosio_1.6.5-1-ubuntu-16.04_amd64.deb
+$ sudo apt install ./eosio_1.6.5-1-ubuntu-16.04_amd64.deb
 ```
 #### Debian Package Uninstall
 ```sh
 $ sudo apt remove eosio
 ```
 #### Centos RPM Package Install
 ```sh
-$ wget https://github.com/eosio/eos/releases/download/v1.6.4/eosio-1.6.4-1.el7.x86_64.rpm
-$ sudo yum install ./eosio-1.6.4-1.el7.x86_64.rpm
+$ wget https://github.com/eosio/eos/releases/download/v1.6.5/eosio-1.6.5-1.el7.x86_64.rpm
+$ sudo yum install ./eosio-1.6.5-1.el7.x86_64.rpm
 ```
 #### Centos RPM Package Uninstall
 ```sh
 $ sudo yum remove eosio.cdt
 ```
 #### Fedora RPM Package Install
 ```sh
-$ wget https://github.com/eosio/eos/releases/download/v1.6.4/eosio-1.6.4-1.fc27.x86_64.rpm
-$ sudo yum install ./eosio-1.6.4-1.fc27.x86_64.rpm
+$ wget https://github.com/eosio/eos/releases/download/v1.6.5/eosio-1.6.5-1.fc27.x86_64.rpm
+$ sudo yum install ./eosio-1.6.5-1.fc27.x86_64.rpm
 ```
 #### Fedora RPM Package Uninstall
 ```sh
From 39c2182f6b45c60bcb688cec23e098ad979438c1 Mon Sep 17 00:00:00 2001 From: Kevin Heifner Date: Fri, 26 Apr 2019 15:27:56 -0500 Subject: [PATCH 08/62] Update to fc with gcc7 fix --- libraries/fc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libraries/fc b/libraries/fc index 456b588290d..16aa1495402 160000 --- a/libraries/fc +++ b/libraries/fc @@ -1 +1 @@ -Subproject commit 456b588290d0e109a155b0dd9fdf7ec01163380a +Subproject commit 16aa1495402083f1f1e923cb8806157784991437 From 96b3e81a930d95c3d8bb656393eb18e07954ac28 Mon Sep 17 00:00:00 2001 From: thaipandada Date: Tue, 30 Apr 2019 21:20:06 +0800 Subject: [PATCH 09/62] prepare v3.0.0 --- CMakeLists.txt | 4 ++-- Docker/README.md | 4 ++-- README.md | 12 +----------- README_CN.md | 12 +----------- 4 files changed, 6 insertions(+), 26 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index e49f6dd0907..6ec500711d7 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -33,9 +33,9 @@ set( CMAKE_CXX_STANDARD 14 ) set( CMAKE_CXX_EXTENSIONS ON ) set( CXX_STANDARD_REQUIRED ON) -set(VERSION_MAJOR 2) +set(VERSION_MAJOR 3) set(VERSION_MINOR 0) -set(VERSION_PATCH 3) +set(VERSION_PATCH 0) if(VERSION_SUFFIX) set(VERSION_FULL "${VERSION_MAJOR}.${VERSION_MINOR}.${VERSION_PATCH}-${VERSION_SUFFIX}") diff --git a/Docker/README.md b/Docker/README.md index b5228e17bbc..02eabc07b1c 100644 --- a/Docker/README.md +++ b/Docker/README.md @@ -20,10 +20,10 @@ cd bos/Docker docker build . -t boscore/bos -s BOS ``` -The above will build off the most recent commit to the master branch by default. If you would like to target a specific branch/tag, you may use a build argument. For example, if you wished to generate a docker image based off of the v2.0.3 tag, you could do the following: +The above will build off the most recent commit to the master branch by default. If you would like to target a specific branch/tag, you may use a build argument. For example, if you wished to generate a docker image based off of the v3.0.0 tag, you could do the following: ```bash -docker build -t boscore/bos:v2.0.3 --build-arg branch=v2.0.3 . +docker build -t boscore/bos:v3.0.0 --build-arg branch=v3.0.0 . ``` diff --git a/README.md b/README.md index 0b90a01c333..8b4a1662194 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # BOSCore - Born for DApps. Born for Usability. -## BOSCore Version: v2.0.3 +## BOSCore Version: v3.0.0 ### Basic EOSIO Version: v1.6.4 (support REX) # Background @@ -39,16 +39,6 @@ Attention: 3. Treat update of eosio/eos code as new feature. 4. Emergent issues must repaired by adopting hotfixes mode. -## BOSCore Workflow -BOSCore encourage community developer actively participate in contributing the code, members should follow the workflow below. -![BOSCore Workflow](./images/bos-workflow.png) - -Attention: -1. Only allow Feature Branch or bug fix to submit PR to Develop Branch. -2. Rebase is required before submitting PR to Develop Branch. -3. Treat update of eosio/eos code as new feature. -4. Emergent issues must repaired by adopting hotfixes mode. - BOSCore bases on EOSIO, so you can also referer: [Getting Started](https://developers.eos.io/eosio-nodeos/docs/overview-1) diff --git a/README_CN.md b/README_CN.md index 23b9199068f..35861debeca 100644 --- a/README_CN.md +++ b/README_CN.md @@ -1,6 +1,6 @@ # BOSCore - 更可用的链,为DApp而生。 -## BOSCore Version: v2.0.3 +## BOSCore Version: v3.0.0 ### Basic EOSIO Version: v1.6.4 (support REX) # 背景 @@ -39,16 +39,6 @@ BOSCore 鼓励社区开发者参与代码贡献,社区成员应当遵循以下 3. EOSIO 主网版本作为一个 Feature Branch 来对待 4. 
紧急问题修复采用 hotfixes 模式 -## BOSCore 开发流程 -BOSCore 鼓励社区开发者参与代码贡献,社区成员应当遵循以下工作流: -![BOSCore Workflow](./images/bos-workflow.png) - -注意: -1. 只有待发布的 Feature Branch 或者Bug修复才应该向 Develop Branch 提交 -2. 向 Develop Branch 提交 PR 之前需要现在本地执行 rebase 操作 -3. EOSIO 主网版本作为一个 Feature Branch 来对待 -4. 紧急问题修复采用 hotfixes 模式 - BOSCore是基于EOSIO技术的扩展,所以EOSIO的相关资料也可以参考: [EOSIO 开始](https://developers.eos.io/eosio-nodeos/docs/overview-1) From 0d6ffa02078a28df2a0735122441b6ae6942990b Mon Sep 17 00:00:00 2001 From: Kevin Heifner Date: Mon, 29 Apr 2019 11:06:51 -0500 Subject: [PATCH 10/62] Determine flags for pgrep dynamically instead of by list of hosts --- tests/testUtils.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/tests/testUtils.py b/tests/testUtils.py index a8dbe0fd4d2..ac18fd407ce 100755 --- a/tests/testUtils.py +++ b/tests/testUtils.py @@ -217,12 +217,17 @@ def arePortsAvailable(ports): @staticmethod def pgrepCmd(serverName): - pgrepOpts="-fl" # pylint: disable=deprecated-method - if platform.linux_distribution()[0] in ["Ubuntu", "LinuxMint", "Fedora","CentOS Linux","arch"]: + # pgrep differs on different platforms (amazonlinux1 and 2 for example). We need to check if pgrep --help has -a available and add that if so: + try: + pgrepHelp = re.search('-a', subprocess.Popen("pgrep --help 2>/dev/null", shell=True, stdout=subprocess.PIPE).stdout.read().decode('utf-8')) + pgrepHelp.group(0) # group() errors if -a is not found, so we don't need to do anything else special here. pgrepOpts="-a" + except AttributeError as error: + # If no -a, AttributeError: 'NoneType' object has no attribute 'group' + pgrepOpts="-fl" - return "pgrep %s %s" % (pgrepOpts, serverName) + return "pgrep %s %s" % (pgrepOpts, serverName) @staticmethod def getBlockLog(blockLogLocation, silentErrors=False, exitOnError=False): From 665f334c9c74a47980e4d51c0e14997228cc580c Mon Sep 17 00:00:00 2001 From: Kevin Heifner Date: Mon, 29 Apr 2019 13:28:05 -0500 Subject: [PATCH 11/62] Add missing import of re --- tests/testUtils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/testUtils.py b/tests/testUtils.py index ac18fd407ce..85a7f89f301 100755 --- a/tests/testUtils.py +++ b/tests/testUtils.py @@ -1,3 +1,4 @@ +import re import errno import subprocess import time
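The capability probe added to `pgrepCmd` above can be exercised on its own. The sketch below is illustrative only and not part of the patch; it assumes `pgrep` is on `PATH` and swaps `subprocess.Popen` for the higher-level `subprocess.run`, keeping the same fallback to `-fl` when `-a` is not advertised in the help text:

```python
# Minimal sketch of the pgrep capability probe in pgrepCmd (illustrative, not from the patch).
# Assumes pgrep is on PATH; falls back to -fl when -a is not listed in the help text.
import re
import subprocess

def pgrep_opts():
    helpText = subprocess.run("pgrep --help 2>/dev/null", shell=True,
                              stdout=subprocess.PIPE).stdout.decode('utf-8')
    # On platforms whose pgrep lacks -a (amazonlinux1, for example), the flag
    # never appears in the help output, so the search comes back empty.
    return "-a" if re.search('-a', helpText) else "-fl"

print("pgrep %s nodeos" % pgrep_opts())
```

Probing the help text is cruder than parsing a version string, but it tracks what the installed pgrep actually supports rather than maintaining a per-distribution whitelist, which is what this change removes.

From e08fb6b7ae123d738186fa1a211112b5047a110a Mon Sep 17 00:00:00 2001 From: Kevin Heifner Date: Wed, 1 May 2019 11:55:23 -0500 Subject: [PATCH 12/62] Change default log level from debug to info.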
--- libraries/fc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libraries/fc b/libraries/fc index 16aa1495402..0934dd7d3b5 160000 --- a/libraries/fc +++ b/libraries/fc @@ -1 +1 @@ -Subproject commit 16aa1495402083f1f1e923cb8806157784991437 +Subproject commit 0934dd7d3b5aacee1cc7229a3bfaf1a1ac89adf5 From b5be2d9f85b7d52d871b9fed6b746cfc73146ada Mon Sep 17 00:00:00 2001 From: Zach Butler Date: Thu, 2 May 2019 22:05:55 -0400 Subject: [PATCH 13/62] Created test metrics Buildkite job --- .buildkite/pipeline.yml | 11 + .../node_modules/node-fetch/CHANGELOG.md | 260 +++ .../node_modules/node-fetch/LICENSE.md | 22 + .../metrics/node_modules/node-fetch/README.md | 538 ++++++ .../node_modules/node-fetch/browser.js | 23 + .../node_modules/node-fetch/lib/index.es.js | 1631 ++++++++++++++++ .../node_modules/node-fetch/lib/index.js | 1640 +++++++++++++++++ .../node_modules/node-fetch/lib/index.mjs | 1629 ++++++++++++++++ .../node_modules/node-fetch/package.json | 94 + scripts/metrics/node_modules/sax/LICENSE | 41 + scripts/metrics/node_modules/sax/README.md | 225 +++ scripts/metrics/node_modules/sax/lib/sax.js | 1565 ++++++++++++++++ scripts/metrics/node_modules/sax/package.json | 61 + scripts/metrics/node_modules/xml2js/LICENSE | 19 + scripts/metrics/node_modules/xml2js/README.md | 406 ++++ .../metrics/node_modules/xml2js/lib/bom.js | 12 + .../node_modules/xml2js/lib/builder.js | 127 ++ .../node_modules/xml2js/lib/defaults.js | 72 + .../metrics/node_modules/xml2js/lib/parser.js | 357 ++++ .../node_modules/xml2js/lib/processors.js | 34 + .../metrics/node_modules/xml2js/lib/xml2js.js | 37 + .../metrics/node_modules/xml2js/package.json | 280 +++ .../node_modules/xmlbuilder/.npmignore | 5 + .../node_modules/xmlbuilder/CHANGELOG.md | 423 +++++ .../metrics/node_modules/xmlbuilder/LICENSE | 21 + .../metrics/node_modules/xmlbuilder/README.md | 85 + .../node_modules/xmlbuilder/lib/Utility.js | 73 + .../xmlbuilder/lib/XMLAttribute.js | 31 + .../node_modules/xmlbuilder/lib/XMLCData.js | 32 + .../node_modules/xmlbuilder/lib/XMLComment.js | 32 + .../xmlbuilder/lib/XMLDTDAttList.js | 50 + .../xmlbuilder/lib/XMLDTDElement.js | 35 + .../xmlbuilder/lib/XMLDTDEntity.js | 56 + .../xmlbuilder/lib/XMLDTDNotation.js | 37 + .../xmlbuilder/lib/XMLDeclaration.js | 40 + .../node_modules/xmlbuilder/lib/XMLDocType.js | 107 ++ .../xmlbuilder/lib/XMLDocument.js | 48 + .../xmlbuilder/lib/XMLDocumentCB.js | 402 ++++ .../node_modules/xmlbuilder/lib/XMLElement.js | 111 ++ .../node_modules/xmlbuilder/lib/XMLNode.js | 432 +++++ .../lib/XMLProcessingInstruction.js | 35 + .../node_modules/xmlbuilder/lib/XMLRaw.js | 32 + .../xmlbuilder/lib/XMLStreamWriter.js | 279 +++ .../xmlbuilder/lib/XMLStringWriter.js | 334 ++++ .../xmlbuilder/lib/XMLStringifier.js | 163 ++ .../node_modules/xmlbuilder/lib/XMLText.js | 32 + .../xmlbuilder/lib/XMLWriterBase.js | 90 + .../node_modules/xmlbuilder/lib/index.js | 53 + .../node_modules/xmlbuilder/package.json | 65 + scripts/metrics/package-lock.json | 30 + scripts/metrics/test-metrics.js | 415 +++++ scripts/metrics/test-metrics.json | 1 + 52 files changed, 12633 insertions(+) create mode 100644 scripts/metrics/node_modules/node-fetch/CHANGELOG.md create mode 100644 scripts/metrics/node_modules/node-fetch/LICENSE.md create mode 100644 scripts/metrics/node_modules/node-fetch/README.md create mode 100644 scripts/metrics/node_modules/node-fetch/browser.js create mode 100644 scripts/metrics/node_modules/node-fetch/lib/index.es.js create mode 100644 
scripts/metrics/node_modules/node-fetch/lib/index.js create mode 100644 scripts/metrics/node_modules/node-fetch/lib/index.mjs create mode 100644 scripts/metrics/node_modules/node-fetch/package.json create mode 100644 scripts/metrics/node_modules/sax/LICENSE create mode 100644 scripts/metrics/node_modules/sax/README.md create mode 100644 scripts/metrics/node_modules/sax/lib/sax.js create mode 100644 scripts/metrics/node_modules/sax/package.json create mode 100644 scripts/metrics/node_modules/xml2js/LICENSE create mode 100644 scripts/metrics/node_modules/xml2js/README.md create mode 100644 scripts/metrics/node_modules/xml2js/lib/bom.js create mode 100644 scripts/metrics/node_modules/xml2js/lib/builder.js create mode 100644 scripts/metrics/node_modules/xml2js/lib/defaults.js create mode 100644 scripts/metrics/node_modules/xml2js/lib/parser.js create mode 100644 scripts/metrics/node_modules/xml2js/lib/processors.js create mode 100644 scripts/metrics/node_modules/xml2js/lib/xml2js.js create mode 100644 scripts/metrics/node_modules/xml2js/package.json create mode 100644 scripts/metrics/node_modules/xmlbuilder/.npmignore create mode 100644 scripts/metrics/node_modules/xmlbuilder/CHANGELOG.md create mode 100644 scripts/metrics/node_modules/xmlbuilder/LICENSE create mode 100644 scripts/metrics/node_modules/xmlbuilder/README.md create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/Utility.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/XMLAttribute.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/XMLCData.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/XMLComment.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/XMLDTDAttList.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/XMLDTDElement.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/XMLDTDEntity.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/XMLDTDNotation.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/XMLDeclaration.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/XMLDocType.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/XMLDocument.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/XMLDocumentCB.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/XMLElement.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/XMLNode.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/XMLProcessingInstruction.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/XMLRaw.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/XMLStreamWriter.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/XMLStringWriter.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/XMLStringifier.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/XMLText.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/XMLWriterBase.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/lib/index.js create mode 100644 scripts/metrics/node_modules/xmlbuilder/package.json create mode 100644 scripts/metrics/package-lock.json create mode 100755 scripts/metrics/test-metrics.js create mode 100644 scripts/metrics/test-metrics.json diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index 893a19f9ff9..6fed5a0f33a 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -384,6 +384,17 @@ steps: workdir: /data/job timeout: 60 + - wait: + continue_on_failure: true 
+ + - command: | + cd scripts/metrics + node --max-old-space-size=4096 test-metrics.js + label: ":bar_chart: Test Metrics" + agents: + queue: "automation-apps-builder-fleet" + timeout: 10 + - wait - command: | diff --git a/scripts/metrics/node_modules/node-fetch/CHANGELOG.md b/scripts/metrics/node_modules/node-fetch/CHANGELOG.md new file mode 100644 index 00000000000..941b6a8d8b7 --- /dev/null +++ b/scripts/metrics/node_modules/node-fetch/CHANGELOG.md @@ -0,0 +1,260 @@ + +Changelog +========= + + +# 2.x release + +## v2.5.0 + +- Enhance: `Response` object now includes `redirected` property. +- Enhance: `fetch()` now accepts third-party `Blob` implementation as body. +- Other: disable `package-lock.json` generation as we never commit them. +- Other: dev dependency update. +- Other: readme update. + +## v2.4.1 + +- Fix: `Blob` import rule for node < 10, as `Readable` isn't a named export. + +## v2.4.0 + +- Enhance: added `Brotli` compression support (using node's zlib). +- Enhance: updated `Blob` implementation per spec. +- Fix: set content type automatically for `URLSearchParams`. +- Fix: `Headers` now reject empty header names. +- Fix: test cases, as node 12+ no longer accepts invalid header response. + +## v2.3.0 + +- Enhance: added `AbortSignal` support, with README example. +- Enhance: handle invalid `Location` header during redirect by rejecting them explicitly with `FetchError`. +- Fix: update `browser.js` to support react-native environment, where `self` isn't available globally. + +## v2.2.1 + +- Fix: `compress` flag shouldn't overwrite existing `Accept-Encoding` header. +- Fix: multiple `import` rules, where `PassThrough` etc. doesn't have a named export when using node <10 and `--experimental-modules` flag. +- Other: Better README. + +## v2.2.0 + +- Enhance: Support all `ArrayBuffer` view types +- Enhance: Support Web Workers +- Enhance: Support Node.js' `--experimental-modules` mode; deprecate `.es.js` file +- Fix: Add `__esModule` property to the exports object +- Other: Better example in README for writing response to a file +- Other: More tests for Agent + +## v2.1.2 + +- Fix: allow `Body` methods to work on `ArrayBuffer`-backed `Body` objects +- Fix: reject promise returned by `Body` methods when the accumulated `Buffer` exceeds the maximum size +- Fix: support custom `Host` headers with any casing +- Fix: support importing `fetch()` from TypeScript in `browser.js` +- Fix: handle the redirect response body properly + +## v2.1.1 + +Fix packaging errors in v2.1.0. + +## v2.1.0 + +- Enhance: allow using ArrayBuffer as the `body` of a `fetch()` or `Request` +- Fix: store HTTP headers of a `Headers` object internally with the given case, for compatibility with older servers that incorrectly treated header names in a case-sensitive manner +- Fix: silently ignore invalid HTTP headers +- Fix: handle HTTP redirect responses without a `Location` header just like non-redirect responses +- Fix: include bodies when following a redirection when appropriate + +## v2.0.0 + +This is a major release. Check [our upgrade guide](https://github.com/bitinn/node-fetch/blob/master/UPGRADE-GUIDE.md) for an overview on some key differences between v1 and v2. + +### General changes + +- Major: Node.js 0.10.x and 0.12.x support is dropped +- Major: `require('node-fetch/lib/response')` etc.
is now unsupported; use `require('node-fetch').Response` or ES6 module imports +- Enhance: start testing on Node.js v4.x, v6.x, v8.x LTS, as well as v9.x stable +- Enhance: use Rollup to produce a distributed bundle (less memory overhead and faster startup) +- Enhance: make `Object.prototype.toString()` on Headers, Requests, and Responses return correct class strings +- Other: rewrite in ES2015 using Babel +- Other: use Codecov for code coverage tracking +- Other: update package.json script for npm 5 +- Other: `encoding` module is now optional (alpha.7) +- Other: expose browser.js through package.json, avoid bundling mishaps (alpha.9) +- Other: allow TypeScript to `import` node-fetch by exposing default (alpha.9) + +### HTTP requests + +- Major: overwrite user's `Content-Length` if we can be sure our information is correct (per spec) +- Fix: errors in a response are caught before the body is accessed +- Fix: support WHATWG URL objects, created by `whatwg-url` package or `require('url').URL` in Node.js 7+ + +### Response and Request classes + +- Major: `response.text()` no longer attempts to detect encoding, instead always opting for UTF-8 (per spec); use `response.textConverted()` for the v1 behavior +- Major: make `response.json()` throw error instead of returning an empty object on 204 no-content response (per spec; reverts behavior changed in v1.6.2) +- Major: internal methods are no longer exposed +- Major: throw error when a `GET` or `HEAD` Request is constructed with a non-null body (per spec) +- Enhance: add `response.arrayBuffer()` (also applies to Requests) +- Enhance: add experimental `response.blob()` (also applies to Requests) +- Enhance: `URLSearchParams` is now accepted as a body +- Enhance: wrap `response.json()` json parsing error as `FetchError` +- Fix: fix Request and Response with `null` body + +### Headers class + +- Major: remove `headers.getAll()`; make `get()` return all headers delimited by commas (per spec) +- Enhance: make Headers iterable +- Enhance: make Headers constructor accept an array of tuples +- Enhance: make sure header names and values are valid in HTTP +- Fix: coerce Headers prototype function parameters to strings, where applicable + +### Documentation + +- Enhance: more comprehensive API docs +- Enhance: add a list of default headers in README + + +# 1.x release + +## backport releases (v1.7.0 and beyond) + +See [changelog on 1.x branch](https://github.com/bitinn/node-fetch/blob/1.x/CHANGELOG.md) for details.
+ +## v1.6.3 + +- Enhance: error handling document to explain `FetchError` design +- Fix: support `form-data` 2.x releases (requires `form-data` >= 2.1.0) + +## v1.6.2 + +- Enhance: minor document update +- Fix: response.json() returns empty object on 204 no-content response instead of throwing a syntax error + +## v1.6.1 + +- Fix: if `res.body` is a non-stream non-formdata object, we will call `body.toString` and send it as a string +- Fix: `counter` value is incorrectly set to `follow` value when wrapping Request instance +- Fix: documentation update + +## v1.6.0 + +- Enhance: added `res.buffer()` api for convenience, it returns body as a Node.js buffer +- Enhance: better old server support by handling raw deflate response +- Enhance: skip encoding detection for non-HTML/XML response +- Enhance: minor document update +- Fix: HEAD request doesn't need decompression, as body is empty +- Fix: `req.body` now accepts a Node.js buffer + +## v1.5.3 + +- Fix: handle 204 and 304 responses when body is empty but content-encoding is gzip/deflate +- Fix: allow resolving response and cloned response in any order +- Fix: avoid setting `content-length` when `form-data` body use streams +- Fix: send DELETE request with content-length when body is present +- Fix: allow any url when calling new Request, but still reject non-http(s) url in fetch + +## v1.5.2 + +- Fix: allow node.js core to handle keep-alive connection pool when passing a custom agent + +## v1.5.1 + +- Fix: redirect mode `manual` should work even when there is no redirection or broken redirection + +## v1.5.0 + +- Enhance: rejected promise now use custom `Error` (thx to @pekeler) +- Enhance: `FetchError` contains `err.type` and `err.code`, allows for better error handling (thx to @pekeler) +- Enhance: basic support for redirect mode `manual` and `error`, allows for location header extraction (thx to @jimmywarting for the initial PR) + +## v1.4.1 + +- Fix: wrapping Request instance with FormData body again should preserve the body as-is + +## v1.4.0 + +- Enhance: Request and Response now have `clone` method (thx to @kirill-konshin for the initial PR) +- Enhance: Request and Response now have proper string and buffer body support (thx to @kirill-konshin) +- Enhance: Body constructor has been refactored out (thx to @kirill-konshin) +- Enhance: Headers now has `forEach` method (thx to @tricoder42) +- Enhance: back to 100% code coverage +- Fix: better form-data support (thx to @item4) +- Fix: better character encoding detection under chunked encoding (thx to @dsuket for the initial PR) + +## v1.3.3 + +- Fix: make sure `Content-Length` header is set when body is string for POST/PUT/PATCH requests +- Fix: handle body stream error, for cases such as incorrect `Content-Encoding` header +- Fix: when following certain redirects, use `GET` on subsequent request per Fetch Spec +- Fix: `Request` and `Response` constructors now parse headers input using `Headers` + +## v1.3.2 + +- Enhance: allow auto detect of form-data input (no `FormData` spec on node.js, this is form-data specific feature) + +## v1.3.1 + +- Enhance: allow custom host header to be set (server-side only feature, as it's a forbidden header on client-side) + +## v1.3.0 + +- Enhance: now `fetch.Request` is exposed as well + +## v1.2.1 + +- Enhance: `Headers` now normalized `Number` value to `String`, prevent common mistakes + +## v1.2.0 + +- Enhance: now fetch.Headers and fetch.Response are exposed, making testing easier + +## v1.1.2 + +- Fix: `Headers` should only support `String` and 
`Array` properties, and ignore others + +## v1.1.1 + +- Enhance: now req.headers accept both plain object and `Headers` instance + +## v1.1.0 + +- Enhance: timeout now also applies to response body (in case of slow response) +- Fix: timeout is now cleared properly when fetch is done/has failed + +## v1.0.6 + +- Fix: less greedy content-type charset matching + +## v1.0.5 + +- Fix: when `follow = 0`, fetch should not follow redirect +- Enhance: update tests for better coverage +- Enhance: code formatting +- Enhance: clean up doc + +## v1.0.4 + +- Enhance: test iojs support +- Enhance: timeout attached to socket event only fire once per redirect + +## v1.0.3 + +- Fix: response size limit should reject large chunk +- Enhance: added character encoding detection for xml, such as rss/atom feed (encoding in DTD) + +## v1.0.2 + +- Fix: added res.ok per spec change + +## v1.0.0 + +- Enhance: better test coverage and doc + + +# 0.x release + +## v0.1 + +- Major: initial public release diff --git a/scripts/metrics/node_modules/node-fetch/LICENSE.md b/scripts/metrics/node_modules/node-fetch/LICENSE.md new file mode 100644 index 00000000000..660ffecb58b --- /dev/null +++ b/scripts/metrics/node_modules/node-fetch/LICENSE.md @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2016 David Frank + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ diff --git a/scripts/metrics/node_modules/node-fetch/README.md b/scripts/metrics/node_modules/node-fetch/README.md new file mode 100644 index 00000000000..48f4215e4e7 --- /dev/null +++ b/scripts/metrics/node_modules/node-fetch/README.md @@ -0,0 +1,538 @@ +node-fetch +========== + +[![npm version][npm-image]][npm-url] +[![build status][travis-image]][travis-url] +[![coverage status][codecov-image]][codecov-url] +[![install size][install-size-image]][install-size-url] + +A light-weight module that brings `window.fetch` to Node.js + +(We are looking for [v2 maintainers and collaborators](https://github.com/bitinn/node-fetch/issues/567)) + + + +- [Motivation](#motivation) +- [Features](#features) +- [Difference from client-side fetch](#difference-from-client-side-fetch) +- [Installation](#installation) +- [Loading and configuring the module](#loading-and-configuring-the-module) +- [Common Usage](#common-usage) + - [Plain text or HTML](#plain-text-or-html) + - [JSON](#json) + - [Simple Post](#simple-post) + - [Post with JSON](#post-with-json) + - [Post with form parameters](#post-with-form-parameters) + - [Handling exceptions](#handling-exceptions) + - [Handling client and server errors](#handling-client-and-server-errors) +- [Advanced Usage](#advanced-usage) + - [Streams](#streams) + - [Buffer](#buffer) + - [Accessing Headers and other Meta data](#accessing-headers-and-other-meta-data) + - [Post data using a file stream](#post-data-using-a-file-stream) + - [Post with form-data (detect multipart)](#post-with-form-data-detect-multipart) + - [Request cancellation with AbortSignal](#request-cancellation-with-abortsignal) +- [API](#api) + - [fetch(url[, options])](#fetchurl-options) + - [Options](#options) + - [Class: Request](#class-request) + - [Class: Response](#class-response) + - [Class: Headers](#class-headers) + - [Interface: Body](#interface-body) + - [Class: FetchError](#class-fetcherror) +- [License](#license) +- [Acknowledgement](#acknowledgement) + + + +## Motivation + +Instead of implementing `XMLHttpRequest` in Node.js to run browser-specific [Fetch polyfill](https://github.com/github/fetch), why not go from native `http` to `fetch` API directly? Hence `node-fetch`, minimal code for a `window.fetch` compatible API on Node.js runtime. + +See Matt Andrews' [isomorphic-fetch](https://github.com/matthew-andrews/isomorphic-fetch) or Leonardo Quixada's [cross-fetch](https://github.com/lquixada/cross-fetch) for isomorphic usage (exports `node-fetch` for server-side, `whatwg-fetch` for client-side). + +## Features + +- Stay consistent with `window.fetch` API. +- Make conscious trade-off when following [WHATWG fetch spec][whatwg-fetch] and [stream spec](https://streams.spec.whatwg.org/) implementation details, document known differences. +- Use native promise, but allow substituting it with [insert your favorite promise library]. +- Use native Node streams for body, on both request and response. +- Decode content encoding (gzip/deflate) properly, and convert string output (such as `res.text()` and `res.json()`) to UTF-8 automatically. +- Useful extensions such as timeout, redirect limit, response size limit, [explicit errors](ERROR-HANDLING.md) for troubleshooting. + +## Difference from client-side fetch + +- See [Known Differences](LIMITS.md) for details. +- If you happen to use a missing feature that `window.fetch` offers, feel free to open an issue. +- Pull requests are welcomed too! 
+ +## Installation + +Current stable release (`2.x`) + +```sh +$ npm install node-fetch --save +``` + +## Loading and configuring the module +We suggest you load the module via `require`, pending the stabilizing of es modules in node: +```js +const fetch = require('node-fetch'); +``` + +If you are using a Promise library other than native, set it through fetch.Promise: +```js +const Bluebird = require('bluebird'); + +fetch.Promise = Bluebird; +``` + +## Common Usage + +NOTE: The documentation below is up-to-date with `2.x` releases, [see `1.x` readme](https://github.com/bitinn/node-fetch/blob/1.x/README.md), [changelog](https://github.com/bitinn/node-fetch/blob/1.x/CHANGELOG.md) and [2.x upgrade guide](UPGRADE-GUIDE.md) for the differences. + +#### Plain text or HTML +```js +fetch('https://github.com/') + .then(res => res.text()) + .then(body => console.log(body)); +``` + +#### JSON + +```js + +fetch('https://api.github.com/users/github') + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Simple Post +```js +fetch('https://httpbin.org/post', { method: 'POST', body: 'a=1' }) + .then(res => res.json()) // expecting a json response + .then(json => console.log(json)); +``` + +#### Post with JSON + +```js +const body = { a: 1 }; + +fetch('https://httpbin.org/post', { + method: 'post', + body: JSON.stringify(body), + headers: { 'Content-Type': 'application/json' }, + }) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Post with form parameters +`URLSearchParams` is available in Node.js as of v7.5.0. See [official documentation](https://nodejs.org/api/url.html#url_class_urlsearchparams) for more usage methods. + +NOTE: The `Content-Type` header is only set automatically to `x-www-form-urlencoded` when an instance of `URLSearchParams` is given as such: + +```js +const { URLSearchParams } = require('url'); + +const params = new URLSearchParams(); +params.append('a', 1); + +fetch('https://httpbin.org/post', { method: 'POST', body: params }) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Handling exceptions +NOTE: 3xx-5xx responses are *NOT* exceptions, and should be handled in `then()`, see the next section. + +Adding a catch to the fetch promise chain will catch *all* exceptions, such as errors originating from node core libraries, like network errors, and operational errors which are instances of FetchError. See the [error handling document](ERROR-HANDLING.md) for more details. + +```js +fetch('https://domain.invalid/') + .catch(err => console.error(err)); +``` + +#### Handling client and server errors +It is common to create a helper function to check that the response contains no client (4xx) or server (5xx) error responses: + +```js +function checkStatus(res) { + if (res.ok) { // res.status >= 200 && res.status < 300 + return res; + } else { + throw new MyCustomError(res.statusText); + } +} + +fetch('https://httpbin.org/status/400') + .then(checkStatus) + .then(res => console.log('will not get here...')) +``` + +## Advanced Usage + +#### Streams +The "Node.js way" is to use streams when possible: + +```js +fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png') + .then(res => { + const dest = fs.createWriteStream('./octocat.png'); + res.body.pipe(dest); + }); +``` + +#### Buffer +If you prefer to cache binary data in full, use buffer().
(NOTE: buffer() is a `node-fetch` only API) + +```js +const fileType = require('file-type'); + +fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png') + .then(res => res.buffer()) + .then(buffer => fileType(buffer)) + .then(type => { /* ... */ }); +``` + +#### Accessing Headers and other Meta data +```js +fetch('https://github.com/') + .then(res => { + console.log(res.ok); + console.log(res.status); + console.log(res.statusText); + console.log(res.headers.raw()); + console.log(res.headers.get('content-type')); + }); +``` + +#### Post data using a file stream + +```js +const { createReadStream } = require('fs'); + +const stream = createReadStream('input.txt'); + +fetch('https://httpbin.org/post', { method: 'POST', body: stream }) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Post with form-data (detect multipart) + +```js +const FormData = require('form-data'); + +const form = new FormData(); +form.append('a', 1); + +fetch('https://httpbin.org/post', { method: 'POST', body: form }) + .then(res => res.json()) + .then(json => console.log(json)); + +// OR, using custom headers +// NOTE: getHeaders() is non-standard API + +const form = new FormData(); +form.append('a', 1); + +const options = { + method: 'POST', + body: form, + headers: form.getHeaders() +} + +fetch('https://httpbin.org/post', options) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Request cancellation with AbortSignal + +> NOTE: You may only cancel streamed requests on Node >= v8.0.0 + +You may cancel requests with `AbortController`. A suggested implementation is [`abort-controller`](https://www.npmjs.com/package/abort-controller). + +An example of timing out a request after 150ms could be achieved as follows: + +```js +import AbortController from 'abort-controller'; + +const controller = new AbortController(); +const timeout = setTimeout( + () => { controller.abort(); }, + 150, +); + +fetch(url, { signal: controller.signal }) + .then(res => res.json()) + .then( + data => { + useData(data) + }, + err => { + if (err.name === 'AbortError') { + // request was aborted + } + }, + ) + .finally(() => { + clearTimeout(timeout); + }); +``` + +See [test cases](https://github.com/bitinn/node-fetch/blob/master/test/test.js) for more examples. + + +## API + +### fetch(url[, options]) + +- `url` A string representing the URL for fetching +- `options` [Options](#fetch-options) for the HTTP(S) request +- Returns: Promise<[Response](#class-response)> + +Perform an HTTP(S) fetch. + +`url` should be an absolute url, such as `https://example.com/`. A path-relative URL (`/file/under/root`) or protocol-relative URL (`//can-be-http-or-https.com/`) will result in a rejected promise. + + +### Options + +The default values are shown after each option key. + +```js +{ + // These properties are part of the Fetch Standard + method: 'GET', + headers: {}, // request headers. format is the identical to that accepted by the Headers constructor (see below) + body: null, // request body. can be null, a string, a Buffer, a Blob, or a Node.js Readable stream + redirect: 'follow', // set to `manual` to extract redirect headers, `error` to reject redirect + signal: null, // pass an instance of AbortSignal to optionally abort requests + + // The following properties are node-fetch extensions + follow: 20, // maximum redirect count. 0 to not follow redirect + timeout: 0, // req/res timeout in ms, it resets on redirect. 0 to disable (OS limit applies). 
Signal is recommended instead. + compress: true, // support gzip/deflate content encoding. false to disable + size: 0, // maximum response body size in bytes. 0 to disable + agent: null // http(s).Agent instance, allows custom proxy, certificate, dns lookup etc. +} +``` + +##### Default Headers + +If no values are set, the following request headers will be sent automatically: + +Header | Value +------------------- | -------------------------------------------------------- +`Accept-Encoding` | `gzip,deflate` _(when `options.compress === true`)_ +`Accept` | `*/*` +`Connection` | `close` _(when no `options.agent` is present)_ +`Content-Length` | _(automatically calculated, if possible)_ +`Transfer-Encoding` | `chunked` _(when `req.body` is a stream)_ +`User-Agent` | `node-fetch/1.0 (+https://github.com/bitinn/node-fetch)` + + +### Class: Request + +An HTTP(S) request containing information about URL, method, headers, and the body. This class implements the [Body](#iface-body) interface. + +Due to the nature of Node.js, the following properties are not implemented at this moment: + +- `type` +- `destination` +- `referrer` +- `referrerPolicy` +- `mode` +- `credentials` +- `cache` +- `integrity` +- `keepalive` + +The following node-fetch extension properties are provided: + +- `follow` +- `compress` +- `counter` +- `agent` + +See [options](#fetch-options) for exact meaning of these extensions. + +#### new Request(input[, options]) + +*(spec-compliant)* + +- `input` A string representing a URL, or another `Request` (which will be cloned) +- `options` [Options][#fetch-options] for the HTTP(S) request + +Constructs a new `Request` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Request/Request). + +In most cases, directly `fetch(url, options)` is simpler than creating a `Request` object. + + +### Class: Response + +An HTTP(S) response. This class implements the [Body](#iface-body) interface. + +The following properties are not implemented in node-fetch at this moment: + +- `Response.error()` +- `Response.redirect()` +- `type` +- `trailer` + +#### new Response([body[, options]]) + +*(spec-compliant)* + +- `body` A string or [Readable stream][node-readable] +- `options` A [`ResponseInit`][response-init] options dictionary + +Constructs a new `Response` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Response/Response). + +Because Node.js does not implement service workers (for which this class was designed), one rarely has to construct a `Response` directly. + +#### response.ok + +*(spec-compliant)* + +Convenience property representing if the request ended normally. Will evaluate to true if the response status was greater than or equal to 200 but smaller than 300. + +#### response.redirected + +*(spec-compliant)* + +Convenience property representing if the request has been redirected at least once. Will evaluate to true if the internal redirect counter is greater than 0. + + +### Class: Headers + +This class allows manipulating and iterating over a set of HTTP headers. All methods specified in the [Fetch Standard][whatwg-fetch] are implemented. + +#### new Headers([init]) + +*(spec-compliant)* + +- `init` Optional argument to pre-fill the `Headers` object + +Construct a new `Headers` object. `init` can be either `null`, a `Headers` object, an key-value map object, or any iterable object. 
+ +```js +// Example adapted from https://fetch.spec.whatwg.org/#example-headers-class + +const meta = { + 'Content-Type': 'text/xml', + 'Breaking-Bad': '<3' +}; +const headers = new Headers(meta); + +// The above is equivalent to +const meta = [ + [ 'Content-Type', 'text/xml' ], + [ 'Breaking-Bad', '<3' ] +]; +const headers = new Headers(meta); + +// You can in fact use any iterable objects, like a Map or even another Headers +const meta = new Map(); +meta.set('Content-Type', 'text/xml'); +meta.set('Breaking-Bad', '<3'); +const headers = new Headers(meta); +const copyOfHeaders = new Headers(headers); +``` + + +### Interface: Body + +`Body` is an abstract interface with methods that are applicable to both `Request` and `Response` classes. + +The following methods are not yet implemented in node-fetch at this moment: + +- `formData()` + +#### body.body + +*(deviation from spec)* + +* Node.js [`Readable` stream][node-readable] + +The data encapsulated in the `Body` object. Note that while the [Fetch Standard][whatwg-fetch] requires the property to always be a WHATWG `ReadableStream`, in node-fetch it is a Node.js [`Readable` stream][node-readable]. + +#### body.bodyUsed + +*(spec-compliant)* + +* `Boolean` + +A boolean property for if this body has been consumed. Per spec, a consumed body cannot be used again. + +#### body.arrayBuffer() +#### body.blob() +#### body.json() +#### body.text() + +*(spec-compliant)* + +* Returns: Promise + +Consume the body and return a promise that will resolve to one of these formats. + +#### body.buffer() + +*(node-fetch extension)* + +* Returns: Promise<Buffer> + +Consume the body and return a promise that will resolve to a Buffer. + +#### body.textConverted() + +*(node-fetch extension)* + +* Returns: Promise<String> + +Identical to `body.text()`, except instead of always converting to UTF-8, encoding sniffing will be performed and text converted to UTF-8, if possible. + +(This API requires an optional dependency on npm package [encoding](https://www.npmjs.com/package/encoding), which you need to install manually. `webpack` users may see [a warning message](https://github.com/bitinn/node-fetch/issues/412#issuecomment-379007792) due to this optional dependency.) + + +### Class: FetchError + +*(node-fetch extension)* + +An operational error in the fetching process. See [ERROR-HANDLING.md][] for more info. + + +### Class: AbortError + +*(node-fetch extension)* + +An Error thrown when the request is aborted in response to an `AbortSignal`'s `abort` event. It has a `name` property of `AbortError`. See [ERROR-HANDLING.MD][] for more info. + +## Acknowledgement + +Thanks to [github/fetch](https://github.com/github/fetch) for providing a solid implementation reference. + +`node-fetch` v1 was maintained by [@bitinn](https://github.com/bitinn); v2 was maintained by [@TimothyGu](https://github.com/timothygu), [@bitinn](https://github.com/bitinn) and [@jimmywarting](https://github.com/jimmywarting); v2 readme is written by [@jkantr](https://github.com/jkantr). 
+ +## License + +MIT + +[npm-image]: https://flat.badgen.net/npm/v/node-fetch +[npm-url]: https://www.npmjs.com/package/node-fetch +[travis-image]: https://flat.badgen.net/travis/bitinn/node-fetch +[travis-url]: https://travis-ci.org/bitinn/node-fetch +[codecov-image]: https://flat.badgen.net/codecov/c/github/bitinn/node-fetch/master +[codecov-url]: https://codecov.io/gh/bitinn/node-fetch +[install-size-image]: https://flat.badgen.net/packagephobia/install/node-fetch +[install-size-url]: https://packagephobia.now.sh/result?p=node-fetch +[whatwg-fetch]: https://fetch.spec.whatwg.org/ +[response-init]: https://fetch.spec.whatwg.org/#responseinit +[node-readable]: https://nodejs.org/api/stream.html#stream_readable_streams +[mdn-headers]: https://developer.mozilla.org/en-US/docs/Web/API/Headers +[LIMITS.md]: https://github.com/bitinn/node-fetch/blob/master/LIMITS.md +[ERROR-HANDLING.md]: https://github.com/bitinn/node-fetch/blob/master/ERROR-HANDLING.md +[UPGRADE-GUIDE.md]: https://github.com/bitinn/node-fetch/blob/master/UPGRADE-GUIDE.md diff --git a/scripts/metrics/node_modules/node-fetch/browser.js b/scripts/metrics/node_modules/node-fetch/browser.js new file mode 100644 index 00000000000..0ad5de004c4 --- /dev/null +++ b/scripts/metrics/node_modules/node-fetch/browser.js @@ -0,0 +1,23 @@ +"use strict"; + +// ref: https://github.com/tc39/proposal-global +var getGlobal = function () { + // the only reliable means to get the global object is + // `Function('return this')()` + // However, this causes CSP violations in Chrome apps. + if (typeof self !== 'undefined') { return self; } + if (typeof window !== 'undefined') { return window; } + if (typeof global !== 'undefined') { return global; } + throw new Error('unable to locate global object'); +} + +var global = getGlobal(); + +module.exports = exports = global.fetch; + +// Needed for TypeScript and Webpack. +exports.default = global.fetch.bind(global); + +exports.Headers = global.Headers; +exports.Request = global.Request; +exports.Response = global.Response; \ No newline at end of file diff --git a/scripts/metrics/node_modules/node-fetch/lib/index.es.js b/scripts/metrics/node_modules/node-fetch/lib/index.es.js new file mode 100644 index 00000000000..20ab807872f --- /dev/null +++ b/scripts/metrics/node_modules/node-fetch/lib/index.es.js @@ -0,0 +1,1631 @@ +process.emitWarning("The .es.js file is deprecated. 
Use .mjs instead."); + +import Stream from 'stream'; +import http from 'http'; +import Url from 'url'; +import https from 'https'; +import zlib from 'zlib'; + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = 
systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = require('encoding').convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parse_url(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parse_url(`${input}`); + } + input = {}; + } else { + parsedURL = parse_url(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? 
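+ // `input` may be a URL string, anything with an `.href` (Node Url or
+ // WHATWG URL), or another Request; `init` fields win over `input`
+ // fields, and the guard above rejects a GET/HEAD request that
+ // carries a body. Sketch (URL is a placeholder):
+ //   const req = new Request('http://example.com/api', {
+ //     method: 'POST',
+ //     body: 'payload'
+ //   });
+ //   new Request(req, { method: 'PUT' }); // derive, overriding method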
clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
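+ *
+ * Illustrative shape of the return value for a plain GET (hostname is
+ * a placeholder; header values come from the defaults set below, as
+ * arrays via exportNodeCompatibleHeaders):
+ *   {
+ *     protocol: 'https:', hostname: 'example.com', path: '/',
+ *     method: 'GET',
+ *     headers: { Accept: [...], 'Accept-Encoding': [...], ... },
+ *     agent: undefined
+ *   }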
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + if (!headers.has('Connection') && !request.agent) { + headers.set('Connection', 'close'); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent: request.agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; +const resolve_url = Url.resolve; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? 
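+ // The transport is chosen per request. Abort support below
+ // duck-types the signal: any object whose prototype constructor is
+ // named AbortSignal passes. Caller-side sketch, assuming some
+ // AbortController implementation is available:
+ //   const controller = new AbortController();
+ //   fetch('http://example.com/slow', { signal: controller.signal })
+ //     .catch(err => err.name); // 'AbortError' once abort() fires
+ //   setTimeout(() => controller.abort(), 100);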
https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + request.body.destroy(error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + finalize(); + }); + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + const locationURL = location === null ? null : resolve_url(request.url, location); + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. 
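+ // A fresh Request is derived for the next hop below; `counter`
+ // enforces the `follow` cap (default 20). By contrast,
+ // redirect: 'manual' resolves with the 3xx response itself, its
+ // Location header rewritten above to an absolute URL. Sketch
+ // (URL is a placeholder, inside an async function):
+ //   const res = await fetch('http://example.com/old', { redirect: 'manual' });
+ //   res.status;                  // e.g. 301
+ //   res.headers.get('Location'); // absolute URL of the target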
+ const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout + }; + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. 
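+ // Decompression below is transparent: the Response exposes the
+ // decoded stream while `headers` still reports the original
+ // Content-Encoding. Opting out is per request (URL is a
+ // placeholder):
+ //   fetch('http://example.com/data', { compress: false })
+ //     .then(res => res.buffer()); // raw bytes; no Accept-Encoding sent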
+ const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +export default fetch; +export { Headers, Request, Response, FetchError }; diff --git a/scripts/metrics/node_modules/node-fetch/lib/index.js b/scripts/metrics/node_modules/node-fetch/lib/index.js new file mode 100644 index 00000000000..86c7c031229 --- /dev/null +++ b/scripts/metrics/node_modules/node-fetch/lib/index.js @@ -0,0 +1,1640 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var Stream = _interopDefault(require('stream')); +var http = _interopDefault(require('http')); +var Url = _interopDefault(require('url')); +var https = _interopDefault(require('https')); +var zlib = _interopDefault(require('zlib')); + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? 
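+ // Any other part is stringified. A small sketch of the resulting
+ // Blob surface (the parts below are arbitrary):
+ //   const blob = new Blob(['hello, ', Buffer.from('world')],
+ //                         { type: 'text/plain' });
+ //   blob.size;               // 12
+ //   blob.slice(0, 5).text(); // Promise resolving to 'hello'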
element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = require('encoding').convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 
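+ // A size or timeout of 0 means "unlimited". Consumption methods are
+ // one-shot: a second read rejects. Sketch against any Response
+ // `res` (the name is illustrative):
+ //   const data = await res.json(); // buffers the stream, parses JSON
+ //   res.bodyUsed;                  // true
+ //   res.text();                    // rejects: body used already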
0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
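+ *
+ * Failures below surface as rejections: e.g. with
+ * `fetch(url, { size: 1024 })` a larger body rejects with a
+ * FetchError of type 'max-size'; a slow body similarly yields
+ * 'body-timeout', and stream errors are wrapped as 'system'.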
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
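+ // set() replaces the whole value list but keeps the first-seen key
+ // casing; append() adds to it. Sketch (names illustrative):
+ //   const h = new Headers();
+ //   h.append('X-Tag', 'a');
+ //   h.append('X-Tag', 'b');
+ //   h.get('x-tag'); // 'a, b'
+ //   h.set('x-tag', 'c');
+ //   h.get('X-Tag'); // 'c' (stored under the original 'X-Tag' key)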
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parse_url(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parse_url(`${input}`); + } + input = {}; + } else { + parsedURL = parse_url(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? 
clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + if (!headers.has('Connection') && !request.agent) { + headers.set('Connection', 'close'); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent: request.agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; +const resolve_url = Url.resolve; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? 
https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + request.body.destroy(error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + finalize(); + }); + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + const locationURL = location === null ? null : resolve_url(request.url, location); + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. 
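+ // The request-level `timeout` armed above rejects with type
+ // 'request-timeout' if response headers never arrive; the same value
+ // is passed on to the Response body, where consumeBody() enforces it
+ // as 'body-timeout'. Sketch (URL and value are placeholders):
+ //   fetch('http://example.com/slow', { timeout: 3000 })
+ //     .catch(err => err.type); // 'request-timeout'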
+ const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout + }; + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. 
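+ // The deflate branch below sniffs the first byte: a zlib-wrapped
+ // stream's CMF byte has a low nibble of 8 (compression method =
+ // deflate), so (chunk[0] & 0x0F) === 0x08 selects createInflate(),
+ // while anything else falls back to createInflateRaw() for the
+ // headerless streams old IIS/Apache servers emit.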
+ const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +module.exports = exports = fetch; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.default = exports; +exports.Headers = Headers; +exports.Request = Request; +exports.Response = Response; +exports.FetchError = FetchError; diff --git a/scripts/metrics/node_modules/node-fetch/lib/index.mjs b/scripts/metrics/node_modules/node-fetch/lib/index.mjs new file mode 100644 index 00000000000..dca525658b4 --- /dev/null +++ b/scripts/metrics/node_modules/node-fetch/lib/index.mjs @@ -0,0 +1,1629 @@ +import Stream from 'stream'; +import http from 'http'; +import Url from 'url'; +import https from 'https'; +import zlib from 'zlib'; + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? 
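+ // This .mjs build mirrors lib/index.js for ES module consumers; the
+ // CJS build above also assigns exports.default for interop. Import
+ // sketch, assuming the package's conventional "main"/"module" fields
+ // point at these builds:
+ //   import fetch, { Headers } from 'node-fetch'; // this file
+ //   const fetch2 = require('node-fetch');        // lib/index.js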
element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = require('encoding').convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 
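+ // FetchError carries a machine-readable `type` and, for 'system'
+ // errors, mirrors the Node error code onto `code`/`errno`. Sketch of
+ // inspecting a connection failure (address is a placeholder):
+ //   fetch('http://localhost:1/').catch(err => {
+ //     err.name; // 'FetchError'
+ //     err.type; // 'system'
+ //     err.code; // e.g. 'ECONNREFUSED'
+ //   });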
0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined;
+
+    this[MAP] = Object.create(null);
+
+    if (init instanceof Headers) {
+      const rawHeaders = init.raw();
+      const headerNames = Object.keys(rawHeaders);
+
+      for (const headerName of headerNames) {
+        for (const value of rawHeaders[headerName]) {
+          this.append(headerName, value);
+        }
+      }
+
+      return;
+    }
+
+    // We don't worry about converting prop to ByteString here as append()
+    // will handle it.
+    if (init == null) ; else if (typeof init === 'object') {
+      const method = init[Symbol.iterator];
+      if (method != null) {
+        if (typeof method !== 'function') {
+          throw new TypeError('Header pairs must be iterable');
+        }
+
+        // sequence<sequence<ByteString>>
+        // Note: per spec we have to first exhaust the lists then process them
+        const pairs = [];
+        for (const pair of init) {
+          if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
+            throw new TypeError('Each header pair must be iterable');
+          }
+          pairs.push(Array.from(pair));
+        }
+
+        for (const pair of pairs) {
+          if (pair.length !== 2) {
+            throw new TypeError('Each header pair must be a name/value tuple');
+          }
+          this.append(pair[0], pair[1]);
+        }
+      } else {
+        // record<ByteString, ByteString>
+        for (const key of Object.keys(init)) {
+          const value = init[key];
+          this.append(key, value);
+        }
+      }
+    } else {
+      throw new TypeError('Provided initializer must be an object');
+    }
+  }
+
+  /**
+   * Return combined header value given name
+   *
+   * @param String name Header name
+   * @return Mixed
+   */
+  get(name) {
+    name = `${name}`;
+    validateName(name);
+    const key = find(this[MAP], name);
+    if (key === undefined) {
+      return null;
+    }
+
+    return this[MAP][key].join(', ');
+  }
+
+  /**
+   * Iterate over all headers
+   *
+   * @param Function callback Executed for each item with parameters (value, name, thisArg)
+   * @param Boolean thisArg `this` context for callback function
+   * @return Void
+   */
+  forEach(callback) {
+    let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;
+
+    let pairs = getHeaders(this);
+    let i = 0;
+    while (i < pairs.length) {
+      var _pairs$i = pairs[i];
+      const name = _pairs$i[0],
+        value = _pairs$i[1];
+
+      callback.call(thisArg, value, name, this);
+      pairs = getHeaders(this);
+      i++;
+    }
+  }
+
+  /**
+   * Overwrite header values given name
+   *
+   * @param String name Header name
+   * @param String value Header value
+   * @return Void
+   */
+  set(name, value) {
+    name = `${name}`;
+    value = `${value}`;
+    validateName(name);
+    validateValue(value);
+    const key = find(this[MAP], name);
+    this[MAP][key !== undefined ?
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parse_url(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parse_url(`${input}`); + } + input = {}; + } else { + parsedURL = parse_url(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? 
clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
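+ * Also fills in any missing mandatory headers (Accept, Content-Length,
+ * User-Agent, Accept-Encoding, Connection) along the way.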
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + if (!headers.has('Connection') && !request.agent) { + headers.set('Connection', 'close'); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent: request.agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; +const resolve_url = Url.resolve; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? 
https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + request.body.destroy(error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + finalize(); + }); + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + const locationURL = location === null ? null : resolve_url(request.url, location); + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. 
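+					// The options below mirror the original request but bump `counter`,
+					// which is what the `follow` limit check above compares against to
+					// eventually stop a redirect loop.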
+ const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout + }; + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. 
+ const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +export default fetch; +export { Headers, Request, Response, FetchError }; diff --git a/scripts/metrics/node_modules/node-fetch/package.json b/scripts/metrics/node_modules/node-fetch/package.json new file mode 100644 index 00000000000..e93129c801f --- /dev/null +++ b/scripts/metrics/node_modules/node-fetch/package.json @@ -0,0 +1,94 @@ +{ + "_from": "node-fetch", + "_id": "node-fetch@2.5.0", + "_inBundle": false, + "_integrity": "sha512-YuZKluhWGJwCcUu4RlZstdAxr8bFfOVHakc1mplwHkk8J+tqM1Y5yraYvIUpeX8aY7+crCwiELJq7Vl0o0LWXw==", + "_location": "/node-fetch", + "_phantomChildren": {}, + "_requested": { + "type": "tag", + "registry": true, + "raw": "node-fetch", + "name": "node-fetch", + "escapedName": "node-fetch", + "rawSpec": "", + "saveSpec": null, + "fetchSpec": "latest" + }, + "_requiredBy": [ + "#USER", + "/" + ], + "_resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.5.0.tgz", + "_shasum": "8028c49fc1191bba56a07adc6e2a954644a48501", + "_spec": "node-fetch", + "_where": "/Users/zachary.butler/Work/auto-buildkite-pipelines/scripts/metrics", + "author": { + "name": "David Frank" + }, + "browser": "./browser.js", + "bugs": { + "url": "https://github.com/bitinn/node-fetch/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "A light-weight module that brings window.fetch to node.js", + "devDependencies": { + "@ungap/url-search-params": "^0.1.2", + "abort-controller": "^1.1.0", + "abortcontroller-polyfill": "^1.3.0", + "babel-core": "^6.26.3", + "babel-plugin-istanbul": "^4.1.6", + "babel-preset-env": "^1.6.1", + "babel-register": "^6.16.3", + "chai": "^3.5.0", + "chai-as-promised": "^7.1.1", + "chai-iterator": "^1.1.1", + "chai-string": "~1.3.0", + "codecov": "^3.3.0", + "cross-env": "^5.2.0", + "form-data": "^2.3.3", + "is-builtin-module": "^1.0.0", + "mocha": "^5.0.0", + "nyc": "11.9.0", + "parted": "^0.1.1", + "promise": "^8.0.3", + "resumer": "0.0.0", + "rollup": "^0.63.4", + "rollup-plugin-babel": "^3.0.7", + "string-to-arraybuffer": 
"^1.0.2", + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "files": [ + "lib/index.js", + "lib/index.mjs", + "lib/index.es.js", + "browser.js" + ], + "homepage": "https://github.com/bitinn/node-fetch", + "keywords": [ + "fetch", + "http", + "promise" + ], + "license": "MIT", + "main": "lib/index", + "module": "lib/index.mjs", + "name": "node-fetch", + "repository": { + "type": "git", + "url": "git+https://github.com/bitinn/node-fetch.git" + }, + "scripts": { + "build": "cross-env BABEL_ENV=rollup rollup -c", + "coverage": "cross-env BABEL_ENV=coverage nyc --reporter json --reporter text mocha -R spec test/test.js && codecov -f coverage/coverage-final.json", + "prepare": "npm run build", + "report": "cross-env BABEL_ENV=coverage nyc --reporter lcov --reporter text mocha -R spec test/test.js", + "test": "cross-env BABEL_ENV=test mocha --require babel-register --throw-deprecation test/test.js" + }, + "version": "2.5.0" +} diff --git a/scripts/metrics/node_modules/sax/LICENSE b/scripts/metrics/node_modules/sax/LICENSE new file mode 100644 index 00000000000..ccffa082c99 --- /dev/null +++ b/scripts/metrics/node_modules/sax/LICENSE @@ -0,0 +1,41 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +==== + +`String.fromCodePoint` by Mathias Bynens used according to terms of MIT +License, as follows: + + Copyright Mathias Bynens + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/scripts/metrics/node_modules/sax/README.md b/scripts/metrics/node_modules/sax/README.md new file mode 100644 index 00000000000..afcd3f3dd65 --- /dev/null +++ b/scripts/metrics/node_modules/sax/README.md @@ -0,0 +1,225 @@ +# sax js + +A sax-style parser for XML and HTML. 
+
+Designed with [node](http://nodejs.org/) in mind, but should work fine in
+the browser or other CommonJS implementations.
+
+## What This Is
+
+* A very simple tool to parse through an XML string.
+* A stepping stone to a streaming HTML parser.
+* A handy way to deal with RSS and other mostly-ok-but-kinda-broken XML
+  docs.
+
+## What This Is (probably) Not
+
+* An HTML Parser - That's a fine goal, but this isn't it. It's just
+  XML.
+* A DOM Builder - You can use it to build an object model out of XML,
+  but it doesn't do that out of the box.
+* XSLT - No DOM = no querying.
+* 100% Compliant with (some other SAX implementation) - Most SAX
+  implementations are in Java and do a lot more than this does.
+* An XML Validator - It does a little validation when in strict mode, but
+  not much.
+* A Schema-Aware XSD Thing - Schemas are an exercise in fetishistic
+  masochism.
+* A DTD-aware Thing - Fetching DTDs is a much bigger job.
+
+## Regarding `<!DOCTYPE`s and `<!ENTITY`s
+
+The parser will handle the basic XML entities in text nodes and attribute
+values: `&amp; &lt; &gt; &apos; &quot;`. It's possible to define additional
+entities in XML by putting them in the DTD. This parser doesn't do anything
+with that. If you want to listen to the `ondoctype` event, and then fetch
+the doctypes, and read the entities and add them to `parser.ENTITIES`, then
+be my guest.
+
+## Usage
+
+```javascript
+var sax = require("./lib/sax"),
+  strict = true, // set to false for html-mode
+  parser = sax.parser(strict);
+
+parser.onerror = function (e) {
+  // an error happened.
+};
+parser.ontext = function (t) {
+  // got some text. t is the string of text.
+};
+parser.onopentag = function (node) {
+  // opened a tag. node has "name" and "attributes"
+};
+parser.onattribute = function (attr) {
+  // an attribute. attr has "name" and "value"
+};
+parser.onend = function () {
+  // parser stream is done, and ready to have more stuff written to it.
+};
+
+parser.write('<xml>Hello, <who name="world">world</who>!</xml>').close();
+
+// stream usage
+// takes the same options as the parser
+var saxStream = require("sax").createStream(strict, options)
+saxStream.on("error", function (e) {
+  // unhandled errors will throw, since this is a proper node
+  // event emitter.
+  console.error("error!", e)
+  // clear the error
+  this._parser.error = null
+  this._parser.resume()
+})
+saxStream.on("opentag", function (node) {
+  // same object as above
+})
+// pipe is supported, and it's readable/writable
+// same chunks coming in also go out.
+fs.createReadStream("file.xml")
+  .pipe(saxStream)
+  .pipe(fs.createWriteStream("file-copy.xml"))
+```
+
+
+## Arguments
+
+Pass the following arguments to the parser function. All are optional.
+
+`strict` - Boolean. Whether or not to be a jerk. Default: `false`.
+
+`opt` - Object bag of settings regarding string formatting. All default to `false`.
+
+Settings supported:
+
+* `trim` - Boolean. Whether or not to trim text and comment nodes.
+* `normalize` - Boolean. If true, then turn any whitespace into a single
+  space.
+* `lowercase` - Boolean. If true, then lowercase tag names and attribute names
+  in loose mode, rather than uppercasing them.
+* `xmlns` - Boolean. If true, then namespaces are supported.
+* `position` - Boolean. If false, then don't track line/col/position.
+* `strictEntities` - Boolean. If true, only parse [predefined XML
+  entities](http://www.w3.org/TR/REC-xml/#sec-predefined-ent)
+  (`&amp;`, `&apos;`, `&gt;`, `&lt;`, and `&quot;`)
+
+## Methods
+
+`write` - Write bytes onto the stream. You don't have to do this all at
+once. You can keep writing as much as you want.
+
+`close` - Close the stream. Once closed, no more data may be written until
+it is done processing the buffer, which is signaled by the `end` event.
+
+`resume` - To gracefully handle errors, assign a listener to the `error`
+event. Then, when the error is taken care of, you can call `resume` to
+continue parsing. Otherwise, the parser will not continue while in an error
+state.
+
+## Members
+
+At all times, the parser object will have the following members:
+
+`line`, `column`, `position` - Indications of the position in the XML
+document where the parser currently is looking.
+
+`startTagPosition` - Indicates the position where the current tag starts.
+
+`closed` - Boolean indicating whether or not the parser can be written to.
+If it's `true`, then wait for the `ready` event to write again.
+
+`strict` - Boolean indicating whether or not the parser is a jerk.
+
+`opt` - Any options passed into the constructor.
+
+`tag` - The current tag being dealt with.
+
+And a bunch of other stuff that you probably shouldn't touch.
+
+## Events
+
+All events emit with a single argument. To listen to an event, assign a
+function to `on<eventname>`. Functions get executed in the this-context of
+the parser object. The list of supported events is also in the exported
+`EVENTS` array.
+
+When using the stream interface, assign handlers using the EventEmitter
+`on` function in the normal fashion.
+
+`error` - Indication that something bad happened. The error will be hanging
+out on `parser.error`, and must be deleted before parsing can continue. By
+listening to this event, you can keep an eye on that kind of stuff. Note:
+this happens *much* more in strict mode. Argument: instance of `Error`.
+
+`text` - Text node. Argument: string of text.
+
+`doctype` - The `<!DOCTYPE` declaration. Argument: doctype string.
+
+`processinginstruction` - Stuff like `<?xml foo="blerg" ?>`. Argument:
+object with `name` and `body` members. Attributes are not parsed, as
+processing instructions have implementation dependent semantics.
+
+`sgmldeclaration` - Random SGML declarations. Stuff like `<!ENTITY entity>`
+would trigger this kind of event. This is a weird thing to support, so it
+might go away at some point. SAX isn't intended to be used to parse SGML,
+after all.
+
+`opentagstart` - Emitted immediately when the tag name is available,
+but before any attributes are encountered. Argument: object with a
+`name` field and an empty `attributes` set. Note that this is the
+same object that will later be emitted in the `opentag` event.
+
+`opentag` - An opening tag. Argument: object with `name` and `attributes`.
+In non-strict mode, tag names are uppercased, unless the `lowercase`
+option is set. If the `xmlns` option is set, then it will contain
+namespace binding information on the `ns` member, and will have a
+`local`, `prefix`, and `uri` member.
+
+`closetag` - A closing tag. In loose mode, tags are auto-closed if their
+parent closes. In strict mode, well-formedness is enforced. Note that
+self-closing tags will have `closeTag` emitted immediately after `openTag`.
+Argument: tag name.
+
+`attribute` - An attribute node. Argument: object with `name` and `value`.
+In non-strict mode, attribute names are uppercased, unless the `lowercase`
+option is set. If the `xmlns` option is set, it will also contain namespace
+information.
+
+`comment` - A comment node. Argument: the string of the comment.
+
+`opencdata` - The opening tag of a `<![CDATA[` block.
+
+`cdata` - The text of a `<![CDATA[` block. Since `<![CDATA[` blocks can get
+quite large, this event may fire multiple times for a single block, if it
+is broken up into multiple `write()`s. Argument: the string of random
+character data.
+
+`closecdata` - The closing tag (`]]>`) of a `<![CDATA[` block.
+
+`opennamespace` - If the `xmlns` option is set, then this event will
+signal the start of a new namespace binding. Argument: object with
+`prefix` and `uri` members.
+
+`closenamespace` - If the `xmlns` option is set, then this event will
+signal the end of a namespace binding. Argument: object with `prefix`
+and `uri` members.
+
+`end` - Indicates the end of the parser. If the parser is used as a stream,
+then this is the `end` event.
+
+`ready` - Indicates that the parser is ready to be written to again, after
+calling `close()`.
+
+`noscript` - In non-strict mode, `<script>` tags trigger a `"script"`
+event, and their contents are not checked for special xml characters.
+If you pass `noscript: true`, then this behavior is suppressed.
+
+## Reporting Problems
+
+It's best to write a failing test if you find an issue. I will always
+accept pull requests with failing tests if they demonstrate intended
+behavior, but it is very hard to figure out what issue you're describing
+without a test. Writing a test is also the best way for you yourself
+to figure out if you really understand the issue you think you have with
+sax-js.
diff --git a/scripts/metrics/node_modules/sax/lib/sax.js b/scripts/metrics/node_modules/sax/lib/sax.js
new file mode 100644
index 00000000000..795d607ef63
--- /dev/null
+++ b/scripts/metrics/node_modules/sax/lib/sax.js
@@ -0,0 +1,1565 @@
+;(function (sax) { // wrapper for non-node envs
+  sax.parser = function (strict, opt) { return new SAXParser(strict, opt) }
+  sax.SAXParser = SAXParser
+  sax.SAXStream = SAXStream
+  sax.createStream = createStream
+
+  // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns.
+ // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)), + // since that's the earliest that a buffer overrun could occur. This way, checks are + // as rare as required, but as often as necessary to ensure never crossing this bound. + // Furthermore, buffers are only tested at most once per write(), so passing a very + // large string into write() might have undesirable effects, but this is manageable by + // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme + // edge case, result in creating at most one complete copy of the string passed in. + // Set to Infinity to have unlimited buffers. + sax.MAX_BUFFER_LENGTH = 64 * 1024 + + var buffers = [ + 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype', + 'procInstName', 'procInstBody', 'entity', 'attribName', + 'attribValue', 'cdata', 'script' + ] + + sax.EVENTS = [ + 'text', + 'processinginstruction', + 'sgmldeclaration', + 'doctype', + 'comment', + 'opentagstart', + 'attribute', + 'opentag', + 'closetag', + 'opencdata', + 'cdata', + 'closecdata', + 'error', + 'end', + 'ready', + 'script', + 'opennamespace', + 'closenamespace' + ] + + function SAXParser (strict, opt) { + if (!(this instanceof SAXParser)) { + return new SAXParser(strict, opt) + } + + var parser = this + clearBuffers(parser) + parser.q = parser.c = '' + parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH + parser.opt = opt || {} + parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags + parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase' + parser.tags = [] + parser.closed = parser.closedRoot = parser.sawRoot = false + parser.tag = parser.error = null + parser.strict = !!strict + parser.noscript = !!(strict || parser.opt.noscript) + parser.state = S.BEGIN + parser.strictEntities = parser.opt.strictEntities + parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES) + parser.attribList = [] + + // namespaces form a prototype chain. + // it always points at the current tag, + // which protos to its parent tag. + if (parser.opt.xmlns) { + parser.ns = Object.create(rootNS) + } + + // mostly just for error reporting + parser.trackPosition = parser.opt.position !== false + if (parser.trackPosition) { + parser.position = parser.line = parser.column = 0 + } + emit(parser, 'onready') + } + + if (!Object.create) { + Object.create = function (o) { + function F () {} + F.prototype = o + var newf = new F() + return newf + } + } + + if (!Object.keys) { + Object.keys = function (o) { + var a = [] + for (var i in o) if (o.hasOwnProperty(i)) a.push(i) + return a + } + } + + function checkBufferLength (parser) { + var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10) + var maxActual = 0 + for (var i = 0, l = buffers.length; i < l; i++) { + var len = parser[buffers[i]].length + if (len > maxAllowed) { + // Text/cdata nodes can get big, and since they're buffered, + // we can get here under normal conditions. + // Avoid issues by emitting the text node now, + // so at least it won't get any bigger. + switch (buffers[i]) { + case 'textNode': + closeText(parser) + break + + case 'cdata': + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + break + + case 'script': + emitNode(parser, 'onscript', parser.script) + parser.script = '' + break + + default: + error(parser, 'Max buffer length exceeded: ' + buffers[i]) + } + } + maxActual = Math.max(maxActual, len) + } + // schedule the next check for the earliest possible buffer overrun. 
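+    // e.g. with the default 64 KiB limit and a largest buffer of 24 KiB:
+    // m = 65536 - 24576 = 40960, so the next check lands 40960 characters
+    // further along in the input.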
+ var m = sax.MAX_BUFFER_LENGTH - maxActual + parser.bufferCheckPosition = m + parser.position + } + + function clearBuffers (parser) { + for (var i = 0, l = buffers.length; i < l; i++) { + parser[buffers[i]] = '' + } + } + + function flushBuffers (parser) { + closeText(parser) + if (parser.cdata !== '') { + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + } + if (parser.script !== '') { + emitNode(parser, 'onscript', parser.script) + parser.script = '' + } + } + + SAXParser.prototype = { + end: function () { end(this) }, + write: write, + resume: function () { this.error = null; return this }, + close: function () { return this.write(null) }, + flush: function () { flushBuffers(this) } + } + + var Stream + try { + Stream = require('stream').Stream + } catch (ex) { + Stream = function () {} + } + + var streamWraps = sax.EVENTS.filter(function (ev) { + return ev !== 'error' && ev !== 'end' + }) + + function createStream (strict, opt) { + return new SAXStream(strict, opt) + } + + function SAXStream (strict, opt) { + if (!(this instanceof SAXStream)) { + return new SAXStream(strict, opt) + } + + Stream.apply(this) + + this._parser = new SAXParser(strict, opt) + this.writable = true + this.readable = true + + var me = this + + this._parser.onend = function () { + me.emit('end') + } + + this._parser.onerror = function (er) { + me.emit('error', er) + + // if didn't throw, then means error was handled. + // go ahead and clear error, so we can write again. + me._parser.error = null + } + + this._decoder = null + + streamWraps.forEach(function (ev) { + Object.defineProperty(me, 'on' + ev, { + get: function () { + return me._parser['on' + ev] + }, + set: function (h) { + if (!h) { + me.removeAllListeners(ev) + me._parser['on' + ev] = h + return h + } + me.on(ev, h) + }, + enumerable: true, + configurable: false + }) + }) + } + + SAXStream.prototype = Object.create(Stream.prototype, { + constructor: { + value: SAXStream + } + }) + + SAXStream.prototype.write = function (data) { + if (typeof Buffer === 'function' && + typeof Buffer.isBuffer === 'function' && + Buffer.isBuffer(data)) { + if (!this._decoder) { + var SD = require('string_decoder').StringDecoder + this._decoder = new SD('utf8') + } + data = this._decoder.write(data) + } + + this._parser.write(data.toString()) + this.emit('data', data) + return true + } + + SAXStream.prototype.end = function (chunk) { + if (chunk && chunk.length) { + this.write(chunk) + } + this._parser.end() + return true + } + + SAXStream.prototype.on = function (ev, handler) { + var me = this + if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) { + me._parser['on' + ev] = function () { + var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments) + args.splice(0, 0, ev) + me.emit.apply(me, args) + } + } + + return Stream.prototype.on.call(me, ev, handler) + } + + // this really needs to be replaced with character classes. + // XML allows all manner of ridiculous numbers and digits. + var CDATA = '[CDATA[' + var DOCTYPE = 'DOCTYPE' + var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace' + var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/' + var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE } + + // http://www.w3.org/TR/REC-xml/#NT-NameStartChar + // This implementation works on strings, a single character at a time + // as such, it cannot ever support astral-plane characters (10000-EFFFF) + // without a significant breaking change to either this parser, or the + // JavaScript language. 
Implementation of an emoji-capable xml parser
+  // is left as an exercise for the reader.
+  var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
+
+  var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/
+
+  var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
+  var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/
+
+  function isWhitespace (c) {
+    return c === ' ' || c === '\n' || c === '\r' || c === '\t'
+  }
+
+  function isQuote (c) {
+    return c === '"' || c === '\''
+  }
+
+  function isAttribEnd (c) {
+    return c === '>' || isWhitespace(c)
+  }
+
+  function isMatch (regex, c) {
+    return regex.test(c)
+  }
+
+  function notMatch (regex, c) {
+    return !isMatch(regex, c)
+  }
+
+  var S = 0
+  sax.STATE = {
+    BEGIN: S++, // leading byte order mark or whitespace
+    BEGIN_WHITESPACE: S++, // leading whitespace
+    TEXT: S++, // general stuff
+    TEXT_ENTITY: S++, // &amp and such.
+    OPEN_WAKA: S++, // <
+    SGML_DECL: S++, // <!BLARG
+    SGML_DECL_QUOTED: S++, // <!BLARG "foo
+    DOCTYPE: S++, // <!DOCTYPE
+    DOCTYPE_QUOTED: S++, // <!DOCTYPE "//blah
+    DOCTYPE_DTD: S++, // <!DOCTYPE "//blah" [ ...
+    DOCTYPE_DTD_QUOTED: S++, // <!DOCTYPE "//blah" [ "foo
+    COMMENT_STARTING: S++, // <!-
+    COMMENT: S++, // <!--
+    COMMENT_ENDING: S++, // <!-- blah -
+    COMMENT_ENDED: S++, // <!-- blah --
+    CDATA: S++, // <![CDATA[ something
+    CDATA_ENDING: S++, // ]
+    CDATA_ENDING_2: S++, // ]]
+    PROC_INST: S++, // <?hi
+    PROC_INST_BODY: S++, // <?hi there
+    PROC_INST_ENDING: S++, // <?hi "there" ?
+    OPEN_TAG: S++, // <strong
+    OPEN_TAG_SLASH: S++, // <strong /
+    ATTRIB: S++, // <a
+    ATTRIB_NAME: S++, // <a foo
+    ATTRIB_NAME_SAW_WHITE: S++, // <a foo _
+    ATTRIB_VALUE: S++, // <a foo=
+    ATTRIB_VALUE_QUOTED: S++, // <a foo="bar
+    ATTRIB_VALUE_CLOSED: S++, // <a foo="bar"
+    ATTRIB_VALUE_UNQUOTED: S++, // <a foo=bar
+    ATTRIB_VALUE_ENTITY_Q: S++, // <foo bar="&quot;"
+    ATTRIB_VALUE_ENTITY_U: S++, // <foo bar=&quot
+    CLOSE_TAG: S++, // </a
+    CLOSE_TAG_SAW_WHITE: S++, // </a   >
+    SCRIPT: S++, //