From 8f0bc33595f5f50514e5c639518848fa6c07d0f5 Mon Sep 17 00:00:00 2001
From: Kyle Wigley
Date: Tue, 24 Aug 2021 17:12:42 -0400
Subject: [PATCH] Moving CI to GitHub actions (#3669)

* test
* test test
* try this again
* test actions in same repo
* nvm revert
* formatting
* fix sh script for building dists
* fix windows build
* add concurrency
* fix random 'Cannot track experimental parser info when active user is None' error
* fix build workflow
* test slim ci
* has changes
* set up postgres for other OS
* update descriptions
* turn off python3.9 unit tests
* add changelog
* clean up todo
* Update .github/workflows/main.yml
* create actions for common code
* temp commit to test
* cosmetic updates
* dev review feedback
* updates
* fix build checks
* rm auto formatting changes
* review feedback: update order of script for setting up postgres on macos runner
* review feedback: add reasoning for not using secrets in workflow
* review feedback: rm unnecessary changes
* more review feedback
* test pull_request_target action
* fix path to cli tool
* split up lint and unit workflows for clear responsibilities
* rm `branches-ignore` filter from pull request trigger
* testing push event
* test dynamic matrix generation
* update label logic
* finishing touches
* align naming
* pass opts to pytest
* slim down push matrix, there are a lot of jobs
* test bump num of proc
* update matrix for all event triggers
* handle case when no changes require integration tests
* dev review feedback
* clean up and add branch name for testing
* Add test results publishing as artifact (#3794)
* Test failures file
* Add testing branch
* Adding upload steps
* Adding date to name
* Adding to integration
* Always upload artifacts
* Adding adapter type
* Always publish unit test results
* Adding comments
* rm unnecessary env var
* fix changelog
* update job name
* clean up python deps

Co-authored-by: leahwicz <60146280+leahwicz@users.noreply.github.com>
---
 .circleci/config.yml                          | 101 -------
 .../actions/setup-postgres-linux/action.yml   |  10 +
 .../actions/setup-postgres-linux/setup_db.sh  |   1 +
 .../actions/setup-postgres-macos/action.yml   |  24 ++
 .../actions/setup-postgres-macos/setup_db.sh  |   1 +
 .../actions/setup-postgres-windows/action.yml |  12 +
 .../setup-postgres-windows/setup_db.sh        |   1 +
 .github/pull_request_template.md              |  11 +-
 .github/scripts/integration-test-matrix.js    |  95 +++++++
 .github/workflows/integration.yml             | 269 ++++++++++++++++++
 .github/workflows/main.yml                    | 206 ++++++++++++++
 .github/workflows/performance.yml             |  12 +-
 .github/workflows/tests.yml                   | 139 ---------
 .github/workflows/unit_tests.yml              |  61 ----
 CHANGELOG.md                                  |  15 +-
 azure-pipelines.yml                           | 154 ----------
 dev-requirements.txt                          |   1 +
 docker-compose.yml                            |   2 +-
 scripts/{build-wheels.sh => build-dist.sh}    |   7 +-
 scripts/build-sdists.sh                       |  23 --
 test.env.sample                               |   3 +-
 test/integration/base.py                      |  16 +-
 test/rpc/conftest.py                          |  21 +-
 test/unit/test_parser.py                      |   4 +
 tox.ini                                       |  23 +-
 25 files changed, 678 insertions(+), 534 deletions(-)
 delete mode 100644 .circleci/config.yml
 create mode 100644 .github/actions/setup-postgres-linux/action.yml
 create mode 120000 .github/actions/setup-postgres-linux/setup_db.sh
 create mode 100644 .github/actions/setup-postgres-macos/action.yml
 create mode 120000 .github/actions/setup-postgres-macos/setup_db.sh
 create mode 100644 .github/actions/setup-postgres-windows/action.yml
 create mode 120000 .github/actions/setup-postgres-windows/setup_db.sh
 create mode 100644 .github/scripts/integration-test-matrix.js
 create mode 100644 .github/workflows/integration.yml
 create mode 100644 .github/workflows/main.yml
 delete mode 100644 .github/workflows/tests.yml
 delete mode 100644 .github/workflows/unit_tests.yml
 delete mode 100644 azure-pipelines.yml
 rename scripts/{build-wheels.sh => build-dist.sh} (90%)
 delete mode 100755 scripts/build-sdists.sh

diff --git a/.circleci/config.yml b/.circleci/config.yml
deleted file mode 100644
index 70ac0a711fc..00000000000
--- a/.circleci/config.yml
+++ /dev/null
@@ -1,101 +0,0 @@
-version: 2.1
-jobs:
-  build-wheels:
-    docker: &test_only
-      - image: fishtownanalytics/test-container:12
-        environment:
-          DBT_INVOCATION_ENV: circle
-          DOCKER_TEST_DATABASE_HOST: "database"
-          TOX_PARALLEL_NO_SPINNER: 1
-    steps:
-      - checkout
-      - run:
-          name: Build wheels
-          command: |
-            python3.8 -m venv "${PYTHON_ENV}"
-            export PYTHON_BIN="${PYTHON_ENV}/bin/python"
-            $PYTHON_BIN -m pip install -U pip setuptools
-            $PYTHON_BIN -m pip install -r requirements.txt
-            $PYTHON_BIN -m pip install -r dev-requirements.txt
-            /bin/bash ./scripts/build-wheels.sh
-            $PYTHON_BIN ./scripts/collect-dbt-contexts.py > ./dist/context_metadata.json
-            $PYTHON_BIN ./scripts/collect-artifact-schema.py > ./dist/artifact_schemas.json
-          environment:
-            PYTHON_ENV: /home/tox/build_venv/
-      - store_artifacts:
-          path: ./dist
-          destination: dist
-  integration-postgres:
-    docker:
-      - image: fishtownanalytics/test-container:12
-        environment:
-          DBT_INVOCATION_ENV: circle
-          DOCKER_TEST_DATABASE_HOST: "database"
-          TOX_PARALLEL_NO_SPINNER: 1
-      - image: postgres
-        name: database
-        environment:
-          POSTGRES_USER: "root"
-          POSTGRES_PASSWORD: "password"
-          POSTGRES_DB: "dbt"
-    steps:
-      - checkout
-      - run:
-          name: Setup postgres
-          command: bash test/setup_db.sh
-          environment:
-            PGHOST: database
-            PGUSER: root
-            PGPASSWORD: password
-            PGDATABASE: postgres
-      - run:
-          name: Postgres integration tests
-          command: tox -p -e py36-postgres,py38-postgres -- -v -n4
-          no_output_timeout: 30m
-      - store_artifacts:
-          path: ./logs
-  integration-snowflake:
-    docker: *test_only
-    steps:
-      - checkout
-      - run:
-          name: Snowflake integration tests
-          command: tox -p -e py36-snowflake,py38-snowflake -- -v -n4
-          no_output_timeout: 30m
-      - store_artifacts:
-          path: ./logs
-  integration-redshift:
-    docker: *test_only
-    steps:
-      - checkout
-      - run:
-          name: Redshift integration tests
-          command: tox -p -e py36-redshift,py38-redshift -- -v -n4
-          no_output_timeout: 30m
-      - store_artifacts:
-          path: ./logs
-  integration-bigquery:
-    docker: *test_only
-    steps:
-      - checkout
-      - run:
-          name: Bigquery integration test
-          command: tox -p -e py36-bigquery,py38-bigquery -- -v -n4
-          no_output_timeout: 30m
-      - store_artifacts:
-          path: ./logs
-
-workflows:
-  version: 2
-  test-everything:
-    jobs:
-      - integration-postgres
-      - integration-redshift
-      - integration-bigquery
-      - integration-snowflake
-      - build-wheels:
-          requires:
-            - integration-postgres
-            - integration-redshift
-            - integration-bigquery
-            - integration-snowflake
diff --git a/.github/actions/setup-postgres-linux/action.yml b/.github/actions/setup-postgres-linux/action.yml
new file mode 100644
index 00000000000..1c8fc772a8a
--- /dev/null
+++ b/.github/actions/setup-postgres-linux/action.yml
@@ -0,0 +1,10 @@
+name: "Set up postgres (linux)"
+description: "Set up postgres service on linux vm for dbt integration tests"
+runs:
+  using: "composite"
+  steps:
+    - shell: bash
+      run: |
+        sudo systemctl start postgresql.service
+        pg_isready
+        sudo -u postgres bash ${{ github.action_path }}/setup_db.sh
diff --git a/.github/actions/setup-postgres-linux/setup_db.sh b/.github/actions/setup-postgres-linux/setup_db.sh
new file mode 120000
index 00000000000..ee75ef5048c
--- /dev/null
+++ b/.github/actions/setup-postgres-linux/setup_db.sh
@@ -0,0 +1 @@
+../../../test/setup_db.sh
\ No newline at end of file
diff --git a/.github/actions/setup-postgres-macos/action.yml b/.github/actions/setup-postgres-macos/action.yml
new file mode 100644
index 00000000000..af9a9fe1657
--- /dev/null
+++ b/.github/actions/setup-postgres-macos/action.yml
@@ -0,0 +1,24 @@
+name: "Set up postgres (macos)"
+description: "Set up postgres service on macos vm for dbt integration tests"
+runs:
+  using: "composite"
+  steps:
+    - shell: bash
+      run: |
+        brew services start postgresql
+        echo "Check PostgreSQL service is running"
+        i=10
+        COMMAND='pg_isready'
+        while [ $i -gt -1 ]; do
+          if [ $i == 0 ]; then
+            echo "PostgreSQL service not ready, all attempts exhausted"
+            exit 1
+          fi
+          echo "Check PostgreSQL service status"
+          eval $COMMAND && break
+          echo "PostgreSQL service not ready, wait 10 more sec, attempts left: $i"
+          sleep 10
+          ((i--))
+        done
+        createuser -s postgres
+        bash ${{ github.action_path }}/setup_db.sh
diff --git a/.github/actions/setup-postgres-macos/setup_db.sh b/.github/actions/setup-postgres-macos/setup_db.sh
new file mode 120000
index 00000000000..ee75ef5048c
--- /dev/null
+++ b/.github/actions/setup-postgres-macos/setup_db.sh
@@ -0,0 +1 @@
+../../../test/setup_db.sh
\ No newline at end of file
diff --git a/.github/actions/setup-postgres-windows/action.yml b/.github/actions/setup-postgres-windows/action.yml
new file mode 100644
index 00000000000..419b5e267cb
--- /dev/null
+++ b/.github/actions/setup-postgres-windows/action.yml
@@ -0,0 +1,12 @@
+name: "Set up postgres (windows)"
+description: "Set up postgres service on windows vm for dbt integration tests"
+runs:
+  using: "composite"
+  steps:
+    - shell: pwsh
+      run: |
+        $pgService = Get-Service -Name postgresql*
+        Set-Service -InputObject $pgService -Status running -StartupType automatic
+        Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru
+        $env:Path += ";$env:PGBIN"
+        bash ${{ github.action_path }}/setup_db.sh
diff --git a/.github/actions/setup-postgres-windows/setup_db.sh b/.github/actions/setup-postgres-windows/setup_db.sh
new file mode 120000
index 00000000000..ee75ef5048c
--- /dev/null
+++ b/.github/actions/setup-postgres-windows/setup_db.sh
@@ -0,0 +1 @@
+../../../test/setup_db.sh
\ No newline at end of file
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index 55f75edc11b..fab0f0c36ae 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -9,14 +9,13 @@ resolves #
   resolves #1234
 -->
 
-
 ### Description
 
-
 ### Checklist
-  - [ ] I have signed the [CLA](https://docs.getdbt.com/docs/contributor-license-agreements)
-  - [ ] I have run this code in development and it appears to resolve the stated issue
-  - [ ] This PR includes tests, or tests are not required/relevant for this PR
-  - [ ] I have updated the `CHANGELOG.md` and added information about my change to the "dbt next" section.
+
+- [ ] I have signed the [CLA](https://docs.getdbt.com/docs/contributor-license-agreements)
+- [ ] I have run this code in development and it appears to resolve the stated issue
+- [ ] This PR includes tests, or tests are not required/relevant for this PR
+- [ ] I have updated the `CHANGELOG.md` and added information about my change to the "dbt next" section.
diff --git a/.github/scripts/integration-test-matrix.js b/.github/scripts/integration-test-matrix.js
new file mode 100644
index 00000000000..89053678ff9
--- /dev/null
+++ b/.github/scripts/integration-test-matrix.js
@@ -0,0 +1,95 @@
+module.exports = ({ context }) => {
+  const defaultPythonVersion = "3.8";
+  const supportedPythonVersions = ["3.6", "3.7", "3.8", "3.9"];
+  const supportedAdapters = ["snowflake", "postgres", "bigquery", "redshift"];
+
+  // if PR, generate matrix based on files changed and PR labels
+  if (context.eventName.includes("pull_request")) {
+    // `changes` is a list of adapter names that have related
+    // file changes in the PR
+    // ex: ['postgres', 'snowflake']
+    const changes = JSON.parse(process.env.CHANGES);
+    const labels = context.payload.pull_request.labels.map(({ name }) => name);
+    console.log("labels", labels);
+    console.log("changes", changes);
+    const testAllLabel = labels.includes("test all");
+    const include = [];
+
+    for (const adapter of supportedAdapters) {
+      if (
+        changes.includes(adapter) ||
+        testAllLabel ||
+        labels.includes(`test ${adapter}`)
+      ) {
+        for (const pythonVersion of supportedPythonVersions) {
+          if (
+            pythonVersion === defaultPythonVersion ||
+            labels.includes(`test python${pythonVersion}`) ||
+            testAllLabel
+          ) {
+            // always run tests on ubuntu by default
+            include.push({
+              os: "ubuntu-latest",
+              adapter,
+              "python-version": pythonVersion,
+            });
+
+            if (labels.includes("test windows") || testAllLabel) {
+              include.push({
+                os: "windows-latest",
+                adapter,
+                "python-version": pythonVersion,
+              });
+            }
+
+            if (labels.includes("test macos") || testAllLabel) {
+              include.push({
+                os: "macos-latest",
+                adapter,
+                "python-version": pythonVersion,
+              });
+            }
+          }
+        }
+      }
+    }
+
+    console.log("matrix", { include });
+
+    return {
+      include,
+    };
+  }
+  // if not PR, generate matrix of python version, adapter, and operating
+  // system to run integration tests on
+
+  const include = [];
+  // run for all adapters and python versions on ubuntu
+  for (const adapter of supportedAdapters) {
+    for (const pythonVersion of supportedPythonVersions) {
+      include.push({
+        os: "ubuntu-latest",
+        adapter: adapter,
+        "python-version": pythonVersion,
+      });
+    }
+  }
+
+  // additionally include runs for all adapters, on macos and windows,
+  // but only for the default python version
+  for (const adapter of supportedAdapters) {
+    for (const operatingSystem of ["windows-latest", "macos-latest"]) {
+      include.push({
+        os: operatingSystem,
+        adapter: adapter,
+        "python-version": defaultPythonVersion,
+      });
+    }
+  }
+
+  console.log("matrix", { include });
+
+  return {
+    include,
+  };
+};
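For illustration: on a pull request that only touches files under
plugins/snowflake and carries no test labels, the script above would log
and return a matrix along these lines (illustrative output):

    { "include": [ { "os": "ubuntu-latest", "adapter": "snowflake", "python-version": "3.8" } ] }

Labels such as `test windows`, `test macos`, `test python3.9`, or
`test all` expand this set accordingly.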
diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml
new file mode 100644
index 00000000000..03773591962
--- /dev/null
+++ b/.github/workflows/integration.yml
@@ -0,0 +1,269 @@
+# **what?**
+# This workflow runs all integration tests for supported operating systems,
+# python versions, and core adapters. If triggered by PR,
+# the workflow will only run tests for adapters related
+# to code changes. Use the `test all` and `test ${adapter}`
+# labels to run all or additional tests. Use the `ok to test`
+# label to mark PRs from forked repositories that are safe
+# to run integration tests for. Requires secrets to run
+# against different warehouses.

+# **why?**
+# This checks the functionality of dbt from a user's perspective
+# and attempts to catch functional regressions.

+# **when?**
+# This workflow will run on every push to a protected branch
+# and when manually triggered. It will also run for all PRs, including
+# PRs from forks. The workflow will be skipped until there is a label
+# to mark the PR as safe to run.
+
+name: Adapter Integration Tests
+
+on:
+  # pushes to release branches
+  push:
+    branches:
+      - "main"
+      - "develop"
+      - "*.latest"
+      - "releases/*"
+      - "github-actions" # testing!
+  # all PRs, important to note that `pull_request_target` workflows
+  # will run in the context of the target branch of a PR
+  pull_request_target:
+  # manual trigger
+  workflow_dispatch:
+
+# explicitly turn off permissions for `GITHUB_TOKEN`
+permissions: read-all
+
+# will cancel previous workflows triggered by the same event and for the same ref
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref }}
+  cancel-in-progress: true
+
+# sets default shell to bash, for all operating systems
+defaults:
+  run:
+    shell: bash
+
+jobs:
+  # generate test metadata about what files changed and the testing matrix to use
+  test-metadata:
+    # run if not a PR from a forked repository or has a label to mark as safe to test
+    if: >-
+      github.event_name != 'pull_request_target' ||
+      github.event.pull_request.head.repo.full_name == github.repository ||
+      contains(github.event.pull_request.labels.*.name, 'ok to test')
+
+    runs-on: ubuntu-latest
+
+    outputs:
+      matrix: ${{ steps.generate-matrix.outputs.result }}
+
+    steps:
+      - name: Check out the repository (non-PR)
+        if: github.event_name != 'pull_request_target'
+        uses: actions/checkout@v2
+        with:
+          persist-credentials: false
+
+      - name: Check out the repository (PR)
+        if: github.event_name == 'pull_request_target'
+        uses: actions/checkout@v2
+        with:
+          persist-credentials: false
+          ref: ${{ github.event.pull_request.head.sha }}
+
+      - name: Check if relevant files changed
+        # https://github.com/marketplace/actions/paths-changes-filter
+        # For each filter, it sets output variable named by the filter to the text:
+        #  'true' - if any of changed files matches any of filter rules
+        #  'false' - if none of changed files matches any of filter rules
+        # also, returns:
+        #  `changes` - JSON array with names of all filters matching any of the changed files
+        uses: dorny/paths-filter@v2
+        id: get-changes
+        with:
+          token: ${{ secrets.GITHUB_TOKEN }}
+          filters: |
+            postgres:
+              - 'core/**'
+              - 'plugins/postgres/**'
+            snowflake:
+              - 'core/**'
+              - 'plugins/snowflake/**'
+            bigquery:
+              - 'core/**'
+              - 'plugins/bigquery/**'
+            redshift:
+              - 'core/**'
+              - 'plugins/redshift/**'
+              - 'plugins/postgres/**'
+
+      - name: Generate integration test matrix
+        id: generate-matrix
+        uses: actions/github-script@v4
+        env:
+          CHANGES: ${{ steps.get-changes.outputs.changes }}
+        with:
+          script: |
+            const script = require('./.github/scripts/integration-test-matrix.js')
+            const matrix = script({ context })
+            console.log(matrix)
+            return matrix
+
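+  # runs the adapter test suite for every {adapter, python, os} combination
+  # emitted by test-metadata above; the job-level concurrency key resolves
+  # to e.g. "snowflake-3.8-ubuntu-latest", serializing non-postgres runs
+  # against the shared warehouses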
+  test:
+    name: ${{ matrix.adapter }} / python ${{ matrix.python-version }} / ${{ matrix.os }}
+
+    # run if not a PR from a forked repository or has a label to mark as safe to test
+    # also checks that the matrix generated is not empty
+    if: >-
+      needs.test-metadata.outputs.matrix &&
+      fromJSON( needs.test-metadata.outputs.matrix ).include[0] &&
+      (
+        github.event_name != 'pull_request_target' ||
+        github.event.pull_request.head.repo.full_name == github.repository ||
+        contains(github.event.pull_request.labels.*.name, 'ok to test')
+      )
+
+    runs-on: ${{ matrix.os }}
+
+    needs: test-metadata
+
+    # only block on non-postgres jobs, to reduce the amount of concurrent processing against warehouses
+    concurrency: ${{ matrix.adapter != 'postgres' && matrix.adapter || github.job }}-${{ matrix.python-version }}-${{ matrix.os }}
+
+    strategy:
+      fail-fast: false
+      matrix: ${{ fromJSON(needs.test-metadata.outputs.matrix) }}
+
+    env:
+      TOXENV: integration-${{ matrix.adapter }}
+      PYTEST_ADDOPTS: "-v --color=yes -n4 --csv integration_results.csv"
+      DBT_INVOCATION_ENV: github-actions
+
+    steps:
+      - name: Check out the repository
+        if: github.event_name != 'pull_request_target'
+        uses: actions/checkout@v2
+        with:
+          persist-credentials: false
+
+      # explicitly check out the branch for the PR,
+      # this is necessary for the `pull_request_target` event
+      - name: Check out the repository (PR)
+        if: github.event_name == 'pull_request_target'
+        uses: actions/checkout@v2
+        with:
+          persist-credentials: false
+          ref: ${{ github.event.pull_request.head.sha }}
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Set up postgres (linux)
+        if: |
+          matrix.adapter == 'postgres' &&
+          runner.os == 'Linux'
+        uses: ./.github/actions/setup-postgres-linux
+
+      - name: Set up postgres (macos)
+        if: |
+          matrix.adapter == 'postgres' &&
+          runner.os == 'macOS'
+        uses: ./.github/actions/setup-postgres-macos
+
+      - name: Set up postgres (windows)
+        if: |
+          matrix.adapter == 'postgres' &&
+          runner.os == 'Windows'
+        uses: ./.github/actions/setup-postgres-windows
+
+      - name: Install python dependencies
+        run: |
+          pip install --upgrade pip
+          pip install tox
+          pip --version
+          tox --version
+
+      - name: Run tox (postgres)
+        if: matrix.adapter == 'postgres'
+        run: tox
+
+      - name: Run tox (redshift)
+        if: matrix.adapter == 'redshift'
+        env:
+          REDSHIFT_TEST_DBNAME: ${{ secrets.REDSHIFT_TEST_DBNAME }}
+          REDSHIFT_TEST_PASS: ${{ secrets.REDSHIFT_TEST_PASS }}
+          REDSHIFT_TEST_USER: ${{ secrets.REDSHIFT_TEST_USER }}
+          REDSHIFT_TEST_PORT: ${{ secrets.REDSHIFT_TEST_PORT }}
+          REDSHIFT_TEST_HOST: ${{ secrets.REDSHIFT_TEST_HOST }}
+        run: tox
+
+      - name: Run tox (snowflake)
+        if: matrix.adapter == 'snowflake'
+        env:
+          SNOWFLAKE_TEST_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }}
+          SNOWFLAKE_TEST_PASSWORD: ${{ secrets.SNOWFLAKE_TEST_PASSWORD }}
+          SNOWFLAKE_TEST_USER: ${{ secrets.SNOWFLAKE_TEST_USER }}
+          SNOWFLAKE_TEST_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_WAREHOUSE }}
+          SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN: ${{ secrets.SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN }}
+          SNOWFLAKE_TEST_OAUTH_CLIENT_ID: ${{ secrets.SNOWFLAKE_TEST_OAUTH_CLIENT_ID }}
+          SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET: ${{ secrets.SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET }}
+          SNOWFLAKE_TEST_ALT_DATABASE: ${{ secrets.SNOWFLAKE_TEST_ALT_DATABASE }}
+          SNOWFLAKE_TEST_ALT_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_ALT_WAREHOUSE }}
+          SNOWFLAKE_TEST_DATABASE: ${{ secrets.SNOWFLAKE_TEST_DATABASE }}
+          SNOWFLAKE_TEST_QUOTED_DATABASE: ${{ secrets.SNOWFLAKE_TEST_QUOTED_DATABASE }}
+          SNOWFLAKE_TEST_ROLE: ${{ secrets.SNOWFLAKE_TEST_ROLE }}
+        run: tox
+
+      - name: Run tox (bigquery)
+        if: matrix.adapter == 'bigquery'
+        env:
+          BIGQUERY_TEST_SERVICE_ACCOUNT_JSON: ${{ secrets.BIGQUERY_TEST_SERVICE_ACCOUNT_JSON }}
+          BIGQUERY_TEST_ALT_DATABASE: ${{ secrets.BIGQUERY_TEST_ALT_DATABASE }}
+        run: tox
+
+      - uses: actions/upload-artifact@v2
+        if: always()
+        with:
+          name: logs
+          path: ./logs
+
+      - name: Get current date
+        if: always()
+        id: date
+        run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" # no colons allowed for artifacts
+
+      - uses: actions/upload-artifact@v2
+        if: always()
+        with:
+          name: integration_results_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.adapter }}-${{ steps.date.outputs.date }}.csv
+          path: integration_results.csv
+
+  require-label-comment:
+    runs-on: ubuntu-latest
+
+    needs: test
+
+    permissions:
+      pull-requests: write
+
+    steps:
+      - name: Needs permission PR comment
+        if: >-
+          needs.test.result == 'skipped' &&
+          github.event_name == 'pull_request_target' &&
+          github.event.pull_request.head.repo.full_name != github.repository
+        uses: unsplash/comment-on-pr@master
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          msg: |
+            "You do not have permissions to run integration tests, @dbt-labs/core "\
+            "needs to label this PR with `ok to test` in order to run integration tests!"
+          check_for_duplicate_msg: true
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
new file mode 100644
index 00000000000..e43c3833ecf
--- /dev/null
+++ b/.github/workflows/main.yml
@@ -0,0 +1,206 @@
+# **what?**
+# Runs code quality checks, unit tests, and verifies python build on
+# all code committed to the repository. This workflow should not
+# require any secrets since it runs for PRs from forked repos.
+# By default, secrets are not passed to workflows running from
+# a forked repo.
+
+# **why?**
+# Ensure code for dbt meets a certain quality standard.
+
+# **when?**
+# This will run for all PRs, when code is pushed to a release
+# branch, and when manually triggered.
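+
+# **how?** (illustrative local equivalents, not used by CI)
+# `tox -e flake8,mypy` for code quality, `tox -e unit` for unit tests,
+# and `./scripts/build-dist.sh` followed by `twine check dist/*` to
+# verify the build.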
+
+name: Tests and Code Checks
+
+on:
+  push:
+    branches:
+      - "main"
+      - "develop"
+      - "*.latest"
+      - "releases/*"
+      - "github-actions"
+  pull_request:
+  workflow_dispatch:
+
+permissions: read-all
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref }}
+  cancel-in-progress: true
+
+defaults:
+  run:
+    shell: bash
+
+jobs:
+  code-quality:
+    name: ${{ matrix.toxenv }}
+
+    runs-on: ubuntu-latest
+
+    strategy:
+      fail-fast: false
+      matrix:
+        toxenv: [flake8, mypy]
+
+    env:
+      TOXENV: ${{ matrix.toxenv }}
+      PYTEST_ADDOPTS: "-v --color=yes"
+
+    steps:
+      - name: Check out the repository
+        uses: actions/checkout@v2
+        with:
+          persist-credentials: false
+
+      - name: Set up Python
+        uses: actions/setup-python@v2
+
+      - name: Install python dependencies
+        run: |
+          pip install --upgrade pip
+          pip install tox
+          pip --version
+          tox --version
+
+      - name: Run tox
+        run: tox
+
+  unit:
+    name: unit test / python ${{ matrix.python-version }}
+
+    runs-on: ubuntu-latest
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: [3.6, 3.7, 3.8] # TODO: support unit testing for python 3.9 (https://github.com/dbt-labs/dbt/issues/3689)
+
+    env:
+      TOXENV: "unit"
+      PYTEST_ADDOPTS: "-v --color=yes --csv unit_results.csv"
+
+    steps:
+      - name: Check out the repository
+        uses: actions/checkout@v2
+        with:
+          persist-credentials: false
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Install python dependencies
+        run: |
+          pip install --upgrade pip
+          pip install tox
+          pip --version
+          tox --version
+
+      - name: Run tox
+        run: tox
+
+      - name: Get current date
+        if: always()
+        id: date
+        run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" # no colons allowed for artifacts
+
+      - uses: actions/upload-artifact@v2
+        if: always()
+        with:
+          name: unit_results_${{ matrix.python-version }}-${{ steps.date.outputs.date }}.csv
+          path: unit_results.csv
+
+  build:
+    name: build packages
+
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Check out the repository
+        uses: actions/checkout@v2
+        with:
+          persist-credentials: false
+
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.8
+
+      - name: Install python dependencies
+        run: |
+          pip install --upgrade pip
+          pip install --upgrade setuptools wheel twine check-wheel-contents
+          pip --version
+
+      - name: Build distributions
+        run: ./scripts/build-dist.sh
+
+      - name: Show distributions
+        run: ls -lh dist/
+
+      - name: Check distribution descriptions
+        run: |
+          twine check dist/*
+
+      - name: Check wheel contents
+        run: |
+          check-wheel-contents dist/*.whl --ignore W007,W008
+
+      - uses: actions/upload-artifact@v2
+        with:
+          name: dist
+          path: dist/
+
+  test-build:
+    name: verify packages / python ${{ matrix.python-version }} / ${{ matrix.os }}
+
+    needs: build
+
+    runs-on: ${{ matrix.os }}
+
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [ubuntu-latest, macos-latest, windows-latest]
+        python-version: [3.6, 3.7, 3.8, 3.9]
+
+    steps:
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Install python dependencies
+        run: |
+          pip install --upgrade pip
+          pip install --upgrade wheel
+          pip --version
+
+      - uses: actions/download-artifact@v2
+        with:
+          name: dist
+          path: dist/
+
+      - name: Show distributions
+        run: ls -lh dist/
+
+      - name: Install wheel distributions
+        run: |
+          find ./dist/*.whl -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
+
+      - name: Check wheel distributions
+        run: |
+          dbt --version
+
+      - name: Install source distributions
+        run: |
+          find ./dist/*.gz -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
+
+      - name: Check source distributions
+        run: |
+          dbt --version
diff --git a/.github/workflows/performance.yml b/.github/workflows/performance.yml
index 2cde70ff1c2..7d698b1c20d 100644
--- a/.github/workflows/performance.yml
+++ b/.github/workflows/performance.yml
@@ -1,15 +1,13 @@
-
-name: Performance Regression Testing
+name: Performance Regression Tests
 
 # Schedule triggers
 on:
   # runs twice a day at 10:05am and 10:05pm
   schedule:
-    - cron: '5 10,22 * * *'
+    - cron: "5 10,22 * * *"
   # Allows you to run this workflow manually from the Actions tab
   workflow_dispatch:
 
 jobs:
-
   # checks fmt of runner code
   # purposefully not a dependency of any other job
   # will block merging, but not prevent developing
@@ -83,7 +81,7 @@ jobs:
       - name: Setup Python
         uses: actions/setup-python@v2.2.2
         with:
-          python-version: '3.8'
+          python-version: "3.8"
       - name: install dbt
         run: pip install -r dev-requirements.txt -r editable-requirements.txt
       - name: install hyperfine
@@ -116,11 +114,11 @@ jobs:
       - name: checkout latest
        uses: actions/checkout@v2
        with:
-          ref: '0.20.latest'
+          ref: "0.20.latest"
       - name: Setup Python
         uses: actions/setup-python@v2.2.2
         with:
-          python-version: '3.8'
+          python-version: "3.8"
       - name: move repo up a level
         run: mkdir ${{ github.workspace }}/../baseline/ && cp -r ${{ github.workspace }} ${{ github.workspace }}/../baseline
       - name: "[debug] ls new dbt location"
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
deleted file mode 100644
index 5a71973db6c..00000000000
--- a/.github/workflows/tests.yml
+++ /dev/null
@@ -1,139 +0,0 @@
-# This is a workflow to run our integration tests for windows and mac
-
-name: dbt Tests
-
-# Triggers
-on:
-  # Triggers the workflow on push or pull request events and also adds a manual trigger
-  push:
-    branches:
-      - 'develop'
-      - '*.latest'
-      - 'releases/*'
-  pull_request:
-    branches:
-      - 'develop'
-      - '*.latest'
-      - 'pr/*'
-      - 'releases/*'
-  # Allows you to run this workflow manually from the Actions tab
-  workflow_dispatch:
-
-jobs:
-  PostgresIntegrationTest:
-    runs-on: 'windows-latest' #TODO: Add Mac support
-    environment: 'Postgres'
-    steps:
-      - uses: actions/checkout@v2
-      - name: 'Install postgresql and set up database'
-        shell: pwsh
-        run: |
-          $serviceName = Get-Service -Name postgresql*
-          Set-Service -InputObject $serviceName -StartupType Automatic
-          Start-Service -InputObject $serviceName
-          & $env:PGBIN\createdb.exe -U postgres dbt
-          & $env:PGBIN\psql.exe -U postgres -c "CREATE ROLE root WITH PASSWORD '$env:ROOT_PASSWORD';"
-          & $env:PGBIN\psql.exe -U postgres -c "ALTER ROLE root WITH LOGIN;"
-          & $env:PGBIN\psql.exe -U postgres -c "GRANT CREATE, CONNECT ON DATABASE dbt TO root WITH GRANT OPTION;"
-          & $env:PGBIN\psql.exe -U postgres -c "CREATE ROLE noaccess WITH PASSWORD '$env:NOACCESS_PASSWORD' NOSUPERUSER;"
-          & $env:PGBIN\psql.exe -U postgres -c "ALTER ROLE noaccess WITH LOGIN;"
-          & $env:PGBIN\psql.exe -U postgres -c "GRANT CONNECT ON DATABASE dbt TO noaccess;"
-        env:
-          ROOT_PASSWORD: ${{ secrets.ROOT_PASSWORD }}
-          NOACCESS_PASSWORD: ${{ secrets.NOACCESS_PASSWORD }}
-
-      - name: Setup Python
-        uses: actions/setup-python@v2.2.2
-        with:
-          python-version: '3.7'
-          architecture: 'x64'
-
-      - name: 'Install dependencies'
-        run: python -m pip install --upgrade pip && pip install tox
-
-      - name: 'Run integration tests'
-        run: python -m tox -e py-postgres -- -v -n4
-
-  # These three are all similar except secure environment variables, which MUST be passed along to their tasks,
-  # but there's probably a better way to do this!
-  SnowflakeIntegrationTest:
-    strategy:
-      matrix:
-        os: [windows-latest, macos-latest]
-    runs-on: ${{ matrix.os }}
-    environment: 'Snowflake'
-    steps:
-      - uses: actions/checkout@v2
-      - name: Setup Python
-        uses: actions/setup-python@v2.2.2
-        with:
-          python-version: '3.7'
-          architecture: 'x64'
-
-      - name: 'Install dependencies'
-        run: python -m pip install --upgrade pip && pip install tox
-
-      - name: 'Run integration tests'
-        run: python -m tox -e py-snowflake -- -v -n4
-        env:
-          SNOWFLAKE_TEST_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }}
-          SNOWFLAKE_TEST_PASSWORD: ${{ secrets.SNOWFLAKE_TEST_PASSWORD }}
-          SNOWFLAKE_TEST_USER: ${{ secrets.SNOWFLAKE_TEST_USER }}
-          SNOWFLAKE_TEST_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_WAREHOUSE }}
-          SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN: ${{ secrets.SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN }}
-          SNOWFLAKE_TEST_OAUTH_CLIENT_ID: ${{ secrets.SNOWFLAKE_TEST_OAUTH_CLIENT_ID }}
-          SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET: ${{ secrets.SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET }}
-          SNOWFLAKE_TEST_ALT_DATABASE: ${{ secrets.SNOWFLAKE_TEST_ALT_DATABASE }}
-          SNOWFLAKE_TEST_ALT_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_ALT_WAREHOUSE }}
-          SNOWFLAKE_TEST_DATABASE: ${{ secrets.SNOWFLAKE_TEST_DATABASE }}
-          SNOWFLAKE_TEST_QUOTED_DATABASE: ${{ secrets.SNOWFLAKE_TEST_QUOTED_DATABASE }}
-          SNOWFLAKE_TEST_ROLE: ${{ secrets.SNOWFLAKE_TEST_ROLE }}
-
-  BigQueryIntegrationTest:
-    strategy:
-      matrix:
-        os: [windows-latest, macos-latest]
-    runs-on: ${{ matrix.os }}
-    environment: 'Bigquery'
-    steps:
-      - uses: actions/checkout@v2
-      - name: Setup Python
-        uses: actions/setup-python@v2.2.2
-        with:
-          python-version: '3.7'
-          architecture: 'x64'
-
-      - name: 'Install dependencies'
-        run: python -m pip install --upgrade pip && pip install tox
-
-      - name: 'Run integration tests'
-        run: python -m tox -e py-bigquery -- -v -n4
-        env:
-          BIGQUERY_SERVICE_ACCOUNT_JSON: ${{ secrets.BIGQUERY_SERVICE_ACCOUNT_JSON }}
-          BIGQUERY_TEST_ALT_DATABASE: ${{ secrets.BIGQUERY_TEST_ALT_DATABASE }}
-
-  RedshiftIntegrationTest:
-    strategy:
-      matrix:
-        os: [windows-latest, macos-latest]
-    runs-on: ${{ matrix.os }}
-    environment: 'Redshift'
-    steps:
-      - uses: actions/checkout@v2
-      - name: Setup Python
-        uses: actions/setup-python@v2.2.2
-        with:
-          python-version: '3.7'
-          architecture: 'x64'
-
-      - name: 'Install dependencies'
-        run: python -m pip install --upgrade pip && pip install tox
-
-      - name: 'Run integration tests'
-        run: python -m tox -e py-redshift -- -v -n4
-        env:
-          REDSHIFT_TEST_DBNAME: ${{ secrets.REDSHIFT_TEST_DBNAME }}
-          REDSHIFT_TEST_PASS: ${{ secrets.REDSHIFT_TEST_PASS }}
-          REDSHIFT_TEST_USER: ${{ secrets.REDSHIFT_TEST_USER }}
-          REDSHIFT_TEST_PORT: ${{ secrets.REDSHIFT_TEST_PORT }}
-          REDSHIFT_TEST_HOST: ${{ secrets.REDSHIFT_TEST_HOST }}
diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml
deleted file mode 100644
index 5109a5c088a..00000000000
--- a/.github/workflows/unit_tests.yml
+++ /dev/null
@@ -1,61 +0,0 @@
-# This is a workflow to run our linting and unit tests for windows, mac, and linux
-
-name: Linting and Unit Tests
-
-# Triggers
-on:
-  # Trigger on commits to develop and releases branches
-  push:
-    branches:
-      - 'develop'
-      - '*.latest'
-      - 'releases/*'
-  pull_request: # Trigger for all PRs
-  workflow_dispatch: # Allow manual triggers
-
-jobs:
-  Linting:
-    runs-on: ubuntu-latest #no need to run on every OS
-    steps:
-      - uses: actions/checkout@v2
-      - name: Setup Python
-        uses: actions/setup-python@v2.2.2
-        with:
-          python-version: '3.6'
-          architecture: 'x64'
-
-      - name: 'Install dependencies'
-        run: python -m pip install --upgrade pip && pip install tox
-
-      - name: 'Linting'
-        run: tox -e mypy,flake8 -- -v
-
-  UnitTest:
-    strategy:
-      matrix:
-        os: [windows-latest, ubuntu-latest, macos-latest]
-    runs-on: ${{ matrix.os }}
-    needs: Linting
-    steps:
-      - uses: actions/checkout@v2
-      - name: Setup Python 3.6
-        uses: actions/setup-python@v2.2.2
-        with:
-          python-version: '3.6'
-          architecture: 'x64'
-      - name: Setup Python 3.7
-        uses: actions/setup-python@v2.2.2
-        with:
-          python-version: '3.7'
-          architecture: 'x64'
-      - name: Setup Python 3.8
-        uses: actions/setup-python@v2.2.2
-        with:
-          python-version: '3.8'
-          architecture: 'x64'
-
-      - name: 'Install dependencies'
-        run: python -m pip install --upgrade pip && pip install tox
-
-      - name: 'Run unit tests'
-        run: tox -p -e py36,py37,py38
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 57b5e8ef16c..c70ea46d48d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,7 +1,10 @@
 ## dbt 0.21.0 (Release TBD)
 
-## dbt 0.21.0b2 (August 19, 2021)
+### Under the hood
+- Use GitHub Actions for CI ([#3688](https://github.com/dbt-labs/dbt/issues/3688), [#3669](https://github.com/dbt-labs/dbt/pull/3669))
+
+## dbt 0.21.0b2 (August 19, 2021)
 
 ### Features
@@ -18,7 +21,6 @@
 - Add `build` RPC method, and a subset of flags for `build` task ([#3595](https://github.com/dbt-labs/dbt/issues/3595), [#3674](https://github.com/dbt-labs/dbt/pull/3674))
 - Get more information on partial parsing version mismatches ([#3757](https://github.com/dbt-labs/dbt/issues/3757), [#3758](https://github.com/dbt-labs/dbt/pull/3758))
 
-
 ## dbt 0.21.0b1 (August 03, 2021)
 
 ### Breaking changes
@@ -61,31 +63,30 @@ Contributors:
 - [@jmriego](https://github.com/jmriego) ([#3526](https://github.com/dbt-labs/dbt/pull/3526))
 - [@danielefrigo](https://github.com/danielefrigo) ([#3547](https://github.com/dbt-labs/dbt/pull/3547))
 
-
 ## dbt 0.20.2 (Release TBD)
 
 ### Under the hood
+
 - Better error handling for BigQuery job labels that are too long. ([#3612](https://github.com/dbt-labs/dbt/pull/3612), [#3703](https://github.com/dbt-labs/dbt/pull/3703))
 - Get more information on partial parsing version mismatches ([#3757](https://github.com/dbt-labs/dbt/issues/3757), [#3758](https://github.com/dbt-labs/dbt/pull/3758))
 
 ### Fixes
+
 - Fix bug in finding analysis nodes when applying analysis patch ([#3764](https://github.com/dbt-labs/dbt/issues/3764), [#3767](https://github.com/dbt-labs/dbt/pull/3767))
 
 Contributors:
-- [@sungchun12](https://github.com/sungchun12) ([#3703](https://github.com/dbt-labs/dbt/pull/3703))
+- [@sungchun12](https://github.com/sungchun12) ([#3703](https://github.com/dbt-labs/dbt/pull/3703))
 
 ## dbt 0.20.2rc1 (August 16, 2021)
 
-
 ### Under the hood
+
 - Switch to full reparse on partial parsing exceptions. Log and report exception information. ([#3725](https://github.com/dbt-labs/dbt/issues/3725), [#3733](https://github.com/dbt-labs/dbt/pull/3733))
 - Check for existence of test node when removing. ([#3711](https://github.com/dbt-labs/dbt/issues/3711), [#3750](https://github.com/dbt-labs/dbt/pull/3750))
 
-
 ## dbt 0.20.1 (August 11, 2021)
 
-
 ## dbt 0.20.1rc1 (August 02, 2021)
 
 ### Features
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
deleted file mode 100644
index c5e7bef258c..00000000000
--- a/azure-pipelines.yml
+++ /dev/null
@@ -1,154 +0,0 @@
-# Python package
-# Create and test a Python package on multiple Python versions.
-# Add steps that analyze code, save the dist with the build record, publish to a PyPI-compatible index, and more:
-# https://docs.microsoft.com/azure/devops/pipelines/languages/python
-
-trigger:
-  branches:
-    include:
-      - develop
-      - '*.latest'
-      - pr/*
-
-jobs:
-- job: UnitTest
-  pool:
-    vmImage: 'vs2017-win2016'
-  steps:
-  - task: UsePythonVersion@0
-    inputs:
-      versionSpec: '3.7'
-      architecture: 'x64'
-
-  - script: python -m pip install --upgrade pip && pip install tox
-    displayName: 'Install dependencies'
-
-  - script: python -m tox -e py -- -v
-    displayName: Run unit tests
-
-- job: PostgresIntegrationTest
-  pool:
-    vmImage: 'vs2017-win2016'
-  dependsOn: UnitTest
-
-  steps:
-  - pwsh: |
-      $serviceName = Get-Service -Name postgresql*
-      Set-Service -InputObject $serviceName -StartupType Automatic
-      Start-Service -InputObject $serviceName
-
-      & $env:PGBIN\createdb.exe -U postgres dbt
-      & $env:PGBIN\psql.exe -U postgres -c "CREATE ROLE root WITH PASSWORD 'password';"
-      & $env:PGBIN\psql.exe -U postgres -c "ALTER ROLE root WITH LOGIN;"
-      & $env:PGBIN\psql.exe -U postgres -c "GRANT CREATE, CONNECT ON DATABASE dbt TO root WITH GRANT OPTION;"
-      & $env:PGBIN\psql.exe -U postgres -c "CREATE ROLE noaccess WITH PASSWORD 'password' NOSUPERUSER;"
-      & $env:PGBIN\psql.exe -U postgres -c "ALTER ROLE noaccess WITH LOGIN;"
-      & $env:PGBIN\psql.exe -U postgres -c "GRANT CONNECT ON DATABASE dbt TO noaccess;"
-    displayName: Install postgresql and set up database
-
-  - task: UsePythonVersion@0
-    inputs:
-      versionSpec: '3.7'
-      architecture: 'x64'
-
-  - script: python -m pip install --upgrade pip && pip install tox
-    displayName: 'Install dependencies'
-
-  - script: python -m tox -e py-postgres -- -v -n4
-    displayName: Run integration tests
-
-# These three are all similar except secure environment variables, which MUST be passed along to their tasks,
-# but there's probably a better way to do this!
-- job: SnowflakeIntegrationTest
-  pool:
-    vmImage: 'vs2017-win2016'
-  dependsOn: UnitTest
-  condition: succeeded()
-  steps:
-  - task: UsePythonVersion@0
-    inputs:
-      versionSpec: '3.7'
-      architecture: 'x64'
-
-  - script: python -m pip install --upgrade pip && pip install tox
-    displayName: 'Install dependencies'
-
-  - script: python -m tox -e py-snowflake -- -v -n4
-    env:
-      SNOWFLAKE_TEST_ACCOUNT: $(SNOWFLAKE_TEST_ACCOUNT)
-      SNOWFLAKE_TEST_PASSWORD: $(SNOWFLAKE_TEST_PASSWORD)
-      SNOWFLAKE_TEST_USER: $(SNOWFLAKE_TEST_USER)
-      SNOWFLAKE_TEST_WAREHOUSE: $(SNOWFLAKE_TEST_WAREHOUSE)
-      SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN: $(SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN)
-      SNOWFLAKE_TEST_OAUTH_CLIENT_ID: $(SNOWFLAKE_TEST_OAUTH_CLIENT_ID)
-      SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET: $(SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET)
-    displayName: Run integration tests
-
-- job: BigQueryIntegrationTest
-  pool:
-    vmImage: 'vs2017-win2016'
-  dependsOn: UnitTest
-  condition: succeeded()
-  steps:
-  - task: UsePythonVersion@0
-    inputs:
-      versionSpec: '3.7'
-      architecture: 'x64'
-  - script: python -m pip install --upgrade pip && pip install tox
-    displayName: 'Install dependencies'
-  - script: python -m tox -e py-bigquery -- -v -n4
-    env:
-      BIGQUERY_SERVICE_ACCOUNT_JSON: $(BIGQUERY_SERVICE_ACCOUNT_JSON)
-    displayName: Run integration tests
-
-- job: RedshiftIntegrationTest
-  pool:
-    vmImage: 'vs2017-win2016'
-  dependsOn: UnitTest
-  condition: succeeded()
-  steps:
-  - task: UsePythonVersion@0
-    inputs:
-      versionSpec: '3.7'
-      architecture: 'x64'
-
-  - script: python -m pip install --upgrade pip && pip install tox
-    displayName: 'Install dependencies'
-
-  - script: python -m tox -e py-redshift -- -v -n4
-    env:
-      REDSHIFT_TEST_DBNAME: $(REDSHIFT_TEST_DBNAME)
-      REDSHIFT_TEST_PASS: $(REDSHIFT_TEST_PASS)
-      REDSHIFT_TEST_USER: $(REDSHIFT_TEST_USER)
-      REDSHIFT_TEST_PORT: $(REDSHIFT_TEST_PORT)
-      REDSHIFT_TEST_HOST: $(REDSHIFT_TEST_HOST)
-    displayName: Run integration tests
-
-- job: BuildWheel
-  pool:
-    vmImage: 'vs2017-win2016'
-  dependsOn:
-    - UnitTest
-    - PostgresIntegrationTest
-    - RedshiftIntegrationTest
-    - SnowflakeIntegrationTest
-    - BigQueryIntegrationTest
-  condition: succeeded()
-  steps:
-  - task: UsePythonVersion@0
-    inputs:
-      versionSpec: '3.7'
-      architecture: 'x64'
-  - script: python -m pip install --upgrade pip setuptools && python -m pip install -r requirements.txt && python -m pip install -r dev-requirements.txt
-    displayName: Install dependencies
-  - task: ShellScript@2
-    inputs:
-      scriptPath: scripts/build-wheels.sh
-  - task: CopyFiles@2
-    inputs:
-      contents: 'dist\?(*.whl|*.tar.gz)'
-      TargetFolder: '$(Build.ArtifactStagingDirectory)'
-  - task: PublishBuildArtifacts@1
-    inputs:
-      pathtoPublish: '$(Build.ArtifactStagingDirectory)'
-      artifactName: dists
diff --git a/dev-requirements.txt b/dev-requirements.txt
index 8805f0d36d4..2fe9d45cd19 100644
--- a/dev-requirements.txt
+++ b/dev-requirements.txt
@@ -8,6 +8,7 @@ pip-tools
 pytest
 pytest-dotenv
 pytest-logbook
+pytest-csv
 pytest-xdist
 pytz
 tox>=3.13
diff --git a/docker-compose.yml b/docker-compose.yml
index e8f28ada1a2..e0a6ea9816a 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -19,7 +19,7 @@ services:
       GROUP_ID: ${GROUP_ID:-}
     command: "/root/.virtualenvs/dbt/bin/pytest"
     environment:
-      DOCKER_TEST_DATABASE_HOST: "database"
+      POSTGRES_TEST_HOST: "database"
    volumes:
      - .:/usr/app
    working_dir: /usr/app
diff --git a/scripts/build-wheels.sh b/scripts/build-dist.sh
similarity index 90%
rename from scripts/build-wheels.sh
rename to scripts/build-dist.sh
index efc4ba1fc3b..1cfe41a8c1f 100755
--- a/scripts/build-wheels.sh
+++ b/scripts/build-dist.sh
@@ -1,11 +1,12 @@
-#!/bin/bash -eo pipefail
+#!/bin/bash
+
+set -eo pipefail
 
 DBT_PATH="$( cd "$(dirname "$0")/.." ; pwd -P )"
 
 PYTHON_BIN=${PYTHON_BIN:-python}
 
-echo $SCRIPTPATH
-echo $PYTHON_BIN
+echo "$PYTHON_BIN"
 
 set -x
diff --git a/scripts/build-sdists.sh b/scripts/build-sdists.sh
deleted file mode 100755
index 8d0db621b23..00000000000
--- a/scripts/build-sdists.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/bash -eo pipefail
-
-DBT_PATH="$( cd "$(dirname "$0")/.." ; pwd -P )"
-
-echo $SCRIPTPATH
-
-set -x
-
-rm -rf "$DBT_PATH"/dist
-mkdir -p "$DBT_PATH"/dist
-
-for SUBPATH in core plugins/postgres plugins/redshift plugins/bigquery plugins/snowflake
-do
-    rm -rf "$DBT_PATH"/"$SUBPATH"/dist
-    cd "$DBT_PATH"/"$SUBPATH"
-    python setup.py sdist
-    cp -r "$DBT_PATH"/"$SUBPATH"/dist/* "$DBT_PATH"/dist/
-done
-
-cd "$DBT_PATH"
-python setup.py sdist
-
-set +x
diff --git a/test.env.sample b/test.env.sample
index f981f3a4d15..c31f2b88557 100644
--- a/test.env.sample
+++ b/test.env.sample
@@ -12,9 +12,8 @@ SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN=
 SNOWFLAKE_TEST_OAUTH_CLIENT_ID=
 SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET=
 
-BIGQUERY_SERVICE_ACCOUNT_JSON=
+BIGQUERY_TEST_SERVICE_ACCOUNT_JSON=
 BIGQUERY_TEST_ALT_DATABASE=
-BIGQUERY_POLICY_TAG=
 
 REDSHIFT_TEST_HOST=
 REDSHIFT_TEST_USER=
diff --git a/test/integration/base.py b/test/integration/base.py
index 0abc54efb5e..c7854921e26 100644
--- a/test/integration/base.py
+++ b/test/integration/base.py
@@ -143,7 +143,7 @@ class DBTIntegrationTest(unittest.TestCase):
 
     @property
     def database_host(self):
-        return os.environ.get('DOCKER_TEST_DATABASE_HOST', 'localhost')
+        return os.getenv('POSTGRES_TEST_HOST', 'localhost')
 
     def postgres_profile(self):
         return {
@@ -156,20 +156,20 @@ def postgres_profile(self):
                 'type': 'postgres',
                 'threads': 4,
                 'host': self.database_host,
-                'port': 5432,
-                'user': 'root',
-                'pass': 'password',
-                'dbname': 'dbt',
+                'port': int(os.getenv('POSTGRES_TEST_PORT', 5432)),
+                'user': os.getenv('POSTGRES_TEST_USER', 'root'),
+                'pass': os.getenv('POSTGRES_TEST_PASS', 'password'),
+                'dbname': os.getenv('POSTGRES_TEST_DATABASE', 'dbt'),
                 'schema': self.unique_schema()
             },
             'noaccess': {
                 'type': 'postgres',
                 'threads': 4,
                 'host': self.database_host,
-                'port': 5432,
+                'port': int(os.getenv('POSTGRES_TEST_PORT', 5432)),
                 'user': 'noaccess',
                 'pass': 'password',
-                'dbname': 'dbt',
+                'dbname': os.getenv('POSTGRES_TEST_DATABASE', 'dbt'),
                 'schema': self.unique_schema()
             }
         },
@@ -245,7 +245,7 @@ def snowflake_profile(self):
         }
 
     def bigquery_profile(self):
-        credentials_json_str = os.getenv('BIGQUERY_SERVICE_ACCOUNT_JSON').replace("'", '')
+        credentials_json_str = os.getenv('BIGQUERY_TEST_SERVICE_ACCOUNT_JSON').replace("'", '')
 
         credentials = json.loads(credentials_json_str)
         project_id = credentials.get('project_id')
diff --git a/test/rpc/conftest.py b/test/rpc/conftest.py
index 561c009f92e..1a13956b36f 100644
--- a/test/rpc/conftest.py
+++ b/test/rpc/conftest.py
@@ -64,7 +64,6 @@ def project_root(tmpdir):
     return tmpdir.mkdir('project')
 
 def postgres_profile_data(unique_schema):
-    database_host = os.environ.get('DOCKER_TEST_DATABASE_HOST', 'localhost')
 
     return {
         'config': {
@@ -75,21 +74,21 @@ def postgres_profile_data(unique_schema):
         'default': {
             'type': 'postgres',
             'threads': 4,
-            'host': database_host,
-            'port': 5432,
-            'user': 'root',
-            'pass': 'password',
-            'dbname': 'dbt',
+            'host': os.environ.get('POSTGRES_TEST_HOST', 'localhost'),
+            'port': int(os.environ.get('POSTGRES_TEST_PORT', 5432)),
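+            # these defaults mirror test/setup_db.sh and the CI postgres
+            # actions; override via POSTGRES_TEST_* for other environments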
+            'user': os.environ.get('POSTGRES_TEST_USER', 'root'),
+            'pass': os.environ.get('POSTGRES_TEST_PASS', 'password'),
+            'dbname': os.environ.get('POSTGRES_TEST_DATABASE', 'dbt'),
             'schema': unique_schema,
         },
         'other_schema': {
             'type': 'postgres',
             'threads': 4,
-            'host': database_host,
-            'port': 5432,
-            'user': 'root',
-            'pass': 'password',
-            'dbname': 'dbt',
+            'host': os.environ.get('POSTGRES_TEST_HOST', 'localhost'),
+            'port': int(os.environ.get('POSTGRES_TEST_PORT', 5432)),
+            'user': os.environ.get('POSTGRES_TEST_USER', 'root'),
+            'pass': os.environ.get('POSTGRES_TEST_PASS', 'password'),
+            'dbname': os.environ.get('POSTGRES_TEST_DATABASE', 'dbt'),
             'schema': unique_schema+'_alt',
         }
     },
diff --git a/test/unit/test_parser.py b/test/unit/test_parser.py
index fc60b39a111..7274ffea877 100644
--- a/test/unit/test_parser.py
+++ b/test/unit/test_parser.py
@@ -7,6 +7,7 @@
 import dbt.flags
 import dbt.parser
 
+from dbt import tracking
 from dbt.exceptions import CompilationException
 from dbt.parser import (
     ModelParser, MacroParser, DataTestParser, SchemaParser,
@@ -69,6 +70,9 @@ def _generate_macros(self):
     def setUp(self):
         dbt.flags.STRICT_MODE = True
         dbt.flags.WARN_ERROR = True
+        # HACK: this is needed since tracking events can
+        # be sent when using the model parser
+        tracking.do_not_track()
 
         self.maxDiff = None
diff --git a/tox.ini b/tox.ini
index 9a77deb6bc1..bce3be1ea47 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,14 +1,6 @@
 [tox]
 skipsdist = True
-envlist = py36,py37,py38,flake8,mypy
-
-[testenv]
-description = unit testing
-skip_install = true
-commands = {envpython} -m pytest {posargs} test/unit
-deps =
-  -rdev-requirements.txt
-  -reditable-requirements.txt
+envlist = py36,py37,py38,py39,flake8,mypy
 
 [testenv:flake8]
 description = flake8 code checks
@@ -32,10 +24,19 @@ deps =
   -rdev-requirements.txt
   -reditable-requirements.txt
 
-[testenv:py{36,37,38,39,}-{postgres,redshift,snowflake,bigquery}]
+[testenv:{unit,py36,py37,py38,py39,py}]
+description = unit testing
+skip_install = true
+passenv = DBT_* PYTEST_ADDOPTS
+commands = {envpython} -m pytest {posargs} test/unit
+deps =
+  -rdev-requirements.txt
+  -reditable-requirements.txt
+
+[testenv:{integration,py36,py37,py38,py39,py}-{postgres,redshift,snowflake,bigquery}]
 description = adapter plugin integration testing
 skip_install = true
-passenv = *
+passenv = DBT_* REDSHIFT_TEST_* BIGQUERY_TEST_* SNOWFLAKE_TEST_* POSTGRES_TEST_* PYTEST_ADDOPTS
 commands =
   postgres: {envpython} -m pytest {posargs} -m profile_postgres test/integration
   postgres: {envpython} -m pytest {posargs} --profile=postgres test/rpc