diff --git a/.changes/0.1.0-a8.md b/.changes/0.1.0-a8.md deleted file mode 100644 index 539ae11cc..000000000 --- a/.changes/0.1.0-a8.md +++ /dev/null @@ -1,24 +0,0 @@ -## dbt-adapter 0.1.0-a8 - February 22, 2024 - -### Features - -* Update RelationConfig to capture all fields used by adapters - -### Fixes - -* Ignore adapter-level support warnings for 'custom' constraints -* Make all adapter zone tests importable by removing "Test" prefix - -### Docs - -* Configure `changie` -* Setup ADR tracking framework -* Create issue templates -* Create PR template - -### Under the Hood - -* Configure `dependabot` -* Implement unit testing in CI -* Allow version to be specified in either __version__.py or __about__.py -* Remove __init__.py file from dbt.tests diff --git a/.changes/1.0.0.md b/.changes/1.0.0.md new file mode 100644 index 000000000..b6cc44a9f --- /dev/null +++ b/.changes/1.0.0.md @@ -0,0 +1,15 @@ +## dbt-adapter 1.0.0 - April 01, 2024 + +### Fixes + +* Add field wrapper to BaseRelation members that were missing it. 
+* Add "description" and "meta" fields to RelationConfig protocol + +### Under the Hood + +* Lazy load agate to improve dbt-core performance +* add BaseAdapater.MAX_SCHEMA_METADATA_RELATIONS + +### Security + +* Pin `black>=24.3` in `pyproject.toml` diff --git a/.changes/1.1.0-rc1.md b/.changes/1.1.0-rc1.md new file mode 100644 index 000000000..8b47b1098 --- /dev/null +++ b/.changes/1.1.0-rc1.md @@ -0,0 +1,12 @@ +## dbt-adapter 1.1.0-rc1 - April 17, 2024 + +### Features + +* Debug log when `type_code` fails to convert to a `data_type` +* Introduce TableLastModifiedMetadataBatch and implement BaseAdapter.calculate_freshness_from_metadata_batch +* Support for sql fixtures in unit testing + +### Under the Hood + +* Add the option to set the log level of the AdapterRegistered event +* Update dependabot config to cover GHA diff --git a/.changes/1.1.0/Features-20240323-160251.yaml b/.changes/1.1.0/Features-20240323-160251.yaml new file mode 100644 index 000000000..c4b594646 --- /dev/null +++ b/.changes/1.1.0/Features-20240323-160251.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Debug log when `type_code` fails to convert to a `data_type` +time: 2024-03-23T16:02:51.306658-06:00 +custom: + Author: dbeatty10 + Issue: "8912" diff --git a/.changes/1.1.0/Features-20240325-180611.yaml b/.changes/1.1.0/Features-20240325-180611.yaml new file mode 100644 index 000000000..2299d2ee2 --- /dev/null +++ b/.changes/1.1.0/Features-20240325-180611.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Introduce TableLastModifiedMetadataBatch and implement BaseAdapter.calculate_freshness_from_metadata_batch +time: 2024-03-25T18:06:11.816163-04:00 +custom: + Author: michelleark + Issue: "138" diff --git a/.changes/1.1.0/Features-20240409-084844.yaml b/.changes/1.1.0/Features-20240409-084844.yaml new file mode 100644 index 000000000..4fceebdab --- /dev/null +++ b/.changes/1.1.0/Features-20240409-084844.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Support for sql fixtures in unit testing +time: 
2024-04-09T08:48:44.441106-04:00 +custom: + Author: gshank + Issue: "148" diff --git a/.changes/1.1.0/Under the Hood-20240329-093307.yaml b/.changes/1.1.0/Under the Hood-20240329-093307.yaml new file mode 100644 index 000000000..85a00a096 --- /dev/null +++ b/.changes/1.1.0/Under the Hood-20240329-093307.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Add the option to set the log level of the AdapterRegistered event +time: 2024-03-29T09:33:07.737464-05:00 +custom: + Author: emmyoop + Issue: "141" diff --git a/.changes/1.1.0/Under the Hood-20240410-184109.yaml b/.changes/1.1.0/Under the Hood-20240410-184109.yaml new file mode 100644 index 000000000..c0fc5b920 --- /dev/null +++ b/.changes/1.1.0/Under the Hood-20240410-184109.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Update dependabot config to cover GHA +time: 2024-04-10T18:41:09.719652-04:00 +custom: + Author: mikealfare + Issue: "161" diff --git a/.changes/unreleased/Dependencies-20240424-140142.yaml b/.changes/unreleased/Dependencies-20240424-140142.yaml new file mode 100644 index 000000000..c263e3c2e --- /dev/null +++ b/.changes/unreleased/Dependencies-20240424-140142.yaml @@ -0,0 +1,6 @@ +kind: Dependencies +body: add support for py3.12 +time: 2024-04-24T14:01:42.576383-07:00 +custom: + Author: colin-rogers-dbt + Issue: "57" diff --git a/.changes/unreleased/Features-20240412-192040.yaml b/.changes/unreleased/Features-20240412-192040.yaml new file mode 100644 index 000000000..dc3f6799c --- /dev/null +++ b/.changes/unreleased/Features-20240412-192040.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Cross-database `cast` macro +time: 2024-04-12T19:20:40.904842-06:00 +custom: + Author: MichelleArk dbeatty10 + Issue: "84" diff --git a/.changes/unreleased/Features-20240418-155123.yaml b/.changes/unreleased/Features-20240418-155123.yaml new file mode 100644 index 000000000..a3d9a56dd --- /dev/null +++ b/.changes/unreleased/Features-20240418-155123.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Allow adapters to opt 
out of aliasing the subquery generated by render_limited +time: 2024-04-18T15:51:23.584295-07:00 +custom: + Author: colin-rogers-dbt + Issue: "124" diff --git a/.changes/unreleased/Features-20240418-155531.yaml b/.changes/unreleased/Features-20240418-155531.yaml new file mode 100644 index 000000000..02614b08f --- /dev/null +++ b/.changes/unreleased/Features-20240418-155531.yaml @@ -0,0 +1,7 @@ +kind: Features +body: subquery alias generated by render_limited now includes the relation name to + mitigate duplicate aliasing +time: 2024-04-18T15:55:31.826729-07:00 +custom: + Author: colin-rogers-dbt + Issue: ' 124' diff --git a/.changes/unreleased/Fixes-20240411-185203.yaml b/.changes/unreleased/Fixes-20240411-185203.yaml new file mode 100644 index 000000000..5df91f935 --- /dev/null +++ b/.changes/unreleased/Fixes-20240411-185203.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Fix adapter-specific cast handling for constraint enforcement +time: 2024-04-11T18:52:03.960202-07:00 +custom: + Author: michelleark + Issue: "166" diff --git a/.changes/unreleased/Under the Hood-20240417-192843.yaml b/.changes/unreleased/Under the Hood-20240417-192843.yaml new file mode 100644 index 000000000..94ec9292d --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240417-192843.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Validate that dbt-core and dbt-adapters remain de-coupled +time: 2024-04-17T19:28:43.400023-04:00 +custom: + Author: mikealfare + Issue: "144" diff --git a/.changes/unreleased/Under the Hood-20240423-094843.yaml b/.changes/unreleased/Under the Hood-20240423-094843.yaml new file mode 100644 index 000000000..664f39a62 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240423-094843.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: remove dbt_version from query comment test fixture +time: 2024-04-23T09:48:43.208035-07:00 +custom: + Author: colin-rogers-dbt + Issue: "184" diff --git a/.github/ISSUE_TEMPLATE/internal-epic.yml 
b/.github/ISSUE_TEMPLATE/internal-epic.yml index 504514a3e..8cfb3aefe 100644 --- a/.github/ISSUE_TEMPLATE/internal-epic.yml +++ b/.github/ISSUE_TEMPLATE/internal-epic.yml @@ -30,7 +30,7 @@ body: label: Objectives description: | What are the high level goals we are trying to achieve? Provide use cases if available. - + Example: - [ ] Allow adapter maintainers to support custom materializations - [ ] Reduce maintenance burden for incremental users by offering materialized views @@ -48,7 +48,7 @@ body: Provide a list of GH issues that will build out this functionality. This may start empty, or as a checklist of items. However, it should eventually become a list of Feature Implementation tickets. - + Example: - [ ] Create new macro to select warehouse - [ ] https://github.com/dbt-labs/dbt-adapters/issues/42 @@ -59,6 +59,24 @@ body: validations: required: false + - type: textarea + attributes: + label: Documentation + description: | + Provide a list of relevant documentation. Is there a proof of concept? + Does this require any RFCs, ADRs, etc.? + If the documentation exists, link it; if it does not exist yet, reference it descriptively. + + Example: + - [ ] RFC for updating connection interface to accept new parameters + - [ ] POC: https://github.com/dbt-labs/dbt-adapters/pull/42 + value: | + ```[tasklist] + - [ ] Task + ``` + validations: + required: false + - type: textarea attributes: label: Consequences diff --git a/.github/ISSUE_TEMPLATE/internal-feature-implementation.yml b/.github/ISSUE_TEMPLATE/internal-feature-implementation.yml index 217e847b2..7a99365b9 100644 --- a/.github/ISSUE_TEMPLATE/internal-feature-implementation.yml +++ b/.github/ISSUE_TEMPLATE/internal-feature-implementation.yml @@ -41,7 +41,7 @@ body: label: Acceptance criteria description: | What is the definition of done for this feature? Include any relevant edge cases and/or test cases. 
- + Example: - [ ] If there are no config changes, don't alter the materialized view - [ ] If the materialized view is scheduled to refresh, a manual refresh should not be issued @@ -54,11 +54,11 @@ body: - type: textarea attributes: - label: Suggested tests + label: Testing description: | Provide scenarios to test. Include both positive and negative tests if possible. Link to existing similar tests if appropriate. - + Example: - [ ] Test with no `materialized` field in the model config. Expect pass. - [ ] Test with a `materialized` field in the model config that is not valid. Expect ConfigError. @@ -68,16 +68,27 @@ body: ``` validations: required: true - + - type: textarea attributes: - label: Are there any security concerns with these changes? + label: Security description: | + Are there any security concerns with these changes? When in doubt, run it by the security team. placeholder: | Example: Logging sensitive data validations: - required: true + required: true + + - type: textarea + attributes: + label: Docs + description: | + Are there any docs that will need to be added or updated? + placeholder: | + Example: We need to document how to configure this new authentication method. 
+ validations: + required: true - type: textarea attributes: diff --git a/.github/actions/build-hatch/action.yml b/.github/actions/build-hatch/action.yml index 9c6359514..fe9825d46 100644 --- a/.github/actions/build-hatch/action.yml +++ b/.github/actions/build-hatch/action.yml @@ -11,9 +11,9 @@ inputs: working-dir: description: Where to run commands from, supports namespace packaging default: "./" - artifacts-dir: + archive-name: description: Where to upload the artifacts - default: "dist" + required: true runs: using: composite @@ -32,5 +32,6 @@ runs: - name: Upload artifacts uses: actions/upload-artifact@v3 with: - name: ${{ inputs.artifacts-dir}} + name: ${{ inputs.archive-name }} path: ${{ inputs.working-dir }}dist/ + retention-days: 3 diff --git a/.github/actions/publish-pypi/action.yml b/.github/actions/publish-pypi/action.yml index c97becebd..deffc6e36 100644 --- a/.github/actions/publish-pypi/action.yml +++ b/.github/actions/publish-pypi/action.yml @@ -2,7 +2,7 @@ name: Publish - PyPI description: Publish artifacts saved during build step to PyPI inputs: - artifacts-dir-name: + archive-name: description: Where to download the artifacts from required: true repository-url: @@ -16,8 +16,12 @@ runs: - name: Download artifacts uses: actions/download-artifact@v3 with: - name: ${{ inputs.artifacts-dir }} - path: . 
+ name: ${{ inputs.archive-name }} + path: dist/ + + - name: "[DEBUG]" + run : ls -R + shell: bash - name: Publish artifacts to PyPI uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.github/actions/setup-hatch/action.yml b/.github/actions/setup-hatch/action.yml index 7b7780ef7..6b15cdbf3 100644 --- a/.github/actions/setup-hatch/action.yml +++ b/.github/actions/setup-hatch/action.yml @@ -18,5 +18,13 @@ runs: python-version: ${{ inputs.python-version }} - name: Install dev dependencies + shell: bash run: ${{ inputs.setup-command }} + + - name: Add brew to the PATH + shell: bash + run: echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH + + - name: Install pre-commit shell: bash + run: brew install pre-commit diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 2a6f34492..02f010c76 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,8 +1,17 @@ version: 2 updates: - # python dependencies - package-ecosystem: "pip" directory: "/" schedule: interval: "daily" rebase-strategy: "disabled" + - package-ecosystem: "pip" + directory: "/dbt-tests-adapter" + schedule: + interval: "daily" + rebase-strategy: "disabled" + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + rebase-strategy: "disabled" diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 747dbc2d1..4fc2fcf8e 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -30,6 +30,6 @@ resolves # ### Checklist - [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-adapter/blob/main/CONTRIBUTING.md) and understand what's expected of me -- [ ] I have run this code in development and it appears to resolve the stated issue +- [ ] I have run this code in development, and it appears to resolve the stated issue - [ ] This PR includes tests, or tests are not required/relevant for this PR -- [ ] This PR has no interface changes 
(e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc) or this PR has already received feedback and approval from Product or DX +- [ ] This PR has no interface changes (e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc.) or this PR has already received feedback and approval from Product or DX diff --git a/.github/workflows/code-quality.yml b/.github/workflows/code-quality.yml index 4f5b392ee..9c2038478 100644 --- a/.github/workflows/code-quality.yml +++ b/.github/workflows/code-quality.yml @@ -10,17 +10,13 @@ on: permissions: read-all -defaults: - run: - shell: bash - # will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise concurrency: group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }} cancel-in-progress: true jobs: - lint: + code-quality: name: Code Quality runs-on: ubuntu-latest @@ -33,8 +29,6 @@ jobs: - name: Setup `hatch` uses: ./.github/actions/setup-hatch - - name: Run linters - run: hatch run lint:all - - - name: Run typechecks - run: hatch run typecheck:all + - name: Run code quality + shell: bash + run: hatch run code-quality diff --git a/.github/workflows/github-release.yml b/.github/workflows/github-release.yml new file mode 100644 index 000000000..1c2f41b54 --- /dev/null +++ b/.github/workflows/github-release.yml @@ -0,0 +1,259 @@ +# **what?** +# Create a new release on GitHub and include any artifacts in the `/dist` directory of the GitHub artifacts store. +# +# Inputs: +# sha: The commit to attach to this release +# version_number: The release version number (i.e. 1.0.0b1, 1.2.3rc2, 1.0.0) +# changelog_path: Path to the changelog file for release notes +# test_run: Test run (Publish release as draft) +# +# **why?** +# Reusable and consistent GitHub release process. +# +# **when?** +# Call after a successful build. 
Build artifacts should be ready to release and live in a dist/ directory. +# +# This workflow expects the artifacts to already be built and living in the artifact store of the workflow. +# +# Validation Checks +# +# 1. If no release already exists for this commit and version, create the tag and release it to GitHub. +# 2. If a release already exists for this commit, skip creating the release but finish with a success. +# 3. If a release exists for this commit under a different tag, fail. +# 4. If the commit is already associated with a different release, fail. + +name: GitHub Release + +on: + workflow_call: + inputs: + sha: + description: The commit to attach to this release + required: true + type: string + version_number: + description: The release version number (i.e. 1.0.0b1) + required: true + type: string + changelog_path: + description: Path to the changelog file for release notes + required: true + type: string + test_run: + description: Test run (Publish release as draft) + required: true + type: boolean + archive_name: + description: artifact name to download + required: true + type: string + outputs: + tag: + description: The path to the changelog for this version + value: ${{ jobs.check-release-exists.outputs.tag }} + +permissions: + contents: write + +env: + REPO_LINK: ${{ github.server_url }}/${{ github.repository }} + NOTIFICATION_PREFIX: "[GitHub Release]" + +jobs: + log-inputs: + runs-on: ubuntu-latest + steps: + - name: "[DEBUG] Print Variables" + run: | + echo The last commit sha in the release: ${{ inputs.sha }} + echo The release version number: ${{ inputs.version_number }} + echo Expected Changelog path: ${{ inputs.changelog_path }} + echo Test run: ${{ inputs.test_run }} + echo Repo link: ${{ env.REPO_LINK }} + echo Notification prefix: ${{ env.NOTIFICATION_PREFIX }} + + check-release-exists: + runs-on: ubuntu-latest + outputs: + exists: ${{ steps.release_check.outputs.exists }} + draft_exists: ${{ steps.release_check.outputs.draft_exists }} 
+ tag: ${{ steps.set_tag.outputs.tag }} + + steps: + - name: "Generate Release Tag" + id: set_tag + run: echo "tag=v${{ inputs.version_number }}" >> $GITHUB_OUTPUT + + # When the GitHub CLI doesn't find a release for the given tag, it will exit 1 with a + # message of "release not found". In our case, it's not an actual error, just a + # confirmation that the release does not already exist so we can go ahead and create it. + # The `|| true` makes it so the step does not exit with a non-zero exit code + # Also check if the release already exists in draft state. If it does, and we are not + # testing then we can publish that draft as is. If it's in draft and we are testing, skip the + # release. + - name: "Check If Release Exists For Tag ${{ steps.set_tag.outputs.tag }}" + id: release_check + run: | + output=$((gh release view ${{ steps.set_tag.outputs.tag }} --json isDraft,targetCommitish --repo ${{ env.REPO_LINK }}) 2>&1) || true + if [[ "$output" == "release not found" ]] + then + title="Release for tag ${{ steps.set_tag.outputs.tag }} does not exist." + message="Check passed." + echo "exists=false" >> $GITHUB_OUTPUT + echo "draft_exists=false" >> $GITHUB_OUTPUT + echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message" + exit 0 + fi + commit=$(jq -r '.targetCommitish' <<< "$output") + if [[ $commit != ${{ inputs.sha }} ]] + then + title="Release for tag ${{ steps.set_tag.outputs.tag }} already exists for commit $commit!" + message="Cannot create a new release for commit ${{ inputs.sha }}. Exiting." + echo "::error title=${{ env.NOTIFICATION_PREFIX }}: $title::$message" + exit 1 + fi + isDraft=$(jq -r '.isDraft' <<< "$output") + if [[ $isDraft == true ]] && [[ ${{ inputs.test_run }} == false ]] + then + title="Release tag ${{ steps.set_tag.outputs.tag }} already associated with the draft release." + message="Release workflow will publish the associated release." 
+ echo "exists=false" >> $GITHUB_OUTPUT + echo "draft_exists=true" >> $GITHUB_OUTPUT + echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message" + exit 0 + fi + title="Release for tag ${{ steps.set_tag.outputs.tag }} already exists." + message="Skip GitHub Release Publishing." + echo "exists=true" >> $GITHUB_OUTPUT + echo "draft_exists=false" >> $GITHUB_OUTPUT + echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + REPO: ${{ env.REPO_LINK }} + + - name: "[DEBUG] Log Job Outputs" + run: | + echo exists: ${{ steps.release_check.outputs.exists }} + echo draft_exists: ${{ steps.release_check.outputs.draft_exists }} + echo tag: ${{ steps.set_tag.outputs.tag }} + + skip-github-release: + runs-on: ubuntu-latest + needs: [check-release-exists] + if: needs.check-release-exists.outputs.exists == 'true' + + steps: + - name: "Tag Exists, Skip GitHub Release Job" + run: | + echo title="A tag already exists for ${{ needs.check-release-exists.outputs.tag }} and commit." + echo message="Skipping GitHub release." 
+ echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message" + + audit-release-different-commit: + runs-on: ubuntu-latest + needs: [check-release-exists] + if: needs.check-release-exists.outputs.exists == 'false' + + steps: + - name: "Check If Release Already Exists For Commit" + uses: cardinalby/git-get-release-action@1.2.4 + id: check_release_commit + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + commitSha: ${{ inputs.sha }} + doNotFailIfNotFound: true # returns blank outputs when not found instead of error + searchLimit: 15 # Since we only care about recent releases, speed up the process + + - name: "[DEBUG] Print Release Details" + run: | + echo steps.check_release_commit.outputs.id: ${{ steps.check_release_commit.outputs.id }} + echo steps.check_release_commit.outputs.tag_name: ${{ steps.check_release_commit.outputs.tag_name }} + echo steps.check_release_commit.outputs.target_commitish: ${{ steps.check_release_commit.outputs.target_commitish }} + echo steps.check_release_commit.outputs.prerelease: ${{ steps.check_release_commit.outputs.prerelease }} + + # Since we already know a release for this tag does not exist, if we find anything it's for the wrong tag, exit + - name: "Check If The Tag Matches The Version Number" + if: steps.check_release_commit.outputs.id != '' + run: | + title="Tag ${{ steps.check_release_commit.outputs.tag_name }} already exists for this commit!" 
+ message="Cannot create a new tag for ${{ needs.check-release-exists.outputs.tag }} for the same commit" + echo "::error title=${{ env.NOTIFICATION_PREFIX }}: $title::$message" + exit 1 + + publish-draft-release: + runs-on: ubuntu-latest + needs: [check-release-exists, audit-release-different-commit] + if: >- + needs.check-release-exists.outputs.draft_exists == 'true' && + inputs.test_run == false + + steps: + - name: "Publish Draft Release - ${{ needs.check-release-exists.outputs.tag }}" + run: | + gh release edit $TAG --draft=false --repo ${{ env.REPO_LINK }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + TAG: ${{ needs.check-release-exists.outputs.tag }} + + create-github-release: + runs-on: ubuntu-latest + needs: [check-release-exists, audit-release-different-commit] + if: needs.check-release-exists.outputs.draft_exists == 'false' + + steps: + - name: "Check out repository" + uses: actions/checkout@v4 + with: + ref: ${{ inputs.sha }} + + - name: "Download Artifact ${{ inputs.archive_name }}" + uses: actions/download-artifact@v3 + with: + name: ${{ inputs.archive_name }} + path: dist/ + + - name: "[DEBUG] Display Structure Of Expected Files" + run: | + ls -R .changes + ls -l dist + + - name: "Set Release Type" + id: release_type + run: | + if ${{ contains(inputs.version_number, 'rc') || contains(inputs.version_number, 'b') }} + then + echo Release will be set as pre-release + echo "prerelease=--prerelease" >> $GITHUB_OUTPUT + else + echo This is not a prerelease + fi + + - name: "Set As Draft Release" + id: draft + run: | + if [[ ${{ inputs.test_run }} == true ]] + then + echo Release will be published as draft + echo "draft=--draft" >> $GITHUB_OUTPUT + else + echo This is not a draft release + fi + + - name: "GitHub Release Workflow Annotation" + run: | + title="Release ${{ needs.check-release-exists.outputs.tag }}" + message="Configuration: ${{ steps.release_type.outputs.prerelease }} ${{ steps.draft.outputs.draft }}" + echo "::notice title=${{ 
env.NOTIFICATION_PREFIX }}: $title::$message" + + - name: "Create New GitHub Release - ${{ needs.check-release-exists.outputs.tag }}" + run: | + gh release create $TAG ./dist/* --title "$TITLE" --notes-file $RELEASE_NOTES --target $COMMIT $PRERELEASE $DRAFT --repo ${{ env.REPO_LINK }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + TAG: ${{ needs.check-release-exists.outputs.tag }} + TITLE: ${{ github.event.repository.name }} ${{ needs.check-release-exists.outputs.tag }} + RELEASE_NOTES: ${{ inputs.changelog_path }} + COMMIT: ${{ inputs.sha }} + PRERELEASE: ${{ steps.release_type.outputs.prerelease }} + DRAFT: ${{ steps.draft.outputs.draft }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index baa32604e..1135adb84 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -60,6 +60,7 @@ on: # this is the permission that allows creating a new release permissions: contents: write + id-token: write # will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise concurrency: @@ -71,23 +72,55 @@ defaults: shell: bash jobs: + release-inputs: + name: "Release inputs" + runs-on: ubuntu-latest + outputs: + working-dir: ${{ steps.release-inputs.outputs.working-dir }} + run-unit-tests: ${{ steps.release-inputs.outputs.run-unit-tests }} + archive-name: ${{ steps.release-inputs.outputs.archive-name }} + steps: + - name: "Inputs" + id: release-inputs + run: | + working_dir="./" + run_unit_tests=true + archive_name=${{ inputs.package }}-${{ inputs.version_number }}-${{ inputs.deploy-to }} - bump-version-generate-changelog: - name: Bump package version, Generate changelog - uses: dbt-labs/dbt-adapters/.github/workflows/release_prep_hatch.yml@bumpVersion + if test "${{ inputs.package }}" = "dbt-tests-adapter" + then + working_dir="./dbt-tests-adapter/" + run_unit_tests=false + fi + + echo "working-dir=$working_dir" >> $GITHUB_OUTPUT + echo "run-unit-tests=$run_unit_tests" >> 
$GITHUB_OUTPUT + echo "archive-name=$archive_name" >> $GITHUB_OUTPUT + + - name: "[DEBUG]" + run: | + echo package : ${{ inputs.package }} + echo working-dir : ${{ steps.release-inputs.outputs.working-dir }} + echo run-unit-tests : ${{ steps.release-inputs.outputs.run-unit-tests }} + echo archive-name : ${{ steps.release-inputs.outputs.archive-name }} + bump-version-generate-changelog: + name: "Bump package version, Generate changelog" + uses: dbt-labs/dbt-adapters/.github/workflows/release_prep_hatch.yml@main + needs: [release-inputs] with: version_number: ${{ inputs.version_number }} deploy_to: ${{ inputs.deploy-to }} nightly_release: ${{ inputs.nightly_release }} target_branch: ${{ inputs.target_branch }} - + working-dir: ${{ needs.release-inputs.outputs.working-dir }} + run-unit-tests: ${{ fromJSON(needs.release-inputs.outputs.run-unit-tests) }} secrets: inherit log-outputs-bump-version-generate-changelog: name: "[Log output] Bump package version, Generate changelog" if: ${{ !failure() && !cancelled() }} - needs: [bump-version-generate-changelog] + needs: [release-inputs, bump-version-generate-changelog] runs-on: ubuntu-latest steps: - name: Print variables @@ -97,7 +130,7 @@ jobs: build-and-test: name: "Build and Test" - needs: [log-outputs-bump-version-generate-changelog] + needs: [release-inputs, bump-version-generate-changelog] runs-on: ubuntu-latest permissions: id-token: write # IMPORTANT: this permission is mandatory for trusted publishing @@ -107,69 +140,41 @@ jobs: with: ref: ${{ needs.bump-version-generate-changelog.outputs.final_sha }} - - name: Setup `hatch` - uses: ./.github/actions/setup-hatch - - - name: Build `dbt-adapters` - if: ${{ inputs.package == 'dbt-adapters' }} - uses: ./.github/actions/build-hatch - - - name: Build `dbt-tests-adapter` - if: ${{ inputs.package == 'dbt-tests-adapter' }} - uses: ./.github/actions/build-hatch - with: - working-dir: "./dbt-tests-adapter/" - - - name: Setup `hatch` - uses: ./.github/actions/setup-hatch - - - 
name: Build `dbt-adapters` - if: ${{ inputs.package == 'dbt-adapters' }} - uses: ./.github/actions/build-hatch + - name: "Setup `hatch`" + uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main - - name: Build `dbt-tests-adapter` - if: ${{ inputs.package == 'dbt-tests-adapter' }} - uses: ./.github/actions/build-hatch + - name: "Build ${{ inputs.package }}" + uses: dbt-labs/dbt-adapters/.github/actions/build-hatch@main with: - working-dir: "./dbt-tests-adapter/" - - # this step is only needed for the release process - - name: "Upload Build Artifact" - if: ${{ github.event_name == 'workflow_call' }} - uses: actions/upload-artifact@v3 - with: - name: ${{ steps.version.outputs.version_number }} - path: | - ${{ inputs.changelog_path }} - ./dist/ - retention-days: 3 + working-dir: ${{ needs.release-inputs.outputs.working-dir }} + archive-name: ${{ needs.release-inputs.outputs.archive-name }} github-release: - name: GitHub Release - if: ${{ !failure() && !cancelled() }} - - needs: [build-and-test] - - uses: dbt-labs/dbt-release/.github/workflows/github-release.yml@main - + name: "GitHub Release" + # ToDo: update GH release to handle adding dbt-tests-adapter and dbt-adapter assets to the same release + if: ${{ !failure() && !cancelled() && inputs.package == 'dbt-adapters' }} + needs: [release-inputs, build-and-test, bump-version-generate-changelog] + uses: dbt-labs/dbt-adapters/.github/workflows/github-release.yml@main with: + sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }} version_number: ${{ inputs.version_number }} changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }} test_run: ${{ inputs.deploy-to == 'test' && true || false }} - sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }} + archive_name: ${{ needs.release-inputs.outputs.archive-name }} pypi-release: - name: Publish to PyPI + name: "Publish to PyPI" runs-on: ubuntu-latest - needs: [github-release] + needs: [release-inputs, build-and-test] 
environment: name: ${{ inputs.deploy-to }} url: ${{ vars.PYPI_PROJECT_URL }} steps: + - name: "Check out repository" + uses: actions/checkout@v4 + - name: "Publish to PyPI" - uses: ./.github/actions/publish-pypi + uses: dbt-labs/dbt-adapters/.github/actions/publish-pypi@main with: repository-url: ${{ vars.PYPI_REPOSITORY_URL }} - artifacts-dir: ${{ inputs.version_number }} - - + archive-name: ${{ needs.release-inputs.outputs.archive-name }} diff --git a/.github/workflows/release_prep_hatch.yml b/.github/workflows/release_prep_hatch.yml index c32e0a2f0..b043e19e9 100644 --- a/.github/workflows/release_prep_hatch.yml +++ b/.github/workflows/release_prep_hatch.yml @@ -53,6 +53,9 @@ on: type: string required: false default: '' + run-unit-tests: + type: boolean + default: false run-integration-tests: type: boolean default: false @@ -61,10 +64,17 @@ on: type: string required: false default: main + working-dir: + description: "The working directory to use for run statements" + type: string + default: "./" outputs: changelog_path: description: The path to the changelog for this version value: ${{ jobs.audit-changelog.outputs.changelog_path }} + final_sha: + description: The sha that will actually be released + value: ${{ jobs.determine-release-branch.outputs.final_sha }} secrets: FISHTOWN_BOT_PAT: description: "Token to commit/merge changes into branches" @@ -97,6 +107,9 @@ jobs: echo Target branch: ${{ inputs.target_branch }} echo Nightly release: ${{ inputs.nightly_release }} echo Optional env setup script: ${{ inputs.env_setup_script_path }} + echo run-unit-tests: ${{ inputs.run-unit-tests }} + echo run-integration-tests: ${{ inputs.run-integration-tests }} + echo working-dir: ${{ inputs.working-dir }} # ENVIRONMENT VARIABLES echo Python target version: ${{ env.PYTHON_TARGET_VERSION }} echo Notification prefix: ${{ env.NOTIFICATION_PREFIX }} @@ -188,6 +201,8 @@ jobs: is_updated=true fi echo "up_to_date=$is_updated" >> $GITHUB_OUTPUT + working-directory: ${{ 
inputs.working-dir }} + - name: "[Notification] Check Current Version In Code" run: | title="Version check" @@ -201,6 +216,7 @@ jobs: - name: "[DEBUG] Print Outputs" run: | echo up_to_date: ${{ steps.version-check.outputs.up_to_date }} + skip-generate-changelog: runs-on: ubuntu-latest needs: [audit-changelog] @@ -213,6 +229,7 @@ jobs: title="Skip changelog generation" message="A changelog file already exists at ${{ needs.audit-changelog.outputs.changelog_path }}, skipping generating changelog" echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message" + skip-version-bump: runs-on: ubuntu-latest needs: [audit-version-in-code] @@ -225,6 +242,7 @@ jobs: title="Skip version bump" message="The version has already been bumped to ${{ inputs.version_number }}, skipping version bump" echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message" + create-temp-branch: runs-on: ubuntu-latest needs: [audit-changelog, audit-version-in-code] @@ -333,6 +351,7 @@ jobs: if: needs.audit-version-in-code.outputs.up_to_date == 'false' run: | hatch version ${{ inputs.version_number }} + working-directory: ${{ inputs.working-dir }} - name: "[Notification] Bump Version To ${{ inputs.version_number }}" if: needs.audit-version-in-code.outputs.up_to_date == 'false' run: | @@ -344,13 +363,13 @@ jobs: - name: "Remove Trailing Whitespace Via Pre-commit" continue-on-error: true run: | - pre-commit run trailing-whitespace --files dbt_common/__about__.py CHANGELOG.md .changes/* + pre-commit run trailing-whitespace --files dbt/adapters/__about__.py CHANGELOG.md .changes/* git status # this step will fail on newline errors but also correct them - name: "Removing Extra Newlines Via Pre-commit" continue-on-error: true run: | - pre-commit run end-of-file-fixer --files dbt_common/__about__.py CHANGELOG.md .changes/* + pre-commit run end-of-file-fixer --files dbt/adapters/__about__.py CHANGELOG.md .changes/* git status - name: "Commit & Push Changes" run: | @@ -367,6 +386,7 @@ jobs: 
git push run-unit-tests: + if: inputs.run-unit-tests == true runs-on: ubuntu-latest needs: [create-temp-branch, generate-changelog-bump-version] @@ -427,14 +447,14 @@ jobs: python-version: ${{ env.PYTHON_TARGET_VERSION }} - name: Run tests - run: hatch run test:integration + run: hatch run integration-tests:all merge-changes-into-target-branch: runs-on: ubuntu-latest needs: [run-unit-tests, run-integration-tests, create-temp-branch, audit-version-in-code, audit-changelog] if: | !failure() && !cancelled() && - inputs.deploy_to == 'test' && + inputs.deploy_to == 'prod' && ( needs.audit-changelog.outputs.exists == 'false' || needs.audit-version-in-code.outputs.up_to_date == 'false' @@ -449,11 +469,11 @@ jobs: - name: "Checkout Repo ${{ github.repository }}" uses: actions/checkout@v3 - - name: "Merge Changes Into main" + - name: "Merge Changes Into ${{ inputs.target_branch }}" uses: everlytic/branch-merge@1.1.5 with: source_ref: ${{ needs.create-temp-branch.outputs.branch_name }} - target_branch: "main" + target_branch: ${{ inputs.target_branch }} github_token: ${{ secrets.FISHTOWN_BOT_PAT }} commit_message_template: "[Automated] Merged {source_ref} into target {target_branch} during release process" @@ -495,7 +515,7 @@ jobs: then branch=${{ needs.create-temp-branch.outputs.branch_name }} else - branch="main" + branch="${{ inputs.target_branch }}" fi echo "target_branch=$branch" >> $GITHUB_OUTPUT - name: "[Notification] Resolve Branch To Checkout" @@ -511,7 +531,12 @@ jobs: - name: "[Debug] Log Branch" run: git status + - name: "Resolve Commit SHA For Release" + id: resolve_commit_sha + run: | + echo "release_sha=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT + - name: "Remove Temp Branch - ${{ needs.create-temp-branch.outputs.branch_name }}" if: ${{ inputs.deploy_to == 'prod' && inputs.nightly_release == 'false' && needs.create-temp-branch.outputs.branch_name != '' }} run: | - git push origin -d ${{ needs.create-temp-branch.outputs.branch_name }} \ No newline at end of file 
+ git push origin -d ${{ needs.create-temp-branch.outputs.branch_name }} diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 26ff4aaac..7d7206552 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -23,7 +23,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.8", "3.9", "3.10", "3.11"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] steps: - name: Check out repository diff --git a/.gitignore b/.gitignore index cf98fcf8b..a14d6d0db 100644 --- a/.gitignore +++ b/.gitignore @@ -153,4 +153,4 @@ dmypy.json cython_debug/ # PyCharm -.idea/ \ No newline at end of file +.idea/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..caf342092 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,57 @@ +default_language_version: + python: python3 + +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.6.0 + hooks: + - id: check-yaml + args: [--unsafe] + - id: check-json + - id: end-of-file-fixer + - id: trailing-whitespace + - id: check-case-conflict + +- repo: https://github.com/dbt-labs/pre-commit-hooks + rev: v0.1.0a1 + hooks: + - id: dbt-core-in-adapters-check + +- repo: https://github.com/psf/black + rev: 24.4.0 + hooks: + - id: black + args: + - --line-length=99 + - --target-version=py38 + - --target-version=py39 + - --target-version=py310 + - --target-version=py311 + - --force-exclude=dbt/adapters/events/adapter_types_pb2.py + +- repo: https://github.com/pycqa/flake8 + rev: 7.0.0 + hooks: + - id: flake8 + exclude: dbt/adapters/events/adapter_types_pb2.py|tests/functional/ + args: + - --max-line-length=99 + - --select=E,F,W + - --ignore=E203,E501,E704,E741,W503,W504 + - --per-file-ignores=*/__init__.py:F401 + +- repo: https://github.com/pre-commit/mirrors-mypy + rev: v1.9.0 + hooks: + - id: mypy + exclude: 
dbt/adapters/events/adapter_types_pb2.py|dbt-tests-adapter/dbt/__init__.py + args: + - --explicit-package-bases + - --ignore-missing-imports + - --pretty + - --show-error-codes + files: ^dbt/adapters/ + additional_dependencies: + - types-PyYAML + - types-protobuf + - types-pytz diff --git a/CHANGELOG.md b/CHANGELOG.md index 75eaec3e2..0af6f6927 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,28 +5,31 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html), and is generated by [Changie](https://github.com/miniscruff/changie). -## dbt-adapter 0.1.0-a8 - February 22, 2024 +## dbt-adapter 1.1.0-rc1 - April 17, 2024 ### Features -* Update RelationConfig to capture all fields used by adapters +* Debug log when `type_code` fails to convert to a `data_type` +* Introduce TableLastModifiedMetadataBatch and implement BaseAdapter.calculate_freshness_from_metadata_batch +* Support for sql fixtures in unit testing -### Fixes +### Under the Hood -* Ignore adapter-level support warnings for 'custom' constraints -* Make all adapter zone tests importable by removing "Test" prefix +* Add the option to set the log level of the AdapterRegistered event +* Update dependabot config to cover GHA -### Docs +## dbt-adapter 1.0.0 - April 01, 2024 -* Configure `changie` -* Setup ADR tracking framework -* Create issue templates -* Create PR template +### Fixes + +* Add field wrapper to BaseRelation members that were missing it. 
+* Add "description" and "meta" fields to RelationConfig protocol ### Under the Hood -* Configure `dependabot` -* Implement unit testing in CI -* Allow version to be specified in either __version__.py or __about__.py -* Remove __init__.py file from dbt.tests +* Lazy load agate to improve dbt-core performance +* add BaseAdapater.MAX_SCHEMA_METADATA_RELATIONS + +### Security +* Pin `black>=24.3` in `pyproject.toml` diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index eb0002fa3..e1b871034 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -163,7 +163,7 @@ Remember to commit and push the file that's created. ### Signing the CLA -> **_NOTE:_** All contributors to `dbt-adapter` must sign the +> **_NOTE:_** All contributors to `dbt-adapter` must sign the > [Contributor License Agreement](https://docs.getdbt.com/docs/contributor-license-agreements)(CLA). Maintainers will be unable to merge contributions until the contributor signs the CLA. diff --git a/dbt-tests-adapter/dbt/__init__.py b/dbt-tests-adapter/dbt/__init__.py new file mode 100644 index 000000000..782ff40f7 --- /dev/null +++ b/dbt-tests-adapter/dbt/__init__.py @@ -0,0 +1,6 @@ +# N.B. 
+# This will add to the package’s __path__ all subdirectories of directories on sys.path named after the package which effectively combines both modules into a single namespace (dbt.adapters) + +from pkgutil import extend_path + +__path__ = extend_path(__path__, __name__) diff --git a/dbt-tests-adapter/dbt/tests/__about__.py b/dbt-tests-adapter/dbt/tests/__about__.py new file mode 100644 index 000000000..6496f3e22 --- /dev/null +++ b/dbt-tests-adapter/dbt/tests/__about__.py @@ -0,0 +1 @@ +version = "1.8.0b1" diff --git a/dbt/tests/adapter/aliases/fixtures.py b/dbt-tests-adapter/dbt/tests/adapter/aliases/fixtures.py similarity index 100% rename from dbt/tests/adapter/aliases/fixtures.py rename to dbt-tests-adapter/dbt/tests/adapter/aliases/fixtures.py diff --git a/dbt/tests/adapter/aliases/test_aliases.py b/dbt-tests-adapter/dbt/tests/adapter/aliases/test_aliases.py similarity index 100% rename from dbt/tests/adapter/aliases/test_aliases.py rename to dbt-tests-adapter/dbt/tests/adapter/aliases/test_aliases.py diff --git a/dbt/tests/adapter/basic/__init__.py b/dbt-tests-adapter/dbt/tests/adapter/basic/__init__.py similarity index 100% rename from dbt/tests/adapter/basic/__init__.py rename to dbt-tests-adapter/dbt/tests/adapter/basic/__init__.py diff --git a/dbt/tests/adapter/basic/expected_catalog.py b/dbt-tests-adapter/dbt/tests/adapter/basic/expected_catalog.py similarity index 100% rename from dbt/tests/adapter/basic/expected_catalog.py rename to dbt-tests-adapter/dbt/tests/adapter/basic/expected_catalog.py diff --git a/dbt/tests/adapter/basic/files.py b/dbt-tests-adapter/dbt/tests/adapter/basic/files.py similarity index 100% rename from dbt/tests/adapter/basic/files.py rename to dbt-tests-adapter/dbt/tests/adapter/basic/files.py diff --git a/dbt/tests/adapter/basic/test_adapter_methods.py b/dbt-tests-adapter/dbt/tests/adapter/basic/test_adapter_methods.py similarity index 100% rename from dbt/tests/adapter/basic/test_adapter_methods.py rename to 
dbt-tests-adapter/dbt/tests/adapter/basic/test_adapter_methods.py diff --git a/dbt/tests/adapter/basic/test_base.py b/dbt-tests-adapter/dbt/tests/adapter/basic/test_base.py similarity index 100% rename from dbt/tests/adapter/basic/test_base.py rename to dbt-tests-adapter/dbt/tests/adapter/basic/test_base.py diff --git a/dbt/tests/adapter/basic/test_docs_generate.py b/dbt-tests-adapter/dbt/tests/adapter/basic/test_docs_generate.py similarity index 100% rename from dbt/tests/adapter/basic/test_docs_generate.py rename to dbt-tests-adapter/dbt/tests/adapter/basic/test_docs_generate.py diff --git a/dbt/tests/adapter/basic/test_empty.py b/dbt-tests-adapter/dbt/tests/adapter/basic/test_empty.py similarity index 100% rename from dbt/tests/adapter/basic/test_empty.py rename to dbt-tests-adapter/dbt/tests/adapter/basic/test_empty.py diff --git a/dbt/tests/adapter/basic/test_ephemeral.py b/dbt-tests-adapter/dbt/tests/adapter/basic/test_ephemeral.py similarity index 100% rename from dbt/tests/adapter/basic/test_ephemeral.py rename to dbt-tests-adapter/dbt/tests/adapter/basic/test_ephemeral.py diff --git a/dbt/tests/adapter/basic/test_generic_tests.py b/dbt-tests-adapter/dbt/tests/adapter/basic/test_generic_tests.py similarity index 100% rename from dbt/tests/adapter/basic/test_generic_tests.py rename to dbt-tests-adapter/dbt/tests/adapter/basic/test_generic_tests.py diff --git a/dbt/tests/adapter/basic/test_incremental.py b/dbt-tests-adapter/dbt/tests/adapter/basic/test_incremental.py similarity index 100% rename from dbt/tests/adapter/basic/test_incremental.py rename to dbt-tests-adapter/dbt/tests/adapter/basic/test_incremental.py diff --git a/dbt/tests/adapter/basic/test_singular_tests.py b/dbt-tests-adapter/dbt/tests/adapter/basic/test_singular_tests.py similarity index 100% rename from dbt/tests/adapter/basic/test_singular_tests.py rename to dbt-tests-adapter/dbt/tests/adapter/basic/test_singular_tests.py diff --git 
a/dbt/tests/adapter/basic/test_singular_tests_ephemeral.py b/dbt-tests-adapter/dbt/tests/adapter/basic/test_singular_tests_ephemeral.py similarity index 100% rename from dbt/tests/adapter/basic/test_singular_tests_ephemeral.py rename to dbt-tests-adapter/dbt/tests/adapter/basic/test_singular_tests_ephemeral.py diff --git a/dbt/tests/adapter/basic/test_snapshot_check_cols.py b/dbt-tests-adapter/dbt/tests/adapter/basic/test_snapshot_check_cols.py similarity index 100% rename from dbt/tests/adapter/basic/test_snapshot_check_cols.py rename to dbt-tests-adapter/dbt/tests/adapter/basic/test_snapshot_check_cols.py diff --git a/dbt/tests/adapter/basic/test_snapshot_timestamp.py b/dbt-tests-adapter/dbt/tests/adapter/basic/test_snapshot_timestamp.py similarity index 100% rename from dbt/tests/adapter/basic/test_snapshot_timestamp.py rename to dbt-tests-adapter/dbt/tests/adapter/basic/test_snapshot_timestamp.py diff --git a/dbt/tests/adapter/basic/test_table_materialization.py b/dbt-tests-adapter/dbt/tests/adapter/basic/test_table_materialization.py similarity index 100% rename from dbt/tests/adapter/basic/test_table_materialization.py rename to dbt-tests-adapter/dbt/tests/adapter/basic/test_table_materialization.py diff --git a/dbt/tests/adapter/basic/test_validate_connection.py b/dbt-tests-adapter/dbt/tests/adapter/basic/test_validate_connection.py similarity index 100% rename from dbt/tests/adapter/basic/test_validate_connection.py rename to dbt-tests-adapter/dbt/tests/adapter/basic/test_validate_connection.py diff --git a/dbt/tests/adapter/caching/test_caching.py b/dbt-tests-adapter/dbt/tests/adapter/caching/test_caching.py similarity index 100% rename from dbt/tests/adapter/caching/test_caching.py rename to dbt-tests-adapter/dbt/tests/adapter/caching/test_caching.py diff --git a/dbt/tests/adapter/catalog/files.py b/dbt-tests-adapter/dbt/tests/adapter/catalog/files.py similarity index 100% rename from dbt/tests/adapter/catalog/files.py rename to 
dbt-tests-adapter/dbt/tests/adapter/catalog/files.py diff --git a/dbt/tests/adapter/catalog/relation_types.py b/dbt-tests-adapter/dbt/tests/adapter/catalog/relation_types.py similarity index 100% rename from dbt/tests/adapter/catalog/relation_types.py rename to dbt-tests-adapter/dbt/tests/adapter/catalog/relation_types.py diff --git a/dbt/tests/adapter/column_types/fixtures.py b/dbt-tests-adapter/dbt/tests/adapter/column_types/fixtures.py similarity index 100% rename from dbt/tests/adapter/column_types/fixtures.py rename to dbt-tests-adapter/dbt/tests/adapter/column_types/fixtures.py diff --git a/dbt/tests/adapter/column_types/test_column_types.py b/dbt-tests-adapter/dbt/tests/adapter/column_types/test_column_types.py similarity index 100% rename from dbt/tests/adapter/column_types/test_column_types.py rename to dbt-tests-adapter/dbt/tests/adapter/column_types/test_column_types.py diff --git a/dbt/tests/adapter/concurrency/test_concurrency.py b/dbt-tests-adapter/dbt/tests/adapter/concurrency/test_concurrency.py similarity index 100% rename from dbt/tests/adapter/concurrency/test_concurrency.py rename to dbt-tests-adapter/dbt/tests/adapter/concurrency/test_concurrency.py diff --git a/dbt/tests/adapter/constraints/fixtures.py b/dbt-tests-adapter/dbt/tests/adapter/constraints/fixtures.py similarity index 100% rename from dbt/tests/adapter/constraints/fixtures.py rename to dbt-tests-adapter/dbt/tests/adapter/constraints/fixtures.py diff --git a/dbt/tests/adapter/constraints/test_constraints.py b/dbt-tests-adapter/dbt/tests/adapter/constraints/test_constraints.py similarity index 100% rename from dbt/tests/adapter/constraints/test_constraints.py rename to dbt-tests-adapter/dbt/tests/adapter/constraints/test_constraints.py diff --git a/dbt/tests/adapter/dbt_clone/fixtures.py b/dbt-tests-adapter/dbt/tests/adapter/dbt_clone/fixtures.py similarity index 100% rename from dbt/tests/adapter/dbt_clone/fixtures.py rename to 
dbt-tests-adapter/dbt/tests/adapter/dbt_clone/fixtures.py diff --git a/dbt/tests/adapter/dbt_clone/test_dbt_clone.py b/dbt-tests-adapter/dbt/tests/adapter/dbt_clone/test_dbt_clone.py similarity index 100% rename from dbt/tests/adapter/dbt_clone/test_dbt_clone.py rename to dbt-tests-adapter/dbt/tests/adapter/dbt_clone/test_dbt_clone.py diff --git a/dbt/tests/adapter/dbt_debug/test_dbt_debug.py b/dbt-tests-adapter/dbt/tests/adapter/dbt_debug/test_dbt_debug.py similarity index 100% rename from dbt/tests/adapter/dbt_debug/test_dbt_debug.py rename to dbt-tests-adapter/dbt/tests/adapter/dbt_debug/test_dbt_debug.py diff --git a/dbt/tests/adapter/dbt_show/fixtures.py b/dbt-tests-adapter/dbt/tests/adapter/dbt_show/fixtures.py similarity index 100% rename from dbt/tests/adapter/dbt_show/fixtures.py rename to dbt-tests-adapter/dbt/tests/adapter/dbt_show/fixtures.py diff --git a/dbt/tests/adapter/dbt_show/test_dbt_show.py b/dbt-tests-adapter/dbt/tests/adapter/dbt_show/test_dbt_show.py similarity index 100% rename from dbt/tests/adapter/dbt_show/test_dbt_show.py rename to dbt-tests-adapter/dbt/tests/adapter/dbt_show/test_dbt_show.py diff --git a/dbt/tests/adapter/empty/test_empty.py b/dbt-tests-adapter/dbt/tests/adapter/empty/test_empty.py similarity index 75% rename from dbt/tests/adapter/empty/test_empty.py rename to dbt-tests-adapter/dbt/tests/adapter/empty/test_empty.py index 373a13ee9..2249d98d5 100644 --- a/dbt/tests/adapter/empty/test_empty.py +++ b/dbt-tests-adapter/dbt/tests/adapter/empty/test_empty.py @@ -72,5 +72,24 @@ def test_run_with_empty(self, project): self.assert_row_count(project, "model", 0) +class BaseTestEmptyInlineSourceRef(BaseTestEmpty): + @pytest.fixture(scope="class") + def models(self): + model_sql = """ + select * from {{ source('seed_sources', 'raw_source') }} as raw_source + """ + + return { + "model.sql": model_sql, + "sources.yml": schema_sources_yml, + } + + def test_run_with_empty(self, project): + # create source from seed + run_dbt(["seed"]) 
+ run_dbt(["run", "--empty", "--debug"]) + self.assert_row_count(project, "model", 0) + + class TestEmpty(BaseTestEmpty): pass diff --git a/dbt/tests/adapter/ephemeral/test_ephemeral.py b/dbt-tests-adapter/dbt/tests/adapter/ephemeral/test_ephemeral.py similarity index 100% rename from dbt/tests/adapter/ephemeral/test_ephemeral.py rename to dbt-tests-adapter/dbt/tests/adapter/ephemeral/test_ephemeral.py diff --git a/dbt/tests/adapter/grants/base_grants.py b/dbt-tests-adapter/dbt/tests/adapter/grants/base_grants.py similarity index 100% rename from dbt/tests/adapter/grants/base_grants.py rename to dbt-tests-adapter/dbt/tests/adapter/grants/base_grants.py diff --git a/dbt/tests/adapter/grants/test_incremental_grants.py b/dbt-tests-adapter/dbt/tests/adapter/grants/test_incremental_grants.py similarity index 100% rename from dbt/tests/adapter/grants/test_incremental_grants.py rename to dbt-tests-adapter/dbt/tests/adapter/grants/test_incremental_grants.py diff --git a/dbt/tests/adapter/grants/test_invalid_grants.py b/dbt-tests-adapter/dbt/tests/adapter/grants/test_invalid_grants.py similarity index 100% rename from dbt/tests/adapter/grants/test_invalid_grants.py rename to dbt-tests-adapter/dbt/tests/adapter/grants/test_invalid_grants.py diff --git a/dbt/tests/adapter/grants/test_model_grants.py b/dbt-tests-adapter/dbt/tests/adapter/grants/test_model_grants.py similarity index 100% rename from dbt/tests/adapter/grants/test_model_grants.py rename to dbt-tests-adapter/dbt/tests/adapter/grants/test_model_grants.py diff --git a/dbt/tests/adapter/grants/test_seed_grants.py b/dbt-tests-adapter/dbt/tests/adapter/grants/test_seed_grants.py similarity index 100% rename from dbt/tests/adapter/grants/test_seed_grants.py rename to dbt-tests-adapter/dbt/tests/adapter/grants/test_seed_grants.py diff --git a/dbt/tests/adapter/grants/test_snapshot_grants.py b/dbt-tests-adapter/dbt/tests/adapter/grants/test_snapshot_grants.py similarity index 100% rename from 
dbt/tests/adapter/grants/test_snapshot_grants.py rename to dbt-tests-adapter/dbt/tests/adapter/grants/test_snapshot_grants.py diff --git a/dbt/tests/adapter/hooks/data/seed_model.sql b/dbt-tests-adapter/dbt/tests/adapter/hooks/data/seed_model.sql similarity index 100% rename from dbt/tests/adapter/hooks/data/seed_model.sql rename to dbt-tests-adapter/dbt/tests/adapter/hooks/data/seed_model.sql diff --git a/dbt/tests/adapter/hooks/data/seed_run.sql b/dbt-tests-adapter/dbt/tests/adapter/hooks/data/seed_run.sql similarity index 100% rename from dbt/tests/adapter/hooks/data/seed_run.sql rename to dbt-tests-adapter/dbt/tests/adapter/hooks/data/seed_run.sql diff --git a/dbt/tests/adapter/hooks/fixtures.py b/dbt-tests-adapter/dbt/tests/adapter/hooks/fixtures.py similarity index 100% rename from dbt/tests/adapter/hooks/fixtures.py rename to dbt-tests-adapter/dbt/tests/adapter/hooks/fixtures.py diff --git a/dbt/tests/adapter/hooks/test_model_hooks.py b/dbt-tests-adapter/dbt/tests/adapter/hooks/test_model_hooks.py similarity index 99% rename from dbt/tests/adapter/hooks/test_model_hooks.py rename to dbt-tests-adapter/dbt/tests/adapter/hooks/test_model_hooks.py index 6a544af01..8423c9ca8 100644 --- a/dbt/tests/adapter/hooks/test_model_hooks.py +++ b/dbt-tests-adapter/dbt/tests/adapter/hooks/test_model_hooks.py @@ -1,6 +1,7 @@ from pathlib import Path from dbt_common.exceptions import CompilationError + # TODO: does this belong in dbt-tests-adapter? 
from dbt.exceptions import ParsingError import pytest diff --git a/dbt/tests/adapter/hooks/test_run_hooks.py b/dbt-tests-adapter/dbt/tests/adapter/hooks/test_run_hooks.py similarity index 100% rename from dbt/tests/adapter/hooks/test_run_hooks.py rename to dbt-tests-adapter/dbt/tests/adapter/hooks/test_run_hooks.py diff --git a/dbt/tests/adapter/incremental/fixtures.py b/dbt-tests-adapter/dbt/tests/adapter/incremental/fixtures.py similarity index 100% rename from dbt/tests/adapter/incremental/fixtures.py rename to dbt-tests-adapter/dbt/tests/adapter/incremental/fixtures.py diff --git a/dbt/tests/adapter/incremental/test_incremental_merge_exclude_columns.py b/dbt-tests-adapter/dbt/tests/adapter/incremental/test_incremental_merge_exclude_columns.py similarity index 100% rename from dbt/tests/adapter/incremental/test_incremental_merge_exclude_columns.py rename to dbt-tests-adapter/dbt/tests/adapter/incremental/test_incremental_merge_exclude_columns.py diff --git a/dbt/tests/adapter/incremental/test_incremental_on_schema_change.py b/dbt-tests-adapter/dbt/tests/adapter/incremental/test_incremental_on_schema_change.py similarity index 100% rename from dbt/tests/adapter/incremental/test_incremental_on_schema_change.py rename to dbt-tests-adapter/dbt/tests/adapter/incremental/test_incremental_on_schema_change.py diff --git a/dbt/tests/adapter/incremental/test_incremental_predicates.py b/dbt-tests-adapter/dbt/tests/adapter/incremental/test_incremental_predicates.py similarity index 100% rename from dbt/tests/adapter/incremental/test_incremental_predicates.py rename to dbt-tests-adapter/dbt/tests/adapter/incremental/test_incremental_predicates.py diff --git a/dbt/tests/adapter/incremental/test_incremental_unique_id.py b/dbt-tests-adapter/dbt/tests/adapter/incremental/test_incremental_unique_id.py similarity index 100% rename from dbt/tests/adapter/incremental/test_incremental_unique_id.py rename to dbt-tests-adapter/dbt/tests/adapter/incremental/test_incremental_unique_id.py 
diff --git a/dbt/tests/adapter/materialized_view/basic.py b/dbt-tests-adapter/dbt/tests/adapter/materialized_view/basic.py similarity index 100% rename from dbt/tests/adapter/materialized_view/basic.py rename to dbt-tests-adapter/dbt/tests/adapter/materialized_view/basic.py diff --git a/dbt/tests/adapter/materialized_view/changes.py b/dbt-tests-adapter/dbt/tests/adapter/materialized_view/changes.py similarity index 100% rename from dbt/tests/adapter/materialized_view/changes.py rename to dbt-tests-adapter/dbt/tests/adapter/materialized_view/changes.py diff --git a/dbt/tests/adapter/materialized_view/files.py b/dbt-tests-adapter/dbt/tests/adapter/materialized_view/files.py similarity index 100% rename from dbt/tests/adapter/materialized_view/files.py rename to dbt-tests-adapter/dbt/tests/adapter/materialized_view/files.py diff --git a/dbt/tests/adapter/persist_docs/fixtures.py b/dbt-tests-adapter/dbt/tests/adapter/persist_docs/fixtures.py similarity index 100% rename from dbt/tests/adapter/persist_docs/fixtures.py rename to dbt-tests-adapter/dbt/tests/adapter/persist_docs/fixtures.py diff --git a/dbt/tests/adapter/persist_docs/test_persist_docs.py b/dbt-tests-adapter/dbt/tests/adapter/persist_docs/test_persist_docs.py similarity index 100% rename from dbt/tests/adapter/persist_docs/test_persist_docs.py rename to dbt-tests-adapter/dbt/tests/adapter/persist_docs/test_persist_docs.py diff --git a/dbt/tests/adapter/python_model/test_python_model.py b/dbt-tests-adapter/dbt/tests/adapter/python_model/test_python_model.py similarity index 100% rename from dbt/tests/adapter/python_model/test_python_model.py rename to dbt-tests-adapter/dbt/tests/adapter/python_model/test_python_model.py diff --git a/dbt/tests/adapter/python_model/test_spark.py b/dbt-tests-adapter/dbt/tests/adapter/python_model/test_spark.py similarity index 100% rename from dbt/tests/adapter/python_model/test_spark.py rename to dbt-tests-adapter/dbt/tests/adapter/python_model/test_spark.py diff --git 
a/dbt/tests/adapter/query_comment/fixtures.py b/dbt-tests-adapter/dbt/tests/adapter/query_comment/fixtures.py similarity index 95% rename from dbt/tests/adapter/query_comment/fixtures.py rename to dbt-tests-adapter/dbt/tests/adapter/query_comment/fixtures.py index d8848dc08..ccaf32920 100644 --- a/dbt/tests/adapter/query_comment/fixtures.py +++ b/dbt-tests-adapter/dbt/tests/adapter/query_comment/fixtures.py @@ -10,7 +10,6 @@ {%- set comment_dict = dict( app='dbt++', macro_version='0.1.0', - dbt_version=dbt_version, message='blah: '~ message) -%} {{ return(comment_dict) }} {%- endmacro -%} diff --git a/dbt/tests/adapter/query_comment/test_query_comment.py b/dbt-tests-adapter/dbt/tests/adapter/query_comment/test_query_comment.py similarity index 91% rename from dbt/tests/adapter/query_comment/test_query_comment.py rename to dbt-tests-adapter/dbt/tests/adapter/query_comment/test_query_comment.py index 66251c1da..4453c2739 100644 --- a/dbt/tests/adapter/query_comment/test_query_comment.py +++ b/dbt-tests-adapter/dbt/tests/adapter/query_comment/test_query_comment.py @@ -1,5 +1,4 @@ import json -from importlib import import_module import pytest from dbt_common.exceptions import DbtRuntimeError @@ -53,19 +52,15 @@ def test_matches_comment(self, project): class BaseMacroArgsQueryComments(BaseDefaultQueryComments): - @pytest.fixture(scope="class") - def get_package_version(self, project): - return import_module("." 
+ project.adapter_type, "dbt.adapters").__version__.version @pytest.fixture(scope="class") def project_config_update(self): return {"query-comment": "{{ return(ordered_to_json(query_header_args(target.name))) }}"} - def test_matches_comment(self, project, get_package_version): + def test_matches_comment(self, project): logs = self.run_get_json() expected_dct = { "app": "dbt++", - "dbt_version": get_package_version, "macro_version": "0.1.0", "message": f"blah: {project.adapter.config.target_name}", } diff --git a/dbt/tests/adapter/relations/test_changing_relation_type.py b/dbt-tests-adapter/dbt/tests/adapter/relations/test_changing_relation_type.py similarity index 100% rename from dbt/tests/adapter/relations/test_changing_relation_type.py rename to dbt-tests-adapter/dbt/tests/adapter/relations/test_changing_relation_type.py diff --git a/dbt/tests/adapter/relations/test_dropping_schema_named.py b/dbt-tests-adapter/dbt/tests/adapter/relations/test_dropping_schema_named.py similarity index 100% rename from dbt/tests/adapter/relations/test_dropping_schema_named.py rename to dbt-tests-adapter/dbt/tests/adapter/relations/test_dropping_schema_named.py diff --git a/dbt/tests/adapter/simple_copy/fixtures.py b/dbt-tests-adapter/dbt/tests/adapter/simple_copy/fixtures.py similarity index 100% rename from dbt/tests/adapter/simple_copy/fixtures.py rename to dbt-tests-adapter/dbt/tests/adapter/simple_copy/fixtures.py diff --git a/dbt/tests/adapter/simple_copy/test_copy_uppercase.py b/dbt-tests-adapter/dbt/tests/adapter/simple_copy/test_copy_uppercase.py similarity index 100% rename from dbt/tests/adapter/simple_copy/test_copy_uppercase.py rename to dbt-tests-adapter/dbt/tests/adapter/simple_copy/test_copy_uppercase.py diff --git a/dbt/tests/adapter/simple_copy/test_simple_copy.py b/dbt-tests-adapter/dbt/tests/adapter/simple_copy/test_simple_copy.py similarity index 100% rename from dbt/tests/adapter/simple_copy/test_simple_copy.py rename to 
dbt-tests-adapter/dbt/tests/adapter/simple_copy/test_simple_copy.py diff --git a/dbt/tests/adapter/simple_seed/fixtures.py b/dbt-tests-adapter/dbt/tests/adapter/simple_seed/fixtures.py similarity index 100% rename from dbt/tests/adapter/simple_seed/fixtures.py rename to dbt-tests-adapter/dbt/tests/adapter/simple_seed/fixtures.py diff --git a/dbt/tests/adapter/simple_seed/seed_bom.csv b/dbt-tests-adapter/dbt/tests/adapter/simple_seed/seed_bom.csv similarity index 100% rename from dbt/tests/adapter/simple_seed/seed_bom.csv rename to dbt-tests-adapter/dbt/tests/adapter/simple_seed/seed_bom.csv diff --git a/dbt/tests/adapter/simple_seed/seeds.py b/dbt-tests-adapter/dbt/tests/adapter/simple_seed/seeds.py similarity index 100% rename from dbt/tests/adapter/simple_seed/seeds.py rename to dbt-tests-adapter/dbt/tests/adapter/simple_seed/seeds.py diff --git a/dbt/tests/adapter/simple_seed/test_seed.py b/dbt-tests-adapter/dbt/tests/adapter/simple_seed/test_seed.py similarity index 100% rename from dbt/tests/adapter/simple_seed/test_seed.py rename to dbt-tests-adapter/dbt/tests/adapter/simple_seed/test_seed.py diff --git a/dbt/tests/adapter/simple_seed/test_seed_type_override.py b/dbt-tests-adapter/dbt/tests/adapter/simple_seed/test_seed_type_override.py similarity index 100% rename from dbt/tests/adapter/simple_seed/test_seed_type_override.py rename to dbt-tests-adapter/dbt/tests/adapter/simple_seed/test_seed_type_override.py diff --git a/dbt/tests/adapter/simple_snapshot/common.py b/dbt-tests-adapter/dbt/tests/adapter/simple_snapshot/common.py similarity index 100% rename from dbt/tests/adapter/simple_snapshot/common.py rename to dbt-tests-adapter/dbt/tests/adapter/simple_snapshot/common.py diff --git a/dbt/tests/adapter/simple_snapshot/seeds.py b/dbt-tests-adapter/dbt/tests/adapter/simple_snapshot/seeds.py similarity index 100% rename from dbt/tests/adapter/simple_snapshot/seeds.py rename to dbt-tests-adapter/dbt/tests/adapter/simple_snapshot/seeds.py diff --git 
a/dbt/tests/adapter/simple_snapshot/snapshots.py b/dbt-tests-adapter/dbt/tests/adapter/simple_snapshot/snapshots.py similarity index 100% rename from dbt/tests/adapter/simple_snapshot/snapshots.py rename to dbt-tests-adapter/dbt/tests/adapter/simple_snapshot/snapshots.py diff --git a/dbt/tests/adapter/simple_snapshot/test_snapshot.py b/dbt-tests-adapter/dbt/tests/adapter/simple_snapshot/test_snapshot.py similarity index 100% rename from dbt/tests/adapter/simple_snapshot/test_snapshot.py rename to dbt-tests-adapter/dbt/tests/adapter/simple_snapshot/test_snapshot.py diff --git a/dbt/tests/adapter/store_test_failures_tests/_files.py b/dbt-tests-adapter/dbt/tests/adapter/store_test_failures_tests/_files.py similarity index 100% rename from dbt/tests/adapter/store_test_failures_tests/_files.py rename to dbt-tests-adapter/dbt/tests/adapter/store_test_failures_tests/_files.py diff --git a/dbt/tests/adapter/store_test_failures_tests/basic.py b/dbt-tests-adapter/dbt/tests/adapter/store_test_failures_tests/basic.py similarity index 100% rename from dbt/tests/adapter/store_test_failures_tests/basic.py rename to dbt-tests-adapter/dbt/tests/adapter/store_test_failures_tests/basic.py diff --git a/dbt/tests/adapter/store_test_failures_tests/fixtures.py b/dbt-tests-adapter/dbt/tests/adapter/store_test_failures_tests/fixtures.py similarity index 100% rename from dbt/tests/adapter/store_test_failures_tests/fixtures.py rename to dbt-tests-adapter/dbt/tests/adapter/store_test_failures_tests/fixtures.py diff --git a/dbt/tests/adapter/store_test_failures_tests/test_store_test_failures.py b/dbt-tests-adapter/dbt/tests/adapter/store_test_failures_tests/test_store_test_failures.py similarity index 100% rename from dbt/tests/adapter/store_test_failures_tests/test_store_test_failures.py rename to dbt-tests-adapter/dbt/tests/adapter/store_test_failures_tests/test_store_test_failures.py diff --git a/dbt/tests/adapter/unit_testing/test_case_insensitivity.py 
b/dbt-tests-adapter/dbt/tests/adapter/unit_testing/test_case_insensitivity.py similarity index 100% rename from dbt/tests/adapter/unit_testing/test_case_insensitivity.py rename to dbt-tests-adapter/dbt/tests/adapter/unit_testing/test_case_insensitivity.py diff --git a/dbt/tests/adapter/unit_testing/test_invalid_input.py b/dbt-tests-adapter/dbt/tests/adapter/unit_testing/test_invalid_input.py similarity index 82% rename from dbt/tests/adapter/unit_testing/test_invalid_input.py rename to dbt-tests-adapter/dbt/tests/adapter/unit_testing/test_invalid_input.py index 6c41ceb93..c5bf2a09b 100644 --- a/dbt/tests/adapter/unit_testing/test_invalid_input.py +++ b/dbt-tests-adapter/dbt/tests/adapter/unit_testing/test_invalid_input.py @@ -46,16 +46,22 @@ def models(self): def test_invalid_input(self, project): results = run_dbt(["run"]) assert len(results) == 2 - + _, out = run_dbt_and_capture( ["test", "--select", "test_name:test_invalid_input_column_name"], expect_pass=False ) - assert "Invalid column name: 'invalid_column_name' in unit test fixture for 'my_upstream_model'." in out - + assert ( + "Invalid column name: 'invalid_column_name' in unit test fixture for 'my_upstream_model'." + in out + ) + _, out = run_dbt_and_capture( ["test", "--select", "test_name:test_invalid_expect_column_name"], expect_pass=False ) - assert "Invalid column name: 'invalid_column_name' in unit test fixture for expected output." in out + assert ( + "Invalid column name: 'invalid_column_name' in unit test fixture for expected output." 
+ in out + ) class TestPostgresUnitTestInvalidInput(BaseUnitTestInvalidInput): diff --git a/dbt/tests/adapter/unit_testing/test_types.py b/dbt-tests-adapter/dbt/tests/adapter/unit_testing/test_types.py similarity index 100% rename from dbt/tests/adapter/unit_testing/test_types.py rename to dbt-tests-adapter/dbt/tests/adapter/unit_testing/test_types.py diff --git a/dbt/tests/adapter/utils/base_array_utils.py b/dbt-tests-adapter/dbt/tests/adapter/utils/base_array_utils.py similarity index 100% rename from dbt/tests/adapter/utils/base_array_utils.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/base_array_utils.py diff --git a/dbt/tests/adapter/utils/base_utils.py b/dbt-tests-adapter/dbt/tests/adapter/utils/base_utils.py similarity index 100% rename from dbt/tests/adapter/utils/base_utils.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/base_utils.py diff --git a/dbt/tests/adapter/utils/data_types/base_data_type_macro.py b/dbt-tests-adapter/dbt/tests/adapter/utils/data_types/base_data_type_macro.py similarity index 100% rename from dbt/tests/adapter/utils/data_types/base_data_type_macro.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/data_types/base_data_type_macro.py diff --git a/dbt/tests/adapter/utils/data_types/test_type_bigint.py b/dbt-tests-adapter/dbt/tests/adapter/utils/data_types/test_type_bigint.py similarity index 100% rename from dbt/tests/adapter/utils/data_types/test_type_bigint.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/data_types/test_type_bigint.py diff --git a/dbt/tests/adapter/utils/data_types/test_type_boolean.py b/dbt-tests-adapter/dbt/tests/adapter/utils/data_types/test_type_boolean.py similarity index 100% rename from dbt/tests/adapter/utils/data_types/test_type_boolean.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/data_types/test_type_boolean.py diff --git a/dbt/tests/adapter/utils/data_types/test_type_float.py b/dbt-tests-adapter/dbt/tests/adapter/utils/data_types/test_type_float.py similarity index 100% 
rename from dbt/tests/adapter/utils/data_types/test_type_float.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/data_types/test_type_float.py diff --git a/dbt/tests/adapter/utils/data_types/test_type_int.py b/dbt-tests-adapter/dbt/tests/adapter/utils/data_types/test_type_int.py similarity index 100% rename from dbt/tests/adapter/utils/data_types/test_type_int.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/data_types/test_type_int.py diff --git a/dbt/tests/adapter/utils/data_types/test_type_numeric.py b/dbt-tests-adapter/dbt/tests/adapter/utils/data_types/test_type_numeric.py similarity index 100% rename from dbt/tests/adapter/utils/data_types/test_type_numeric.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/data_types/test_type_numeric.py diff --git a/dbt/tests/adapter/utils/data_types/test_type_string.py b/dbt-tests-adapter/dbt/tests/adapter/utils/data_types/test_type_string.py similarity index 100% rename from dbt/tests/adapter/utils/data_types/test_type_string.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/data_types/test_type_string.py diff --git a/dbt/tests/adapter/utils/data_types/test_type_timestamp.py b/dbt-tests-adapter/dbt/tests/adapter/utils/data_types/test_type_timestamp.py similarity index 100% rename from dbt/tests/adapter/utils/data_types/test_type_timestamp.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/data_types/test_type_timestamp.py diff --git a/dbt/tests/adapter/utils/fixture_any_value.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_any_value.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_any_value.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_any_value.py diff --git a/dbt/tests/adapter/utils/fixture_array_append.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_array_append.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_array_append.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_array_append.py diff --git 
a/dbt/tests/adapter/utils/fixture_array_concat.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_array_concat.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_array_concat.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_array_concat.py diff --git a/dbt/tests/adapter/utils/fixture_array_construct.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_array_construct.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_array_construct.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_array_construct.py diff --git a/dbt/tests/adapter/utils/fixture_bool_or.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_bool_or.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_bool_or.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_bool_or.py diff --git a/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_cast.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_cast.py new file mode 100644 index 000000000..fe29c7068 --- /dev/null +++ b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_cast.py @@ -0,0 +1,33 @@ +# cast + +seeds__data_cast_csv = """field,output +abc,abc +123,123 +, +""" + + +models__test_cast_sql = """ +with data as ( + + select * from {{ ref('data_cast') }} + +) + +select + {{ cast('field', api.Column.translate_type('string')) }} as actual, + output as expected + +from data +""" + + +models__test_cast_yml = """ +version: 2 +models: + - name: test_cast + data_tests: + - assert_equal: + actual: actual + expected: expected +""" diff --git a/dbt/tests/adapter/utils/fixture_cast_bool_to_text.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_cast_bool_to_text.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_cast_bool_to_text.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_cast_bool_to_text.py diff --git a/dbt/tests/adapter/utils/fixture_concat.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_concat.py similarity index 
100% rename from dbt/tests/adapter/utils/fixture_concat.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_concat.py diff --git a/dbt/tests/adapter/utils/fixture_date_spine.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_date_spine.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_date_spine.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_date_spine.py diff --git a/dbt/tests/adapter/utils/fixture_date_trunc.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_date_trunc.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_date_trunc.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_date_trunc.py diff --git a/dbt/tests/adapter/utils/fixture_dateadd.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_dateadd.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_dateadd.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_dateadd.py diff --git a/dbt/tests/adapter/utils/fixture_datediff.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_datediff.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_datediff.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_datediff.py diff --git a/dbt/tests/adapter/utils/fixture_equals.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_equals.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_equals.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_equals.py diff --git a/dbt/tests/adapter/utils/fixture_escape_single_quotes.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_escape_single_quotes.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_escape_single_quotes.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_escape_single_quotes.py diff --git a/dbt/tests/adapter/utils/fixture_except.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_except.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_except.py 
rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_except.py diff --git a/dbt/tests/adapter/utils/fixture_generate_series.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_generate_series.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_generate_series.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_generate_series.py diff --git a/dbt/tests/adapter/utils/fixture_get_intervals_between.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_get_intervals_between.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_get_intervals_between.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_get_intervals_between.py diff --git a/dbt/tests/adapter/utils/fixture_get_powers_of_two.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_get_powers_of_two.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_get_powers_of_two.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_get_powers_of_two.py diff --git a/dbt/tests/adapter/utils/fixture_hash.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_hash.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_hash.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_hash.py diff --git a/dbt/tests/adapter/utils/fixture_intersect.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_intersect.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_intersect.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_intersect.py diff --git a/dbt/tests/adapter/utils/fixture_last_day.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_last_day.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_last_day.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_last_day.py diff --git a/dbt/tests/adapter/utils/fixture_length.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_length.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_length.py rename 
to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_length.py diff --git a/dbt/tests/adapter/utils/fixture_listagg.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_listagg.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_listagg.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_listagg.py diff --git a/dbt/tests/adapter/utils/fixture_null_compare.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_null_compare.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_null_compare.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_null_compare.py diff --git a/dbt/tests/adapter/utils/fixture_position.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_position.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_position.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_position.py diff --git a/dbt/tests/adapter/utils/fixture_replace.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_replace.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_replace.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_replace.py diff --git a/dbt/tests/adapter/utils/fixture_right.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_right.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_right.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_right.py diff --git a/dbt/tests/adapter/utils/fixture_safe_cast.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_safe_cast.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_safe_cast.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_safe_cast.py diff --git a/dbt/tests/adapter/utils/fixture_split_part.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_split_part.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_split_part.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_split_part.py diff --git 
a/dbt/tests/adapter/utils/fixture_string_literal.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_string_literal.py similarity index 100% rename from dbt/tests/adapter/utils/fixture_string_literal.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/fixture_string_literal.py diff --git a/dbt/tests/adapter/utils/test_any_value.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_any_value.py similarity index 100% rename from dbt/tests/adapter/utils/test_any_value.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_any_value.py diff --git a/dbt/tests/adapter/utils/test_array_append.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_array_append.py similarity index 100% rename from dbt/tests/adapter/utils/test_array_append.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_array_append.py diff --git a/dbt/tests/adapter/utils/test_array_concat.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_array_concat.py similarity index 100% rename from dbt/tests/adapter/utils/test_array_concat.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_array_concat.py diff --git a/dbt/tests/adapter/utils/test_array_construct.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_array_construct.py similarity index 100% rename from dbt/tests/adapter/utils/test_array_construct.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_array_construct.py diff --git a/dbt/tests/adapter/utils/test_bool_or.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_bool_or.py similarity index 100% rename from dbt/tests/adapter/utils/test_bool_or.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_bool_or.py diff --git a/dbt-tests-adapter/dbt/tests/adapter/utils/test_cast.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_cast.py new file mode 100644 index 000000000..686b76974 --- /dev/null +++ b/dbt-tests-adapter/dbt/tests/adapter/utils/test_cast.py @@ -0,0 +1,23 @@ +import pytest + +from dbt.tests.adapter.utils import base_utils, fixture_cast + + +class 
BaseCast(base_utils.BaseUtils): + @pytest.fixture(scope="class") + def seeds(self): + return {"data_cast.csv": fixture_cast.seeds__data_cast_csv} + + @pytest.fixture(scope="class") + def models(self): + return { + "test_cast.yml": fixture_cast.models__test_cast_yml, + "test_cast.sql": self.interpolate_macro_namespace( + self.interpolate_macro_namespace(fixture_cast.models__test_cast_sql, "cast"), + "type_string", + ), + } + + +class TestCast(BaseCast): + pass diff --git a/dbt/tests/adapter/utils/test_cast_bool_to_text.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_cast_bool_to_text.py similarity index 100% rename from dbt/tests/adapter/utils/test_cast_bool_to_text.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_cast_bool_to_text.py diff --git a/dbt/tests/adapter/utils/test_concat.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_concat.py similarity index 100% rename from dbt/tests/adapter/utils/test_concat.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_concat.py diff --git a/dbt/tests/adapter/utils/test_current_timestamp.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_current_timestamp.py similarity index 100% rename from dbt/tests/adapter/utils/test_current_timestamp.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_current_timestamp.py diff --git a/dbt/tests/adapter/utils/test_date_spine.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_date_spine.py similarity index 100% rename from dbt/tests/adapter/utils/test_date_spine.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_date_spine.py diff --git a/dbt/tests/adapter/utils/test_date_trunc.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_date_trunc.py similarity index 100% rename from dbt/tests/adapter/utils/test_date_trunc.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_date_trunc.py diff --git a/dbt/tests/adapter/utils/test_dateadd.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_dateadd.py similarity index 100% rename from 
dbt/tests/adapter/utils/test_dateadd.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_dateadd.py diff --git a/dbt/tests/adapter/utils/test_datediff.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_datediff.py similarity index 100% rename from dbt/tests/adapter/utils/test_datediff.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_datediff.py diff --git a/dbt/tests/adapter/utils/test_equals.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_equals.py similarity index 100% rename from dbt/tests/adapter/utils/test_equals.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_equals.py diff --git a/dbt/tests/adapter/utils/test_escape_single_quotes.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_escape_single_quotes.py similarity index 100% rename from dbt/tests/adapter/utils/test_escape_single_quotes.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_escape_single_quotes.py diff --git a/dbt/tests/adapter/utils/test_except.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_except.py similarity index 100% rename from dbt/tests/adapter/utils/test_except.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_except.py diff --git a/dbt/tests/adapter/utils/test_generate_series.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_generate_series.py similarity index 100% rename from dbt/tests/adapter/utils/test_generate_series.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_generate_series.py diff --git a/dbt/tests/adapter/utils/test_get_intervals_between.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_get_intervals_between.py similarity index 100% rename from dbt/tests/adapter/utils/test_get_intervals_between.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_get_intervals_between.py diff --git a/dbt/tests/adapter/utils/test_get_powers_of_two.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_get_powers_of_two.py similarity index 100% rename from dbt/tests/adapter/utils/test_get_powers_of_two.py rename 
to dbt-tests-adapter/dbt/tests/adapter/utils/test_get_powers_of_two.py diff --git a/dbt/tests/adapter/utils/test_hash.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_hash.py similarity index 100% rename from dbt/tests/adapter/utils/test_hash.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_hash.py diff --git a/dbt/tests/adapter/utils/test_intersect.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_intersect.py similarity index 100% rename from dbt/tests/adapter/utils/test_intersect.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_intersect.py diff --git a/dbt/tests/adapter/utils/test_last_day.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_last_day.py similarity index 100% rename from dbt/tests/adapter/utils/test_last_day.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_last_day.py diff --git a/dbt/tests/adapter/utils/test_length.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_length.py similarity index 100% rename from dbt/tests/adapter/utils/test_length.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_length.py diff --git a/dbt/tests/adapter/utils/test_listagg.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_listagg.py similarity index 100% rename from dbt/tests/adapter/utils/test_listagg.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_listagg.py diff --git a/dbt/tests/adapter/utils/test_null_compare.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_null_compare.py similarity index 100% rename from dbt/tests/adapter/utils/test_null_compare.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_null_compare.py diff --git a/dbt/tests/adapter/utils/test_position.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_position.py similarity index 100% rename from dbt/tests/adapter/utils/test_position.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_position.py diff --git a/dbt/tests/adapter/utils/test_replace.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_replace.py similarity 
index 100% rename from dbt/tests/adapter/utils/test_replace.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_replace.py diff --git a/dbt/tests/adapter/utils/test_right.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_right.py similarity index 100% rename from dbt/tests/adapter/utils/test_right.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_right.py diff --git a/dbt/tests/adapter/utils/test_safe_cast.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_safe_cast.py similarity index 100% rename from dbt/tests/adapter/utils/test_safe_cast.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_safe_cast.py diff --git a/dbt/tests/adapter/utils/test_split_part.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_split_part.py similarity index 100% rename from dbt/tests/adapter/utils/test_split_part.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_split_part.py diff --git a/dbt/tests/adapter/utils/test_string_literal.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_string_literal.py similarity index 100% rename from dbt/tests/adapter/utils/test_string_literal.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_string_literal.py diff --git a/dbt/tests/adapter/utils/test_timestamps.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_timestamps.py similarity index 100% rename from dbt/tests/adapter/utils/test_timestamps.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_timestamps.py diff --git a/dbt/tests/adapter/utils/test_validate_sql.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_validate_sql.py similarity index 100% rename from dbt/tests/adapter/utils/test_validate_sql.py rename to dbt-tests-adapter/dbt/tests/adapter/utils/test_validate_sql.py diff --git a/dbt-tests-adapter/pyproject.toml b/dbt-tests-adapter/pyproject.toml index 990f2d1a7..35722b7b9 100644 --- a/dbt-tests-adapter/pyproject.toml +++ b/dbt-tests-adapter/pyproject.toml @@ -21,6 +21,7 @@ classifiers = [ "Programming Language :: Python :: 
3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", ] dependencies = [ # TODO: remove `dbt-core` dependency @@ -50,16 +51,14 @@ Changelog = "https://github.com/dbt-labs/dbt-adapters/blob/main/CHANGELOG.md" requires = ["hatchling"] build-backend = "hatchling.build" -[tool.hatch.build.targets.sdist.force-include] -"../dbt/tests" = "dbt/tests" -"../dbt/__init__.py" = "dbt/__init__.py" +[tool.hatch.version] +path = "dbt/tests/__about__.py" -[tool.hatch.build.targets.wheel.force-include] -"../dbt/tests" = "dbt/tests" -"../dbt/__init__.py" = "dbt/__init__.py" +[tool.hatch.build.targets.sdist] +include = ["dbt/tests", "dbt/__init__.py"] -[tool.hatch.version] -path = "../dbt/tests/__about__.py" +[tool.hatch.build.targets.wheel] +include = ["dbt/tests", "dbt/__init__.py"] [tool.hatch.envs.build] detached = true diff --git a/dbt/adapters/__about__.py b/dbt/adapters/__about__.py index d94aee529..d37cdcc76 100644 --- a/dbt/adapters/__about__.py +++ b/dbt/adapters/__about__.py @@ -1 +1 @@ -version = "0.1.0a8" +version = "1.1.0rc1" diff --git a/dbt/adapters/base/connections.py b/dbt/adapters/base/connections.py index 25db22cd6..6e038297d 100644 --- a/dbt/adapters/base/connections.py +++ b/dbt/adapters/base/connections.py @@ -18,9 +18,9 @@ Tuple, Type, Union, + TYPE_CHECKING, ) -import agate from dbt_common.events.contextvars import get_node_info from dbt_common.events.functions import fire_event from dbt_common.exceptions import DbtInternalError, NotImplementedError @@ -48,6 +48,10 @@ ) from dbt.adapters.exceptions import FailedToConnectError, InvalidConnectionError +if TYPE_CHECKING: + import agate + + SleepTime = Union[int, float] # As taken by time.sleep. AdapterHandle = Any # Adapter connection handle objects can be any class. 
@@ -395,7 +399,7 @@ def execute( auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None, - ) -> Tuple[AdapterResponse, agate.Table]: + ) -> Tuple[AdapterResponse, "agate.Table"]: """Execute the given SQL. :param str sql: The sql to execute. diff --git a/dbt/adapters/base/impl.py b/dbt/adapters/base/impl.py index 6b0ccc28f..f58f8aba0 100644 --- a/dbt/adapters/base/impl.py +++ b/dbt/adapters/base/impl.py @@ -20,16 +20,9 @@ Type, TypedDict, Union, + TYPE_CHECKING, ) -import agate -from dbt_common.clients.agate_helper import ( - Integer, - empty_table, - get_column_value_uncased, - merge_tables, - table_from_rows, -) from dbt_common.clients.jinja import CallableMacroGenerator from dbt_common.contracts.constraints import ( ColumnLevelConstraint, @@ -94,6 +87,10 @@ ) from dbt.adapters.protocol import AdapterConfig, MacroContextGeneratorCallable +if TYPE_CHECKING: + import agate + + GET_CATALOG_MACRO_NAME = "get_catalog" GET_CATALOG_RELATIONS_MACRO_NAME = "get_catalog_relations" FRESHNESS_MACRO_NAME = "collect_freshness" @@ -106,7 +103,14 @@ class ConstraintSupport(str, Enum): NOT_SUPPORTED = "not_supported" -def _expect_row_value(key: str, row: agate.Row): +def _parse_callback_empty_table(*args, **kwargs) -> Tuple[str, "agate.Table"]: + # Lazy load agate_helper to avoid importing agate when it is not necessary. + from dbt_common.clients.agate_helper import empty_table + + return "", empty_table() + + +def _expect_row_value(key: str, row: "agate.Row"): if key not in row.keys(): raise DbtInternalError( 'Got a row without "{}" column, columns: {}'.format(key, row.keys()) @@ -116,13 +120,13 @@ def _expect_row_value(key: str, row: agate.Row): def _catalog_filter_schemas( used_schemas: FrozenSet[Tuple[str, str]] -) -> Callable[[agate.Row], bool]: +) -> Callable[["agate.Row"], bool]: """Return a function that takes a row and decides if the row should be included in the catalog output. 
""" schemas = frozenset((d.lower(), s.lower()) for d, s in used_schemas) - def test(row: agate.Row) -> bool: + def test(row: "agate.Row") -> bool: table_database = _expect_row_value("table_database", row) table_schema = _expect_row_value("table_schema", row) # the schema may be present but None, which is not an error and should @@ -253,6 +257,8 @@ class BaseAdapter(metaclass=AdapterMeta): ConstraintType.foreign_key: ConstraintSupport.ENFORCED, } + MAX_SCHEMA_METADATA_RELATIONS = 100 + # This static member variable can be overriden in concrete adapter # implementations to indicate adapter support for optional capabilities. _capabilities = CapabilityDict({}) @@ -322,14 +328,14 @@ def connection_named(self, name: str, query_header_context: Any = None) -> Itera if self.connections.query_header is not None: self.connections.query_header.reset() - @available.parse(lambda *a, **k: ("", empty_table())) + @available.parse(_parse_callback_empty_table) def execute( self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None, - ) -> Tuple[AdapterResponse, agate.Table]: + ) -> Tuple[AdapterResponse, "agate.Table"]: """Execute the given SQL. This is a thin wrapper around ConnectionManager.execute. @@ -339,7 +345,7 @@ def execute( :param bool fetch: If set, fetch results. :param Optional[int] limit: If set, only fetch n number of rows :return: A tuple of the query status and results (empty if fetch=False). 
- :rtype: Tuple[AdapterResponse, agate.Table] + :rtype: Tuple[AdapterResponse, "agate.Table"] """ return self.connections.execute(sql=sql, auto_begin=auto_begin, fetch=fetch, limit=limit) @@ -367,8 +373,8 @@ def get_column_schema_from_query(self, sql: str) -> List[BaseColumn]: ] return columns - @available.parse(lambda *a, **k: ("", empty_table())) - def get_partitions_metadata(self, table: str) -> Tuple[agate.Table]: + @available.parse(_parse_callback_empty_table) + def get_partitions_metadata(self, table: str) -> Tuple["agate.Table"]: """ TODO: Can we move this to dbt-bigquery? Obtain partitions metadata for a BigQuery partitioned table. @@ -376,7 +382,7 @@ def get_partitions_metadata(self, table: str) -> Tuple[agate.Table]: :param str table: a partitioned table id, in standard SQL format. :return: a partition metadata tuple, as described in https://cloud.google.com/bigquery/docs/creating-partitioned-tables#getting_partition_metadata_using_meta_tables. - :rtype: agate.Table + :rtype: "agate.Table" """ if hasattr(self.connections, "get_partitions_metadata"): return self.connections.get_partitions_metadata(table=table) @@ -420,7 +426,9 @@ def _get_cache_schemas(self, relation_configs: Iterable[RelationConfig]) -> Set[ populate. """ return { - self.Relation.create_from(quoting=self.config, relation_config=relation_config).without_identifier() + self.Relation.create_from( + quoting=self.config, relation_config=relation_config + ).without_identifier() for relation_config in relation_configs } @@ -662,7 +670,7 @@ def list_relations_without_caching(self, schema_relation: BaseRelation) -> List[ # Methods about grants ### @available - def standardize_grants_dict(self, grants_table: agate.Table) -> dict: + def standardize_grants_dict(self, grants_table: "agate.Table") -> dict: """Translate the result of `show grants` (or equivalent) to match the grants which a user would configure in their project. 
@@ -937,7 +945,7 @@ def quote_seed_column(self, column: str, quote_config: Optional[bool]) -> str: ### @classmethod @abc.abstractmethod - def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str: + def convert_text_type(cls, agate_table: "agate.Table", col_idx: int) -> str: """Return the type in the database that best maps to the agate.Text type for the given agate table and column index. @@ -949,7 +957,7 @@ def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str: @classmethod @abc.abstractmethod - def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str: + def convert_number_type(cls, agate_table: "agate.Table", col_idx: int) -> str: """Return the type in the database that best maps to the agate.Number type for the given agate table and column index. @@ -960,7 +968,7 @@ def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str: raise NotImplementedError("`convert_number_type` is not implemented for this adapter!") @classmethod - def convert_integer_type(cls, agate_table: agate.Table, col_idx: int) -> str: + def convert_integer_type(cls, agate_table: "agate.Table", col_idx: int) -> str: """Return the type in the database that best maps to the agate.Number type for the given agate table and column index. @@ -972,7 +980,7 @@ def convert_integer_type(cls, agate_table: agate.Table, col_idx: int) -> str: @classmethod @abc.abstractmethod - def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str: + def convert_boolean_type(cls, agate_table: "agate.Table", col_idx: int) -> str: """Return the type in the database that best maps to the agate.Boolean type for the given agate table and column index. 
@@ -984,7 +992,7 @@ def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str: @classmethod @abc.abstractmethod - def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str: + def convert_datetime_type(cls, agate_table: "agate.Table", col_idx: int) -> str: """Return the type in the database that best maps to the agate.DateTime type for the given agate table and column index. @@ -996,7 +1004,7 @@ def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str: @classmethod @abc.abstractmethod - def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str: + def convert_date_type(cls, agate_table: "agate.Table", col_idx: int) -> str: """Return the type in the database that best maps to the agate.Date type for the given agate table and column index. @@ -1008,7 +1016,7 @@ def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str: @classmethod @abc.abstractmethod - def convert_time_type(cls, agate_table: agate.Table, col_idx: int) -> str: + def convert_time_type(cls, agate_table: "agate.Table", col_idx: int) -> str: """Return the type in the database that best maps to the agate.TimeDelta type for the given agate table and column index. 
@@ -1020,11 +1028,14 @@ def convert_time_type(cls, agate_table: agate.Table, col_idx: int) -> str: @available @classmethod - def convert_type(cls, agate_table: agate.Table, col_idx: int) -> Optional[str]: + def convert_type(cls, agate_table: "agate.Table", col_idx: int) -> Optional[str]: return cls.convert_agate_type(agate_table, col_idx) @classmethod - def convert_agate_type(cls, agate_table: agate.Table, col_idx: int) -> Optional[str]: + def convert_agate_type(cls, agate_table: "agate.Table", col_idx: int) -> Optional[str]: + import agate + from dbt_common.clients.agate_helper import Integer + agate_type: Type = agate_table.column_types[col_idx] conversions: List[Tuple[Type, Callable[..., str]]] = [ (Integer, cls.convert_integer_type), @@ -1051,6 +1062,7 @@ def execute_macro( project: Optional[str] = None, context_override: Optional[Dict[str, Any]] = None, kwargs: Optional[Dict[str, Any]] = None, + needs_conn: bool = False, ) -> AttrDict: """Look macro_name up in the manifest and execute its results. @@ -1063,6 +1075,10 @@ def execute_macro( execution context. :param kwargs: An optional dict of keyword args used to pass to the macro. + : param needs_conn: A boolean that indicates whether the specified macro + requires an open connection to execute. If needs_conn is True, a + connection is expected and opened if necessary. Otherwise (and by default), + no connection is expected prior to executing the macro. 
""" if kwargs is None: @@ -1095,17 +1111,23 @@ def execute_macro( macro_function = CallableMacroGenerator(macro, macro_context) + if needs_conn: + connection = self.connections.get_thread_connection() + self.connections.open(connection) + with self.connections.exception_handler(f"macro {macro_name}"): result = macro_function(**kwargs) return result @classmethod def _catalog_filter_table( - cls, table: agate.Table, used_schemas: FrozenSet[Tuple[str, str]] - ) -> agate.Table: + cls, table: "agate.Table", used_schemas: FrozenSet[Tuple[str, str]] + ) -> "agate.Table": """Filter the table as appropriate for catalog entries. Subclasses can override this to change filtering rules on a per-adapter basis. """ + from dbt_common.clients.agate_helper import table_from_rows + # force database + schema to be strings table = table_from_rows( table.rows, @@ -1119,7 +1141,7 @@ def _get_one_catalog( information_schema: InformationSchema, schemas: Set[str], used_schemas: FrozenSet[Tuple[str, str]], - ) -> agate.Table: + ) -> "agate.Table": kwargs = {"information_schema": information_schema, "schemas": schemas} table = self.execute_macro(GET_CATALOG_MACRO_NAME, kwargs=kwargs) @@ -1131,7 +1153,7 @@ def _get_one_catalog_by_relations( information_schema: InformationSchema, relations: List[BaseRelation], used_schemas: FrozenSet[Tuple[str, str]], - ) -> agate.Table: + ) -> "agate.Table": kwargs = { "information_schema": information_schema, "relations": relations, @@ -1147,10 +1169,10 @@ def get_filtered_catalog( used_schemas: FrozenSet[Tuple[str, str]], relations: Optional[Set[BaseRelation]] = None, ): - catalogs: agate.Table + catalogs: "agate.Table" if ( relations is None - or len(relations) > 100 + or len(relations) > self.MAX_SCHEMA_METADATA_RELATIONS or not self.supports(Capability.SchemaMetadataByRelations) ): # Do it the traditional way. We get the full catalog. 
@@ -1170,7 +1192,7 @@ def get_filtered_catalog( for r in relations } - def in_map(row: agate.Row): + def in_map(row: "agate.Row"): d = _expect_row_value("table_database", row) s = _expect_row_value("table_schema", row) i = _expect_row_value("table_name", row) @@ -1183,16 +1205,16 @@ def in_map(row: agate.Row): return catalogs, exceptions - def row_matches_relation(self, row: agate.Row, relations: Set[BaseRelation]): + def row_matches_relation(self, row: "agate.Row", relations: Set[BaseRelation]): pass def get_catalog( self, relation_configs: Iterable[RelationConfig], used_schemas: FrozenSet[Tuple[str, str]], - ) -> Tuple[agate.Table, List[Exception]]: + ) -> Tuple["agate.Table", List[Exception]]: with executor(self.config) as tpe: - futures: List[Future[agate.Table]] = [] + futures: List[Future["agate.Table"]] = [] schema_map: SchemaSearchMap = self._get_catalog_schemas(relation_configs) for info, schemas in schema_map.items(): if len(schemas) == 0: @@ -1208,9 +1230,9 @@ def get_catalog( def get_catalog_by_relations( self, used_schemas: FrozenSet[Tuple[str, str]], relations: Set[BaseRelation] - ) -> Tuple[agate.Table, List[Exception]]: + ) -> Tuple["agate.Table", List[Exception]]: with executor(self.config) as tpe: - futures: List[Future[agate.Table]] = [] + futures: List[Future["agate.Table"]] = [] relations_by_schema = self._get_catalog_relations_by_info_schema(relations) for info_schema in relations_by_schema: name = ".".join([str(info_schema.database), "information_schema"]) @@ -1240,6 +1262,8 @@ def calculate_freshness( macro_resolver: Optional[MacroResolverProtocol] = None, ) -> Tuple[Optional[AdapterResponse], FreshnessResponse]: """Calculate the freshness of sources in dbt, and return it""" + import agate + kwargs: Dict[str, Any] = { "source": source, "loaded_at_field": loaded_at_field, @@ -1250,8 +1274,8 @@ def calculate_freshness( # in older versions of dbt-core, the 'collect_freshness' macro returned the table of results directly # starting in v1.5, by 
default, we return both the table and the adapter response (metadata about the query) result: Union[ - AttrDict, # current: contains AdapterResponse + agate.Table - agate.Table, # previous: just table + AttrDict, # current: contains AdapterResponse + "agate.Table" + "agate.Table", # previous: just table ] result = self.execute_macro( FRESHNESS_MACRO_NAME, kwargs=kwargs, macro_resolver=macro_resolver @@ -1282,46 +1306,111 @@ def calculate_freshness( } return adapter_response, freshness + def calculate_freshness_from_metadata_batch( + self, + sources: List[BaseRelation], + macro_resolver: Optional[MacroResolverProtocol] = None, + ) -> Tuple[List[Optional[AdapterResponse]], Dict[BaseRelation, FreshnessResponse]]: + """ + Given a list of sources (BaseRelations), calculate the metadata-based freshness in batch. + This method should _not_ execute a warehouse query per source, but rather batch up + the sources into as few requests as possible to minimize the number of roundtrips required + to compute metadata-based freshness for each input source. + + :param sources: The list of sources to calculate metadata-based freshness for + :param macro_resolver: An optional macro_resolver to use for get_relation_last_modified + :return: a tuple where: + * the first element is a list of optional AdapterResponses indicating the response + for each request the method made to compute the freshness for the provided sources. + * the second element is a dictionary mapping an input source BaseRelation to a FreshnessResponse, + if it was possible to calculate a FreshnessResponse for the source. 
+ """ + # Track schema, identifiers of sources for lookup from batch query + schema_identifier_to_source = { + ( + source.path.get_lowered_part(ComponentName.Schema), # type: ignore + source.path.get_lowered_part(ComponentName.Identifier), # type: ignore + ): source + for source in sources + } + + # Group metadata sources by information schema -- one query per information schema will be necessary + sources_by_info_schema: Dict[InformationSchema, List[BaseRelation]] = ( + self._get_catalog_relations_by_info_schema(sources) + ) + + freshness_responses: Dict[BaseRelation, FreshnessResponse] = {} + adapter_responses: List[Optional[AdapterResponse]] = [] + for ( + information_schema, + sources_for_information_schema, + ) in sources_by_info_schema.items(): + result = self.execute_macro( + GET_RELATION_LAST_MODIFIED_MACRO_NAME, + kwargs={ + "information_schema": information_schema, + "relations": sources_for_information_schema, + }, + macro_resolver=macro_resolver, + needs_conn=True, + ) + adapter_response, table = result.response, result.table # type: ignore[attr-defined] + adapter_responses.append(adapter_response) + + for row in table: + raw_relation, freshness_response = self._parse_freshness_row(row, table) + source_relation_for_result = schema_identifier_to_source[raw_relation] + freshness_responses[source_relation_for_result] = freshness_response + + return adapter_responses, freshness_responses + def calculate_freshness_from_metadata( self, source: BaseRelation, macro_resolver: Optional[MacroResolverProtocol] = None, ) -> Tuple[Optional[AdapterResponse], FreshnessResponse]: - kwargs: Dict[str, Any] = { - "information_schema": source.information_schema_only(), - "relations": [source], - } - result = self.execute_macro( - GET_RELATION_LAST_MODIFIED_MACRO_NAME, - kwargs=kwargs, + adapter_responses, freshness_responses = self.calculate_freshness_from_metadata_batch( + sources=[source], macro_resolver=macro_resolver, ) - adapter_response, table = result.response, 
result.table # type: ignore[attr-defined] + adapter_response = adapter_responses[0] if adapter_responses else None + return adapter_response, freshness_responses[source] - try: - row = table[0] - last_modified_val = get_column_value_uncased("last_modified", row) - snapshotted_at_val = get_column_value_uncased("snapshotted_at", row) - except Exception: - raise MacroResultError(GET_RELATION_LAST_MODIFIED_MACRO_NAME, table) - - if last_modified_val is None: + def _create_freshness_response( + self, last_modified: Optional[datetime], snapshotted_at: Optional[datetime] + ) -> FreshnessResponse: + if last_modified is None: # Interpret missing value as "infinitely long ago" max_loaded_at = datetime(1, 1, 1, 0, 0, 0, tzinfo=pytz.UTC) else: - max_loaded_at = _utc(last_modified_val, None, "last_modified") - - snapshotted_at = _utc(snapshotted_at_val, None, "snapshotted_at") + max_loaded_at = _utc(last_modified, None, "last_modified") + snapshotted_at = _utc(snapshotted_at, None, "snapshotted_at") age = (snapshotted_at - max_loaded_at).total_seconds() - freshness: FreshnessResponse = { "max_loaded_at": max_loaded_at, "snapshotted_at": snapshotted_at, "age": age, } - return adapter_response, freshness + return freshness + + def _parse_freshness_row( + self, row: "agate.Row", table: "agate.Table" + ) -> Tuple[Any, FreshnessResponse]: + from dbt_common.clients.agate_helper import get_column_value_uncased + + try: + last_modified_val = get_column_value_uncased("last_modified", row) + snapshotted_at_val = get_column_value_uncased("snapshotted_at", row) + identifier = get_column_value_uncased("identifier", row) + schema = get_column_value_uncased("schema", row) + except Exception: + raise MacroResultError(GET_RELATION_LAST_MODIFIED_MACRO_NAME, table) + + freshness_response = self._create_freshness_response(last_modified_val, snapshotted_at_val) + raw_relation = schema.lower().strip(), identifier.lower().strip() + return raw_relation, freshness_response def pre_model_hook(self, 
config: Mapping[str, Any]) -> Any: """A hook for running some operation before the model materialization @@ -1635,10 +1724,12 @@ def supports(cls, capability: Capability) -> bool: def catch_as_completed( - futures, # typing: List[Future[agate.Table]] -) -> Tuple[agate.Table, List[Exception]]: - # catalogs: agate.Table = agate.Table(rows=[]) - tables: List[agate.Table] = [] + futures, # typing: List[Future["agate.Table"]] +) -> Tuple["agate.Table", List[Exception]]: + from dbt_common.clients.agate_helper import merge_tables + + # catalogs: "agate.Table" =".Table(rows=[]) + tables: List["agate.Table"] = [] exceptions: List[Exception] = [] for future in as_completed(futures): diff --git a/dbt/adapters/base/relation.py b/dbt/adapters/base/relation.py index ea03b067d..210a2dcd7 100644 --- a/dbt/adapters/base/relation.py +++ b/dbt/adapters/base/relation.py @@ -47,18 +47,21 @@ class BaseRelation(FakeAPIObject, Hashable): quote_policy: Policy = field(default_factory=lambda: Policy()) dbt_created: bool = False limit: Optional[int] = None + require_alias: bool = ( + True # used to govern whether to add an alias when render_limited is called + ) # register relation types that can be renamed for the purpose of replacing relations using stages and backups # adding a relation type here also requires defining the associated rename macro # e.g. adding RelationType.View in dbt-postgres requires that you define: # include/postgres/macros/relations/view/rename.sql::postgres__get_rename_view_sql() - renameable_relations: SerializableIterable = () + renameable_relations: SerializableIterable = field(default_factory=frozenset) # register relation types that are atomically replaceable, e.g. they have "create or replace" syntax # adding a relation type here also requires defining the associated replace macro # e.g. 
adding RelationType.View in dbt-postgres requires that you define: # include/postgres/macros/relations/view/replace.sql::postgres__get_replace_view_sql() - replaceable_relations: SerializableIterable = () + replaceable_relations: SerializableIterable = field(default_factory=frozenset) def _is_exactish_match(self, field: ComponentName, value: str) -> bool: if self.dbt_created and self.quote_policy.get_part(field) is False: @@ -205,14 +208,22 @@ def render(self) -> str: # if there is nothing set, this will return the empty string. return ".".join(part for _, part in self._render_iterator() if part is not None) + def _render_limited_alias(self) -> str: + """Some databases require an alias for subqueries (postgres, mysql) for all others we want to avoid adding + an alias as it has the potential to introduce issues with the query if the user also defines an alias. + """ + if self.require_alias: + return f" _dbt_limit_subq_{self.table}" + return "" + def render_limited(self) -> str: rendered = self.render() if self.limit is None: return rendered elif self.limit == 0: - return f"(select * from {rendered} where false limit 0) _dbt_limit_subq" + return f"(select * from {rendered} where false limit 0){self._render_limited_alias()}" else: - return f"(select * from {rendered} limit {self.limit}) _dbt_limit_subq" + return f"(select * from {rendered} limit {self.limit}){self._render_limited_alias()}" def quoted(self, identifier): return "{quote_char}{identifier}{quote_char}".format( diff --git a/dbt/adapters/capability.py b/dbt/adapters/capability.py index 745cb27a6..305604c71 100644 --- a/dbt/adapters/capability.py +++ b/dbt/adapters/capability.py @@ -13,6 +13,9 @@ class Capability(str, Enum): TableLastModifiedMetadata = "TableLastModifiedMetadata" """Indicates support for determining the time of the last table modification by querying database metadata.""" + TableLastModifiedMetadataBatch = "TableLastModifiedMetadataBatch" + """Indicates support for performantly determining 
the time of the last table modification by querying database metadata in batch.""" + class Support(str, Enum): Unknown = "Unknown" diff --git a/dbt/adapters/contracts/relation.py b/dbt/adapters/contracts/relation.py index 3028bd0f6..3560c2b31 100644 --- a/dbt/adapters/contracts/relation.py +++ b/dbt/adapters/contracts/relation.py @@ -40,19 +40,21 @@ class MaterializationConfig(Mapping, ABC): contract: MaterializationContract extra: Dict[str, Any] - def __contains__(self, item): - ... + def __contains__(self, item): ... - def __delitem__(self, key): - ... + def __delitem__(self, key): ... class RelationConfig(Protocol): + resource_type: str name: str + description: str database: str schema: str identifier: str compiled_code: Optional[str] + meta: Dict[str, Any] + tags: List[str] quoting_dict: Dict[str, bool] config: Optional[MaterializationConfig] diff --git a/dbt/adapters/events/README.md b/dbt/adapters/events/README.md index fe39a18e9..c98488db5 100644 --- a/dbt/adapters/events/README.md +++ b/dbt/adapters/events/README.md @@ -14,7 +14,7 @@ When events are processed via `fire_event`, nearly everything is logged. Whether We have switched from using betterproto to using google protobuf, because of a lack of support for Struct fields in betterproto. -The google protobuf interface is janky and very much non-Pythonic. The "generated" classes in types_pb2.py do not resemble regular Python classes. They do not have normal constructors; they can only be constructed empty. They can be "filled" by setting fields individually or using a json_format method like ParseDict. We have wrapped the logging events with a class (in types.py) which allows using a constructor -- keywords only, no positional parameters. +The google protobuf interface is janky and very much non-Pythonic. The "generated" classes in types_pb2.py do not resemble regular Python classes. They do not have normal constructors; they can only be constructed empty. 
They can be "filled" by setting fields individually or using a json_format method like ParseDict. We have wrapped the logging events with a class (in types.py) which allows using a constructor -- keywords only, no positional parameters. ## Required for Every Event diff --git a/dbt/adapters/events/adapter_types.proto b/dbt/adapters/events/adapter_types.proto index aa0b507c4..69d643257 100644 --- a/dbt/adapters/events/adapter_types.proto +++ b/dbt/adapters/events/adapter_types.proto @@ -515,3 +515,13 @@ message ConstraintNotSupportedMsg { AdapterCommonEventInfo info = 1; ConstraintNotSupported data = 2; } + +// E050 +message TypeCodeNotFound { + int32 type_code = 1; +} + +message TypeCodeNotFoundMsg { + AdapterCommonEventInfo info = 1; + TypeCodeNotFound data = 2; +} diff --git a/dbt/adapters/events/adapter_types_pb2.py b/dbt/adapters/events/adapter_types_pb2.py index 5d41b7190..bfd440801 100644 --- a/dbt/adapters/events/adapter_types_pb2.py +++ b/dbt/adapters/events/adapter_types_pb2.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: adapter_types.proto -# Protobuf Python Version: 4.25.2 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -16,7 +15,7 @@ from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x61\x64\x61pter_types.proto\x12\x0bproto_types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xab\x02\n\x16\x41\x64\x61pterCommonEventInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\x0b\n\x03msg\x18\x03 \x01(\t\x12\r\n\x05level\x18\x04 \x01(\t\x12\x15\n\rinvocation_id\x18\x05 \x01(\t\x12\x0b\n\x03pid\x18\x06 \x01(\x05\x12\x0e\n\x06thread\x18\x07 \x01(\t\x12&\n\x02ts\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12=\n\x05\x65xtra\x18\t \x03(\x0b\x32..proto_types.AdapterCommonEventInfo.ExtraEntry\x12\x10\n\x08\x63\x61tegory\x18\n \x01(\t\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"]\n\x13\x41\x64\x61pterNodeRelation\x12\x10\n\x08\x64\x61tabase\x18\n \x01(\t\x12\x0e\n\x06schema\x18\x0b \x01(\t\x12\r\n\x05\x61lias\x18\x0c \x01(\t\x12\x15\n\rrelation_name\x18\r \x01(\t\"\x9f\x02\n\x0f\x41\x64\x61pterNodeInfo\x12\x11\n\tnode_path\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x11\n\tunique_id\x18\x03 \x01(\t\x12\x15\n\rresource_type\x18\x04 \x01(\t\x12\x14\n\x0cmaterialized\x18\x05 \x01(\t\x12\x13\n\x0bnode_status\x18\x06 \x01(\t\x12\x17\n\x0fnode_started_at\x18\x07 \x01(\t\x12\x18\n\x10node_finished_at\x18\x08 \x01(\t\x12%\n\x04meta\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x37\n\rnode_relation\x18\n \x01(\x0b\x32 .proto_types.AdapterNodeRelation\"G\n\x0fReferenceKeyMsg\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12\x12\n\nidentifier\x18\x03 
\x01(\t\"?\n\x19\x41\x64\x61pterDeprecationWarning\x12\x10\n\x08old_name\x18\x01 \x01(\t\x12\x10\n\x08new_name\x18\x02 \x01(\t\"\x87\x01\n\x1c\x41\x64\x61pterDeprecationWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.AdapterDeprecationWarning\"!\n\x1f\x43ollectFreshnessReturnSignature\"\x93\x01\n\"CollectFreshnessReturnSignatureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.CollectFreshnessReturnSignature\"\x8e\x01\n\x11\x41\x64\x61pterEventDebug\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"w\n\x14\x41\x64\x61pterEventDebugMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventDebug\"\x8d\x01\n\x10\x41\x64\x61pterEventInfo\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"u\n\x13\x41\x64\x61pterEventInfoMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.AdapterEventInfo\"\x90\x01\n\x13\x41\x64\x61pterEventWarning\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"{\n\x16\x41\x64\x61pterEventWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 
.proto_types.AdapterEventWarning\"\xa0\x01\n\x11\x41\x64\x61pterEventError\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x10\n\x08\x65xc_info\x18\x05 \x01(\t\"w\n\x14\x41\x64\x61pterEventErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventError\"f\n\rNewConnection\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"o\n\x10NewConnectionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.NewConnection\"=\n\x10\x43onnectionReused\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x16\n\x0eorig_conn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionReusedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionReused\"0\n\x1b\x43onnectionLeftOpenInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x8b\x01\n\x1e\x43onnectionLeftOpenInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.ConnectionLeftOpenInCleanup\".\n\x19\x43onnectionClosedInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x87\x01\n\x1c\x43onnectionClosedInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.ConnectionClosedInCleanup\"f\n\x0eRollbackFailed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"q\n\x11RollbackFailedMsg\x12\x31\n\x04info\x18\x01 
\x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.RollbackFailed\"V\n\x10\x43onnectionClosed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionClosedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionClosed\"X\n\x12\x43onnectionLeftOpen\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"y\n\x15\x43onnectionLeftOpenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.ConnectionLeftOpen\"N\n\x08Rollback\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"e\n\x0bRollbackMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.Rollback\"@\n\tCacheMiss\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\"g\n\x0c\x43\x61\x63heMissMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.CacheMiss\"b\n\rListRelations\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12/\n\trelations\x18\x03 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10ListRelationsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ListRelations\"g\n\x0e\x43onnectionUsed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"q\n\x11\x43onnectionUsedMsg\x12\x31\n\x04info\x18\x01 
\x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.ConnectionUsed\"[\n\x08SQLQuery\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x0b\n\x03sql\x18\x03 \x01(\t\"e\n\x0bSQLQueryMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.SQLQuery\"b\n\x0eSQLQueryStatus\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0e\n\x06status\x18\x02 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x03 \x01(\x02\"q\n\x11SQLQueryStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SQLQueryStatus\"O\n\tSQLCommit\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"g\n\x0cSQLCommitMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.SQLCommit\"a\n\rColTypeChange\x12\x11\n\torig_type\x18\x01 \x01(\t\x12\x10\n\x08new_type\x18\x02 \x01(\t\x12+\n\x05table\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10\x43olTypeChangeMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ColTypeChange\"@\n\x0eSchemaCreation\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"q\n\x11SchemaCreationMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SchemaCreation\"<\n\nSchemaDrop\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"i\n\rSchemaDropMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12%\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\x17.proto_types.SchemaDrop\"\xde\x01\n\x0b\x43\x61\x63heAction\x12\x0e\n\x06\x61\x63tion\x18\x01 \x01(\t\x12-\n\x07ref_key\x18\x02 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_2\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_3\x18\x04 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12.\n\x08ref_list\x18\x05 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"k\n\x0e\x43\x61\x63heActionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.CacheAction\"\x98\x01\n\x0e\x43\x61\x63heDumpGraph\x12\x33\n\x04\x64ump\x18\x01 \x03(\x0b\x32%.proto_types.CacheDumpGraph.DumpEntry\x12\x14\n\x0c\x62\x65\x66ore_after\x18\x02 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\x1a+\n\tDumpEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"q\n\x11\x43\x61\x63heDumpGraphMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CacheDumpGraph\"B\n\x11\x41\x64\x61pterRegistered\x12\x14\n\x0c\x61\x64\x61pter_name\x18\x01 \x01(\t\x12\x17\n\x0f\x61\x64\x61pter_version\x18\x02 \x01(\t\"w\n\x14\x41\x64\x61pterRegisteredMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterRegistered\"!\n\x12\x41\x64\x61pterImportError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"y\n\x15\x41\x64\x61pterImportErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.AdapterImportError\"#\n\x0fPluginLoadError\x12\x10\n\x08\x65xc_info\x18\x01 \x01(\t\"s\n\x12PluginLoadErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.PluginLoadError\"a\n\x14NewConnectionOpening\x12/\n\tnode_info\x18\x01 
\x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x18\n\x10\x63onnection_state\x18\x02 \x01(\t\"}\n\x17NewConnectionOpeningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.NewConnectionOpening\"8\n\rCodeExecution\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x14\n\x0c\x63ode_content\x18\x02 \x01(\t\"o\n\x10\x43odeExecutionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.CodeExecution\"6\n\x13\x43odeExecutionStatus\x12\x0e\n\x06status\x18\x01 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x02 \x01(\x02\"{\n\x16\x43odeExecutionStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.CodeExecutionStatus\"%\n\x16\x43\x61talogGenerationError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"\x81\x01\n\x19\x43\x61talogGenerationErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.CatalogGenerationError\"-\n\x13WriteCatalogFailure\x12\x16\n\x0enum_exceptions\x18\x01 \x01(\x05\"{\n\x16WriteCatalogFailureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.WriteCatalogFailure\"\x1e\n\x0e\x43\x61talogWritten\x12\x0c\n\x04path\x18\x01 \x01(\t\"q\n\x11\x43\x61talogWrittenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CatalogWritten\"\x14\n\x12\x43\x61nnotGenerateDocs\"y\n\x15\x43\x61nnotGenerateDocsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.CannotGenerateDocs\"\x11\n\x0f\x42uildingCatalog\"s\n\x12\x42uildingCatalogMsg\x12\x31\n\x04info\x18\x01 
\x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.BuildingCatalog\"-\n\x18\x44\x61tabaseErrorRunningHook\x12\x11\n\thook_type\x18\x01 \x01(\t\"\x85\x01\n\x1b\x44\x61tabaseErrorRunningHookMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.DatabaseErrorRunningHook\"4\n\x0cHooksRunning\x12\x11\n\tnum_hooks\x18\x01 \x01(\x05\x12\x11\n\thook_type\x18\x02 \x01(\t\"m\n\x0fHooksRunningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.HooksRunning\"T\n\x14\x46inishedRunningStats\x12\x11\n\tstat_line\x18\x01 \x01(\t\x12\x11\n\texecution\x18\x02 \x01(\t\x12\x16\n\x0e\x65xecution_time\x18\x03 \x01(\x02\"}\n\x17\x46inishedRunningStatsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.FinishedRunningStats\"<\n\x15\x43onstraintNotEnforced\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x7f\n\x18\x43onstraintNotEnforcedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.ConstraintNotEnforced\"=\n\x16\x43onstraintNotSupported\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x81\x01\n\x19\x43onstraintNotSupportedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.ConstraintNotSupportedb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x61\x64\x61pter_types.proto\x12\x0bproto_types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xab\x02\n\x16\x41\x64\x61pterCommonEventInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 
\x01(\t\x12\x0b\n\x03msg\x18\x03 \x01(\t\x12\r\n\x05level\x18\x04 \x01(\t\x12\x15\n\rinvocation_id\x18\x05 \x01(\t\x12\x0b\n\x03pid\x18\x06 \x01(\x05\x12\x0e\n\x06thread\x18\x07 \x01(\t\x12&\n\x02ts\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12=\n\x05\x65xtra\x18\t \x03(\x0b\x32..proto_types.AdapterCommonEventInfo.ExtraEntry\x12\x10\n\x08\x63\x61tegory\x18\n \x01(\t\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"]\n\x13\x41\x64\x61pterNodeRelation\x12\x10\n\x08\x64\x61tabase\x18\n \x01(\t\x12\x0e\n\x06schema\x18\x0b \x01(\t\x12\r\n\x05\x61lias\x18\x0c \x01(\t\x12\x15\n\rrelation_name\x18\r \x01(\t\"\x9f\x02\n\x0f\x41\x64\x61pterNodeInfo\x12\x11\n\tnode_path\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x11\n\tunique_id\x18\x03 \x01(\t\x12\x15\n\rresource_type\x18\x04 \x01(\t\x12\x14\n\x0cmaterialized\x18\x05 \x01(\t\x12\x13\n\x0bnode_status\x18\x06 \x01(\t\x12\x17\n\x0fnode_started_at\x18\x07 \x01(\t\x12\x18\n\x10node_finished_at\x18\x08 \x01(\t\x12%\n\x04meta\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x37\n\rnode_relation\x18\n \x01(\x0b\x32 .proto_types.AdapterNodeRelation\"G\n\x0fReferenceKeyMsg\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12\x12\n\nidentifier\x18\x03 \x01(\t\"?\n\x19\x41\x64\x61pterDeprecationWarning\x12\x10\n\x08old_name\x18\x01 \x01(\t\x12\x10\n\x08new_name\x18\x02 \x01(\t\"\x87\x01\n\x1c\x41\x64\x61pterDeprecationWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.AdapterDeprecationWarning\"!\n\x1f\x43ollectFreshnessReturnSignature\"\x93\x01\n\"CollectFreshnessReturnSignatureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.CollectFreshnessReturnSignature\"\x8e\x01\n\x11\x41\x64\x61pterEventDebug\x12/\n\tnode_info\x18\x01 
\x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"w\n\x14\x41\x64\x61pterEventDebugMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventDebug\"\x8d\x01\n\x10\x41\x64\x61pterEventInfo\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"u\n\x13\x41\x64\x61pterEventInfoMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.AdapterEventInfo\"\x90\x01\n\x13\x41\x64\x61pterEventWarning\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"{\n\x16\x41\x64\x61pterEventWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.AdapterEventWarning\"\xa0\x01\n\x11\x41\x64\x61pterEventError\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x10\n\x08\x65xc_info\x18\x05 \x01(\t\"w\n\x14\x41\x64\x61pterEventErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventError\"f\n\rNewConnection\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 
\x01(\t\"o\n\x10NewConnectionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.NewConnection\"=\n\x10\x43onnectionReused\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x16\n\x0eorig_conn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionReusedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionReused\"0\n\x1b\x43onnectionLeftOpenInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x8b\x01\n\x1e\x43onnectionLeftOpenInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.ConnectionLeftOpenInCleanup\".\n\x19\x43onnectionClosedInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x87\x01\n\x1c\x43onnectionClosedInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.ConnectionClosedInCleanup\"f\n\x0eRollbackFailed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"q\n\x11RollbackFailedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.RollbackFailed\"V\n\x10\x43onnectionClosed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionClosedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionClosed\"X\n\x12\x43onnectionLeftOpen\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"y\n\x15\x43onnectionLeftOpenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\x1f.proto_types.ConnectionLeftOpen\"N\n\x08Rollback\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"e\n\x0bRollbackMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.Rollback\"@\n\tCacheMiss\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\"g\n\x0c\x43\x61\x63heMissMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.CacheMiss\"b\n\rListRelations\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12/\n\trelations\x18\x03 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10ListRelationsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ListRelations\"g\n\x0e\x43onnectionUsed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"q\n\x11\x43onnectionUsedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.ConnectionUsed\"[\n\x08SQLQuery\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x0b\n\x03sql\x18\x03 \x01(\t\"e\n\x0bSQLQueryMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.SQLQuery\"b\n\x0eSQLQueryStatus\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0e\n\x06status\x18\x02 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x03 \x01(\x02\"q\n\x11SQLQueryStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\x1b.proto_types.SQLQueryStatus\"O\n\tSQLCommit\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"g\n\x0cSQLCommitMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.SQLCommit\"a\n\rColTypeChange\x12\x11\n\torig_type\x18\x01 \x01(\t\x12\x10\n\x08new_type\x18\x02 \x01(\t\x12+\n\x05table\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10\x43olTypeChangeMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ColTypeChange\"@\n\x0eSchemaCreation\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"q\n\x11SchemaCreationMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SchemaCreation\"<\n\nSchemaDrop\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"i\n\rSchemaDropMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.SchemaDrop\"\xde\x01\n\x0b\x43\x61\x63heAction\x12\x0e\n\x06\x61\x63tion\x18\x01 \x01(\t\x12-\n\x07ref_key\x18\x02 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_2\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_3\x18\x04 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12.\n\x08ref_list\x18\x05 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"k\n\x0e\x43\x61\x63heActionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.CacheAction\"\x98\x01\n\x0e\x43\x61\x63heDumpGraph\x12\x33\n\x04\x64ump\x18\x01 \x03(\x0b\x32%.proto_types.CacheDumpGraph.DumpEntry\x12\x14\n\x0c\x62\x65\x66ore_after\x18\x02 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x03 
\x01(\t\x1a+\n\tDumpEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"q\n\x11\x43\x61\x63heDumpGraphMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CacheDumpGraph\"B\n\x11\x41\x64\x61pterRegistered\x12\x14\n\x0c\x61\x64\x61pter_name\x18\x01 \x01(\t\x12\x17\n\x0f\x61\x64\x61pter_version\x18\x02 \x01(\t\"w\n\x14\x41\x64\x61pterRegisteredMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterRegistered\"!\n\x12\x41\x64\x61pterImportError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"y\n\x15\x41\x64\x61pterImportErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.AdapterImportError\"#\n\x0fPluginLoadError\x12\x10\n\x08\x65xc_info\x18\x01 \x01(\t\"s\n\x12PluginLoadErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.PluginLoadError\"a\n\x14NewConnectionOpening\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x18\n\x10\x63onnection_state\x18\x02 \x01(\t\"}\n\x17NewConnectionOpeningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.NewConnectionOpening\"8\n\rCodeExecution\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x14\n\x0c\x63ode_content\x18\x02 \x01(\t\"o\n\x10\x43odeExecutionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.CodeExecution\"6\n\x13\x43odeExecutionStatus\x12\x0e\n\x06status\x18\x01 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x02 \x01(\x02\"{\n\x16\x43odeExecutionStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32 .proto_types.CodeExecutionStatus\"%\n\x16\x43\x61talogGenerationError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"\x81\x01\n\x19\x43\x61talogGenerationErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.CatalogGenerationError\"-\n\x13WriteCatalogFailure\x12\x16\n\x0enum_exceptions\x18\x01 \x01(\x05\"{\n\x16WriteCatalogFailureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.WriteCatalogFailure\"\x1e\n\x0e\x43\x61talogWritten\x12\x0c\n\x04path\x18\x01 \x01(\t\"q\n\x11\x43\x61talogWrittenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CatalogWritten\"\x14\n\x12\x43\x61nnotGenerateDocs\"y\n\x15\x43\x61nnotGenerateDocsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.CannotGenerateDocs\"\x11\n\x0f\x42uildingCatalog\"s\n\x12\x42uildingCatalogMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.BuildingCatalog\"-\n\x18\x44\x61tabaseErrorRunningHook\x12\x11\n\thook_type\x18\x01 \x01(\t\"\x85\x01\n\x1b\x44\x61tabaseErrorRunningHookMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.DatabaseErrorRunningHook\"4\n\x0cHooksRunning\x12\x11\n\tnum_hooks\x18\x01 \x01(\x05\x12\x11\n\thook_type\x18\x02 \x01(\t\"m\n\x0fHooksRunningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.HooksRunning\"T\n\x14\x46inishedRunningStats\x12\x11\n\tstat_line\x18\x01 \x01(\t\x12\x11\n\texecution\x18\x02 \x01(\t\x12\x16\n\x0e\x65xecution_time\x18\x03 
\x01(\x02\"}\n\x17\x46inishedRunningStatsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.FinishedRunningStats\"<\n\x15\x43onstraintNotEnforced\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x7f\n\x18\x43onstraintNotEnforcedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.ConstraintNotEnforced\"=\n\x16\x43onstraintNotSupported\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x81\x01\n\x19\x43onstraintNotSupportedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.ConstraintNotSupported\"%\n\x10TypeCodeNotFound\x12\x11\n\ttype_code\x18\x01 \x01(\x05\"u\n\x13TypeCodeNotFoundMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.TypeCodeNotFoundb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -203,4 +202,8 @@ _globals['_CONSTRAINTNOTSUPPORTED']._serialized_end=8961 _globals['_CONSTRAINTNOTSUPPORTEDMSG']._serialized_start=8964 _globals['_CONSTRAINTNOTSUPPORTEDMSG']._serialized_end=9093 + _globals['_TYPECODENOTFOUND']._serialized_start=9095 + _globals['_TYPECODENOTFOUND']._serialized_end=9132 + _globals['_TYPECODENOTFOUNDMSG']._serialized_start=9134 + _globals['_TYPECODENOTFOUNDMSG']._serialized_end=9251 # @@protoc_insertion_point(module_scope) diff --git a/dbt/adapters/events/types.py b/dbt/adapters/events/types.py index ddd435b91..7d98269d9 100644 --- a/dbt/adapters/events/types.py +++ b/dbt/adapters/events/types.py @@ -2,6 +2,7 @@ from dbt.adapters.events.base_types import ( DebugLevel, + DynamicLevel, ErrorLevel, InfoLevel, WarnLevel, @@ -281,7 +282,7 @@ def message(self) -> str: # Skipping E032, 
E033, E034 -class AdapterRegistered(InfoLevel): +class AdapterRegistered(DynamicLevel): def code(self) -> str: return "E034" @@ -421,3 +422,16 @@ def message(self) -> str: "be ignored. Set 'warn_unsupported: false' on this constraint to ignore this warning." ) return line_wrap_message(warning_tag(msg)) + + +class TypeCodeNotFound(DebugLevel): + def code(self) -> str: + return "E050" + + def message(self) -> str: + msg = ( + f"The `type_code` {self.type_code} was not recognized, which may affect error " + "messages for enforced contracts that fail as well as `Column.data_type` values " + "returned by `get_column_schema_from_query`" + ) + return line_wrap_message(warning_tag(msg)) diff --git a/dbt/adapters/factory.py b/dbt/adapters/factory.py index e5c7be788..b1854f67d 100644 --- a/dbt/adapters/factory.py +++ b/dbt/adapters/factory.py @@ -7,6 +7,7 @@ from typing import Any, Dict, List, Optional, Set, Type from dbt_common.events.functions import fire_event +from dbt_common.events.base_types import EventLevel from dbt_common.exceptions import DbtInternalError, DbtRuntimeError from dbt_common.semver import VersionSpecifier @@ -96,12 +97,18 @@ def load_plugin(self, name: str) -> Type[Credentials]: return plugin.credentials - def register_adapter(self, config: AdapterRequiredConfig, mp_context: SpawnContext) -> None: + def register_adapter( + self, + config: AdapterRequiredConfig, + mp_context: SpawnContext, + adapter_registered_log_level: Optional[EventLevel] = EventLevel.INFO, + ) -> None: adapter_name = config.credentials.type adapter_type = self.get_adapter_class_by_name(adapter_name) adapter_version = self._adapter_version(adapter_name) fire_event( - AdapterRegistered(adapter_name=adapter_name, adapter_version=adapter_version) + AdapterRegistered(adapter_name=adapter_name, adapter_version=adapter_version), + level=adapter_registered_log_level, ) with self.lock: if adapter_name in self.adapters: @@ -188,8 +195,12 @@ def get_adapter_constraint_support(self, name: 
Optional[str]) -> List[str]: FACTORY: AdapterContainer = AdapterContainer() -def register_adapter(config: AdapterRequiredConfig, mp_context: SpawnContext) -> None: - FACTORY.register_adapter(config, mp_context) +def register_adapter( + config: AdapterRequiredConfig, + mp_context: SpawnContext, + adapter_registered_log_level: Optional[EventLevel] = EventLevel.INFO, +) -> None: + FACTORY.register_adapter(config, mp_context, adapter_registered_log_level) def get_adapter(config: AdapterRequiredConfig): diff --git a/dbt/adapters/protocol.py b/dbt/adapters/protocol.py index f27394adb..352198663 100644 --- a/dbt/adapters/protocol.py +++ b/dbt/adapters/protocol.py @@ -10,10 +10,10 @@ Type, TypeVar, Tuple, + TYPE_CHECKING, ) from typing_extensions import Protocol -import agate from dbt_common.clients.jinja import MacroProtocol from dbt_common.contracts.config.base import BaseConfig @@ -25,6 +25,9 @@ from dbt.adapters.contracts.macros import MacroResolverProtocol from dbt.adapters.contracts.relation import HasQuoting, Policy, RelationConfig +if TYPE_CHECKING: + import agate + @dataclass class AdapterConfig(BaseConfig): @@ -44,8 +47,7 @@ class ColumnProtocol(Protocol): class RelationProtocol(Protocol): @classmethod - def get_default_quote_policy(cls) -> Policy: - ... + def get_default_quote_policy(cls) -> Policy: ... @classmethod def create_from( @@ -53,8 +55,7 @@ def create_from( quoting: HasQuoting, relation_config: RelationConfig, **kwargs: Any, - ) -> Self: - ... + ) -> Self: ... AdapterConfig_T = TypeVar("AdapterConfig_T", bound=AdapterConfig) @@ -70,8 +71,7 @@ def __call__( config: AdapterRequiredConfig, macro_resolver: MacroResolverProtocol, package_name: Optional[str], - ) -> Dict[str, Any]: - ... + ) -> Dict[str, Any]: ... # TODO CT-211 @@ -93,81 +93,58 @@ class AdapterProtocol( # type: ignore[misc] ConnectionManager: Type[ConnectionManager_T] connections: ConnectionManager_T - def __init__(self, config: AdapterRequiredConfig) -> None: - ... 
+ def __init__(self, config: AdapterRequiredConfig) -> None: ... - def set_macro_resolver(self, macro_resolver: MacroResolverProtocol) -> None: - ... + def set_macro_resolver(self, macro_resolver: MacroResolverProtocol) -> None: ... - def get_macro_resolver(self) -> Optional[MacroResolverProtocol]: - ... + def get_macro_resolver(self) -> Optional[MacroResolverProtocol]: ... - def clear_macro_resolver(self) -> None: - ... + def clear_macro_resolver(self) -> None: ... def set_macro_context_generator( self, macro_context_generator: MacroContextGeneratorCallable, - ) -> None: - ... + ) -> None: ... @classmethod def type(cls) -> str: pass - def set_query_header(self, query_header_context: Dict[str, Any]) -> None: - ... + def set_query_header(self, query_header_context: Dict[str, Any]) -> None: ... @staticmethod - def get_thread_identifier() -> Hashable: - ... + def get_thread_identifier() -> Hashable: ... - def get_thread_connection(self) -> Connection: - ... + def get_thread_connection(self) -> Connection: ... - def set_thread_connection(self, conn: Connection) -> None: - ... + def set_thread_connection(self, conn: Connection) -> None: ... - def get_if_exists(self) -> Optional[Connection]: - ... + def get_if_exists(self) -> Optional[Connection]: ... - def clear_thread_connection(self) -> None: - ... + def clear_thread_connection(self) -> None: ... - def clear_transaction(self) -> None: - ... + def clear_transaction(self) -> None: ... - def exception_handler(self, sql: str) -> ContextManager: - ... + def exception_handler(self, sql: str) -> ContextManager: ... - def set_connection_name(self, name: Optional[str] = None) -> Connection: - ... + def set_connection_name(self, name: Optional[str] = None) -> Connection: ... - def cancel_open(self) -> Optional[List[str]]: - ... + def cancel_open(self) -> Optional[List[str]]: ... - def open(cls, connection: Connection) -> Connection: - ... + def open(cls, connection: Connection) -> Connection: ... 
- def release(self) -> None: - ... + def release(self) -> None: ... - def cleanup_all(self) -> None: - ... + def cleanup_all(self) -> None: ... - def begin(self) -> None: - ... + def begin(self) -> None: ... - def commit(self) -> None: - ... + def commit(self) -> None: ... - def close(cls, connection: Connection) -> Connection: - ... + def close(cls, connection: Connection) -> Connection: ... - def commit_if_has_connection(self) -> None: - ... + def commit_if_has_connection(self) -> None: ... def execute( self, sql: str, auto_begin: bool = False, fetch: bool = False - ) -> Tuple[AdapterResponse, agate.Table]: - ... + ) -> Tuple[AdapterResponse, "agate.Table"]: ... diff --git a/dbt/adapters/relation_configs/README.md b/dbt/adapters/relation_configs/README.md index 6be3bc59d..22d6bf78d 100644 --- a/dbt/adapters/relation_configs/README.md +++ b/dbt/adapters/relation_configs/README.md @@ -1,6 +1,6 @@ # RelationConfig This package serves as an initial abstraction for managing the inspection of existing relations and determining -changes on those relations. It arose from the materialized view work and is currently only supporting +changes on those relations. It arose from the materialized view work and is currently only supporting materialized views for Postgres and Redshift as well as dynamic tables for Snowflake. There are three main classes in this package. diff --git a/dbt/adapters/relation_configs/config_base.py b/dbt/adapters/relation_configs/config_base.py index e8131b675..62d140595 100644 --- a/dbt/adapters/relation_configs/config_base.py +++ b/dbt/adapters/relation_configs/config_base.py @@ -1,9 +1,11 @@ from dataclasses import dataclass -from typing import Dict, Union +from typing import Dict, Union, TYPE_CHECKING -import agate from dbt_common.utils import filter_null_values +if TYPE_CHECKING: + import agate + """ This is what relation metadata from the database looks like. 
It's a dictionary because there will be @@ -18,7 +20,7 @@ ]) } """ -RelationResults = Dict[str, Union[agate.Row, agate.Table]] +RelationResults = Dict[str, Union["agate.Row", "agate.Table"]] @dataclass(frozen=True) diff --git a/dbt/adapters/relation_configs/config_change.py b/dbt/adapters/relation_configs/config_change.py index 9d3c8e012..a776dc6bc 100644 --- a/dbt/adapters/relation_configs/config_change.py +++ b/dbt/adapters/relation_configs/config_change.py @@ -16,7 +16,9 @@ class RelationConfigChangeAction(StrEnum): @dataclass(frozen=True, eq=True, unsafe_hash=True) class RelationConfigChange(RelationConfigBase, ABC): action: RelationConfigChangeAction - context: Hashable # this is usually a RelationConfig, e.g. IndexConfig, but shouldn't be limited + context: ( + Hashable # this is usually a RelationConfig, e.g. IndexConfig, but shouldn't be limited + ) @property @abstractmethod diff --git a/dbt/adapters/sql/connections.py b/dbt/adapters/sql/connections.py index 6313bb70e..9f1914429 100644 --- a/dbt/adapters/sql/connections.py +++ b/dbt/adapters/sql/connections.py @@ -1,9 +1,7 @@ import abc import time -from typing import Any, Dict, Iterable, List, Optional, Tuple +from typing import Any, Dict, Iterable, List, Optional, Tuple, TYPE_CHECKING -import agate -from dbt_common.clients.agate_helper import empty_table, table_from_data_flat from dbt_common.events.contextvars import get_node_info from dbt_common.events.functions import fire_event from dbt_common.exceptions import DbtInternalError, NotImplementedError @@ -24,6 +22,10 @@ ) from dbt.record import QueryRecord +if TYPE_CHECKING: + import agate + + class SQLConnectionManager(BaseConnectionManager): """The default connection manager with some common SQL methods implemented. 
@@ -127,7 +129,9 @@ def process_results( return [dict(zip(column_names, row)) for row in rows] @classmethod - def get_result_from_cursor(cls, cursor: Any, limit: Optional[int]) -> agate.Table: + def get_result_from_cursor(cls, cursor: Any, limit: Optional[int]) -> "agate.Table": + from dbt_common.clients.agate_helper import table_from_data_flat + data: List[Any] = [] column_names: List[str] = [] @@ -148,7 +152,9 @@ def execute( auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None, - ) -> Tuple[AdapterResponse, agate.Table]: + ) -> Tuple[AdapterResponse, "agate.Table"]: + from dbt_common.clients.agate_helper import empty_table + sql = self._add_query_comment(sql) _, cursor = self.add_query(sql, auto_begin) response = self.get_response(cursor) diff --git a/dbt/adapters/sql/impl.py b/dbt/adapters/sql/impl.py index c3a75cc65..8c6e0e8e4 100644 --- a/dbt/adapters/sql/impl.py +++ b/dbt/adapters/sql/impl.py @@ -1,6 +1,5 @@ -from typing import Any, List, Optional, Tuple, Type +from typing import Any, List, Optional, Tuple, Type, TYPE_CHECKING -import agate from dbt_common.events.functions import fire_event from dbt.adapters.base import BaseAdapter, BaseRelation, available @@ -23,6 +22,9 @@ ALTER_COLUMN_TYPE_MACRO_NAME = "alter_column_type" VALIDATE_SQL_MACRO_NAME = "validate_sql" +if TYPE_CHECKING: + import agate + class SQLAdapter(BaseAdapter): """The default adapter with the common agate conversions and some SQL @@ -65,33 +67,35 @@ def add_query( return self.connections.add_query(sql, auto_begin, bindings, abridge_sql_log) @classmethod - def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str: + def convert_text_type(cls, agate_table: "agate.Table", col_idx: int) -> str: return "text" @classmethod - def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str: + def convert_number_type(cls, agate_table: "agate.Table", col_idx: int) -> str: + import agate + # TODO CT-211 decimals = 
agate_table.aggregate(agate.MaxPrecision(col_idx)) # type: ignore[attr-defined] return "float8" if decimals else "integer" @classmethod - def convert_integer_type(cls, agate_table: agate.Table, col_idx: int) -> str: + def convert_integer_type(cls, agate_table: "agate.Table", col_idx: int) -> str: return "integer" @classmethod - def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str: + def convert_boolean_type(cls, agate_table: "agate.Table", col_idx: int) -> str: return "boolean" @classmethod - def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str: + def convert_datetime_type(cls, agate_table: "agate.Table", col_idx: int) -> str: return "timestamp without time zone" @classmethod - def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str: + def convert_date_type(cls, agate_table: "agate.Table", col_idx: int) -> str: return "date" @classmethod - def convert_time_type(cls, agate_table: agate.Table, col_idx: int) -> str: + def convert_time_type(cls, agate_table: "agate.Table", col_idx: int) -> str: return "time" @classmethod diff --git a/dbt/include/global_project/macros/adapters/columns.sql b/dbt/include/global_project/macros/adapters/columns.sql index e1099649c..663a827b1 100644 --- a/dbt/include/global_project/macros/adapters/columns.sql +++ b/dbt/include/global_project/macros/adapters/columns.sql @@ -53,7 +53,7 @@ {%- do col_naked_numeric.append(col['name']) -%} {%- endif -%} {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %} - cast(null as {{ col['data_type'] }}) as {{ col_name }}{{ ", " if not loop.last }} + {{ cast('null', col['data_type']) }} as {{ col_name }}{{ ", " if not loop.last }} {%- endfor -%} {%- if (col_err | length) > 0 -%} {{ exceptions.column_type_missing(column_names=col_err) }} diff --git a/dbt/include/global_project/macros/materializations/tests/helpers.sql b/dbt/include/global_project/macros/materializations/tests/helpers.sql index 
ead727d97..a385d1eab 100644 --- a/dbt/include/global_project/macros/materializations/tests/helpers.sql +++ b/dbt/include/global_project/macros/materializations/tests/helpers.sql @@ -41,4 +41,4 @@ dbt_internal_unit_test_expected as ( select * from dbt_internal_unit_test_actual union all select * from dbt_internal_unit_test_expected -{%- endmacro %} \ No newline at end of file +{%- endmacro %} diff --git a/dbt/include/global_project/macros/materializations/tests/unit.sql b/dbt/include/global_project/macros/materializations/tests/unit.sql index 6d7b632ca..78c6f6bc3 100644 --- a/dbt/include/global_project/macros/materializations/tests/unit.sql +++ b/dbt/include/global_project/macros/materializations/tests/unit.sql @@ -3,6 +3,7 @@ {% set relations = [] %} {% set expected_rows = config.get('expected_rows') %} + {% set expected_sql = config.get('expected_sql') %} {% set tested_expected_column_names = expected_rows[0].keys() if (expected_rows | length ) > 0 else get_columns_in_query(sql) %} %} {%- set target_relation = this.incorporate(type='table') -%} @@ -11,10 +12,13 @@ {%- set columns_in_relation = adapter.get_columns_in_relation(temp_relation) -%} {%- set column_name_to_data_types = {} -%} {%- for column in columns_in_relation -%} - {%- do column_name_to_data_types.update({column.name|lower: column.data_type}) -%} + {%- do column_name_to_data_types.update({column.name|lower: column.data_type}) -%} {%- endfor -%} - {% set unit_test_sql = get_unit_test_sql(sql, get_expected_sql(expected_rows, column_name_to_data_types), tested_expected_column_names) %} + {% if not expected_sql %} + {% set expected_sql = get_expected_sql(expected_rows, column_name_to_data_types) %} + {% endif %} + {% set unit_test_sql = get_unit_test_sql(sql, expected_sql, tested_expected_column_names) %} {% call statement('main', fetch_result=True) -%} diff --git a/dbt/include/global_project/macros/relations/materialized_view/drop.sql 
b/dbt/include/global_project/macros/relations/materialized_view/drop.sql index e60e1dc24..b218d0f3c 100644 --- a/dbt/include/global_project/macros/relations/materialized_view/drop.sql +++ b/dbt/include/global_project/macros/relations/materialized_view/drop.sql @@ -5,7 +5,7 @@ actually executes the drop, and `get_drop_sql`, which returns the template. */ #} {% macro drop_materialized_view(relation) -%} - {{ return(adapter.dispatch('drop_materialized_view', 'dbt')(relation)) }} + {{- adapter.dispatch('drop_materialized_view', 'dbt')(relation) -}} {%- endmacro %} diff --git a/dbt/include/global_project/macros/relations/table/drop.sql b/dbt/include/global_project/macros/relations/table/drop.sql index 359bab66d..d7d5941c4 100644 --- a/dbt/include/global_project/macros/relations/table/drop.sql +++ b/dbt/include/global_project/macros/relations/table/drop.sql @@ -5,7 +5,7 @@ actually executes the drop, and `get_drop_sql`, which returns the template. */ #} {% macro drop_table(relation) -%} - {{ return(adapter.dispatch('drop_table', 'dbt')(relation)) }} + {{- adapter.dispatch('drop_table', 'dbt')(relation) -}} {%- endmacro %} diff --git a/dbt/include/global_project/macros/relations/view/drop.sql b/dbt/include/global_project/macros/relations/view/drop.sql index c905f8da0..7e1924fae 100644 --- a/dbt/include/global_project/macros/relations/view/drop.sql +++ b/dbt/include/global_project/macros/relations/view/drop.sql @@ -5,7 +5,7 @@ actually executes the drop, and `get_drop_sql`, which returns the template. 
*/ #} {% macro drop_view(relation) -%} - {{ return(adapter.dispatch('drop_view', 'dbt')(relation)) }} + {{- adapter.dispatch('drop_view', 'dbt')(relation) -}} {%- endmacro %} diff --git a/dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql b/dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql index ca39a39cb..53d7a93b3 100644 --- a/dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql +++ b/dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql @@ -79,7 +79,7 @@ union all {%- endif -%} {%- set column_type = column_name_to_data_types[column_name] %} - + {#-- sanitize column_value: wrap yaml strings in quotes, apply cast --#} {%- set column_value_clean = column_value -%} {%- if column_value is string -%} diff --git a/dbt/tests/__about__.py b/dbt/tests/__about__.py deleted file mode 100644 index 759fdcd7a..000000000 --- a/dbt/tests/__about__.py +++ /dev/null @@ -1 +0,0 @@ -version = "1.8.0a2" diff --git a/pyproject.toml b/pyproject.toml index 1bc90a59d..a4b011a8f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,9 +21,10 @@ classifiers = [ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", ] dependencies = [ - "dbt-common<1.0", + "dbt-common<2.0", "pytz>=2015.7", # installed via dbt-common but used directly "agate>=1.0,<2.0", @@ -54,35 +55,16 @@ include = ["dbt/adapters", "dbt/include", "dbt/__init__.py"] [tool.hatch.envs.default] dependencies = [ "dbt_common @ git+https://github.com/dbt-labs/dbt-common.git", + 'pre-commit==3.7.0;python_version>="3.9"', + 'pre-commit==3.5.0;python_version=="3.8"', ] - -[tool.hatch.envs.lint] -detached = true -dependencies = [ - "black", - "flake8", - "Flake8-pyproject", -] -[tool.hatch.envs.lint.scripts] -all = [ - "- black-only", - "- flake8-only", -] -black-only = "python -m black ." -flake8-only = "python -m flake8 ." 
- -[tool.hatch.envs.typecheck] -dependencies = [ - "mypy", - "types-PyYAML", - "types-protobuf", - "types-pytz", -] -[tool.hatch.envs.typecheck.scripts] -all = "python -m mypy ." +[tool.hatch.envs.default.scripts] +dev = "pre-commit install" +code-quality = "pre-commit run --all-files" [tool.hatch.envs.unit-tests] dependencies = [ + "dbt_common @ git+https://github.com/dbt-labs/dbt-common.git", "pytest", "pytest-dotenv", "pytest-xdist", @@ -114,36 +96,8 @@ check-sdist = [ ] protobuf = "protoc -I=./dbt/adapters/events --python_out=./dbt/adapters/events ./dbt/adapters/events/adapter_types.proto" -[tool.black] -extend-exclude = "dbt/adapters/events/adapter_types_pb2.py" -line-length = 99 -target-version = ['py38'] - -[tool.flake8] -select = ["E", "W", "F"] -ignore = ["E203", "E501", "E741", "W503", "W504"] -exclude = [ - "dbt/adapters/events/adapter_types_pb2.py", - "tests/functional", - "venv", -] -per-file-ignores = ["*/__init__.py: F401"] - [tool.mypy] -namespace_packages = true -show_error_codes = true -explicit_package_bases = true -ignore_missing_imports = true -pretty = true mypy_path = "third-party-stubs/" -files = [ - "dbt", - "tests/unit", -] -exclude = [ - "dbt/adapters/events/adapter_types_pb2.py", - "venv", -] [[tool.mypy.overrides]] module = ["dbt.adapters.events.adapter_types_pb2"] follow_imports = "skip" diff --git a/tests/unit/test_events.py b/tests/unit/test_events.py index bda7c6f41..22e71896e 100644 --- a/tests/unit/test_events.py +++ b/tests/unit/test_events.py @@ -153,6 +153,7 @@ def test_event_codes(self): types.FinishedRunningStats(stat_line="", execution="", execution_time=0), types.ConstraintNotEnforced(constraint="", adapter=""), types.ConstraintNotSupported(constraint="", adapter=""), + types.TypeCodeNotFound(type_code=0), ] diff --git a/tests/unit/test_relation.py b/tests/unit/test_relation.py index aa9cda258..a1c01c5c1 100644 --- a/tests/unit/test_relation.py +++ b/tests/unit/test_relation.py @@ -43,25 +43,38 @@ def 
test_can_be_replaced_default(): @pytest.mark.parametrize( - "limit,expected_result", + "limit,require_alias,expected_result", [ - (None, '"test_database"."test_schema"."test_identifier"'), + (None, False, '"test_database"."test_schema"."test_identifier"'), ( 0, - '(select * from "test_database"."test_schema"."test_identifier" where false limit 0) _dbt_limit_subq', + True, + '(select * from "test_database"."test_schema"."test_identifier" where false limit 0) _dbt_limit_subq_test_identifier', ), ( 1, - '(select * from "test_database"."test_schema"."test_identifier" limit 1) _dbt_limit_subq', + True, + '(select * from "test_database"."test_schema"."test_identifier" limit 1) _dbt_limit_subq_test_identifier', + ), + ( + 0, + False, + '(select * from "test_database"."test_schema"."test_identifier" where false limit 0)', + ), + ( + 1, + False, + '(select * from "test_database"."test_schema"."test_identifier" limit 1)', ), ], ) -def test_render_limited(limit, expected_result): +def test_render_limited(limit, require_alias, expected_result): my_relation = BaseRelation.create( database="test_database", schema="test_schema", identifier="test_identifier", limit=limit, + require_alias=require_alias, ) actual_result = my_relation.render_limited() assert actual_result == expected_result