diff --git a/.changes/0.1.0/Docs-20240109-131629.yaml b/.changes/0.1.0/Docs-20240109-131629.yaml
deleted file mode 100644
index 22b2ad3f..00000000
--- a/.changes/0.1.0/Docs-20240109-131629.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-kind: Docs
-body: Configure `changie`
-time: 2024-01-09T13:16:29.763021-05:00
-custom:
- Author: mikealfare
- Issue: 16
diff --git a/.changes/0.1.0/Docs-20240109-131736.yaml b/.changes/0.1.0/Docs-20240109-131736.yaml
deleted file mode 100644
index 43186903..00000000
--- a/.changes/0.1.0/Docs-20240109-131736.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-kind: Docs
-body: Setup ADR tracking framework
-time: 2024-01-09T13:17:36.094147-05:00
-custom:
- Author: mikealfare
- Issue: "11"
diff --git a/.changes/0.1.0/Docs-20240109-131858.yaml b/.changes/0.1.0/Docs-20240109-131858.yaml
deleted file mode 100644
index decef9a7..00000000
--- a/.changes/0.1.0/Docs-20240109-131858.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-kind: Docs
-body: Create issue templates
-time: 2024-01-09T13:18:58.11819-05:00
-custom:
- Author: mikealfare
- Issue: "12"
diff --git a/.changes/0.1.0/Docs-20240109-131917.yaml b/.changes/0.1.0/Docs-20240109-131917.yaml
deleted file mode 100644
index 3c531060..00000000
--- a/.changes/0.1.0/Docs-20240109-131917.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-kind: Docs
-body: Create PR template
-time: 2024-01-09T13:19:17.749914-05:00
-custom:
- Author: mikealfare
- Issue: "13"
diff --git a/.changes/0.1.0/Features-20240212-123544.yaml b/.changes/0.1.0/Features-20240212-123544.yaml
deleted file mode 100644
index 239ad59f..00000000
--- a/.changes/0.1.0/Features-20240212-123544.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-kind: Features
-body: Update RelationConfig to capture all fields used by adapters
-time: 2024-02-12T12:35:44.653555-08:00
-custom:
- Author: colin-rogers-dbt
- Issue: "30"
diff --git a/.changes/0.1.0/Fixes-20240215-141545.yaml b/.changes/0.1.0/Fixes-20240215-141545.yaml
deleted file mode 100644
index ced62f25..00000000
--- a/.changes/0.1.0/Fixes-20240215-141545.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-kind: Fixes
-body: Ignore adapter-level support warnings for 'custom' constraints
-time: 2024-02-15T14:15:45.764145+01:00
-custom:
- Author: jtcohen6
- Issue: "90"
diff --git a/.changes/0.1.0/Fixes-20240216-135420.yaml b/.changes/0.1.0/Fixes-20240216-135420.yaml
deleted file mode 100644
index a04cd26b..00000000
--- a/.changes/0.1.0/Fixes-20240216-135420.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-kind: Fixes
-body: Make all adapter zone tests importable by removing "Test" prefix
-time: 2024-02-16T13:54:20.411864-05:00
-custom:
- Author: mikealfare
- Issue: "93"
diff --git a/.changes/0.1.0/Under the Hood-20240109-131958.yaml b/.changes/0.1.0/Under the Hood-20240109-131958.yaml
deleted file mode 100644
index a062a299..00000000
--- a/.changes/0.1.0/Under the Hood-20240109-131958.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Configure `dependabot`
-time: 2024-01-09T13:19:58.060742-05:00
-custom:
- Author: mikealfare
- Issue: "14"
diff --git a/.changes/0.1.0/Under the Hood-20240112-230236.yaml b/.changes/0.1.0/Under the Hood-20240112-230236.yaml
deleted file mode 100644
index 1470ac6e..00000000
--- a/.changes/0.1.0/Under the Hood-20240112-230236.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Implement unit testing in CI
-time: 2024-01-12T23:02:36.630106-05:00
-custom:
- Author: mikealfare
- Issue: "10"
diff --git a/.changes/0.1.0/Under the Hood-20240123-121220.yaml b/.changes/0.1.0/Under the Hood-20240123-121220.yaml
deleted file mode 100644
index 8d01f256..00000000
--- a/.changes/0.1.0/Under the Hood-20240123-121220.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Allow version to be specified in either __version__.py or __about__.py
-time: 2024-01-23T12:12:20.529147-05:00
-custom:
- Author: mikealfare
- Issue: "44"
diff --git a/.changes/0.1.0/Under the Hood-20240220-164223.yaml b/.changes/0.1.0/Under the Hood-20240220-164223.yaml
deleted file mode 100644
index eefa441e..00000000
--- a/.changes/0.1.0/Under the Hood-20240220-164223.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Remove __init__.py file from dbt.tests
-time: 2024-02-20T16:42:23.706-05:00
-custom:
- Author: gshank
- Issue: "96"
diff --git a/.changes/1.0.0.md b/.changes/1.0.0.md
index b6cc44a9..c46c8148 100644
--- a/.changes/1.0.0.md
+++ b/.changes/1.0.0.md
@@ -1,15 +1,32 @@
-## dbt-adapter 1.0.0 - April 01, 2024
+## dbt-adapters 1.0.0 - April 01, 2024
+
+### Features
+
+* Update RelationConfig to capture all fields used by adapters ([#30](https://github.com/dbt-labs/dbt-adapters/issues/30))
### Fixes
-* Add field wrapper to BaseRelation members that were missing it.
-* Add "description" and "meta" fields to RelationConfig protocol
+* Add field wrapper to BaseRelation members that were missing it. ([#108](https://github.com/dbt-labs/dbt-adapters/issues/108))
+* Add "description" and "meta" fields to RelationConfig protocol ([#119](https://github.com/dbt-labs/dbt-adapters/issues/119))
+* Ignore adapter-level support warnings for 'custom' constraints ([#90](https://github.com/dbt-labs/dbt-adapters/issues/90))
+* Make all adapter zone tests importable by removing "Test" prefix ([#93](https://github.com/dbt-labs/dbt-adapters/issues/93))
+
+### Docs
+
+* Configure `changie` ([#16](https://github.com/dbt-labs/dbt-adapters/issues/16))
+* Setup ADR tracking framework ([#11](https://github.com/dbt-labs/dbt-adapters/issues/11))
+* Create issue templates ([#12](https://github.com/dbt-labs/dbt-adapters/issues/12))
+* Create PR template ([#13](https://github.com/dbt-labs/dbt-adapters/issues/13))
### Under the Hood
-* Lazy load agate to improve dbt-core performance
-* add BaseAdapater.MAX_SCHEMA_METADATA_RELATIONS
+* Lazy load agate to improve dbt-core performance ([#125](https://github.com/dbt-labs/dbt-adapters/issues/125))
+* add BaseAdapter.MAX_SCHEMA_METADATA_RELATIONS ([#131](https://github.com/dbt-labs/dbt-adapters/issues/131))
+* Configure `dependabot` ([#14](https://github.com/dbt-labs/dbt-adapters/issues/14))
+* Implement unit testing in CI ([#22](https://github.com/dbt-labs/dbt-adapters/issues/22))
+* Allow version to be specified in either __version__.py or __about__.py ([#44](https://github.com/dbt-labs/dbt-adapters/issues/44))
+* Remove __init__.py file from dbt.tests ([#96](https://github.com/dbt-labs/dbt-adapters/issues/96))
### Security
-* Pin `black>=24.3` in `pyproject.toml`
+* Pin `black>=24.3` in `pyproject.toml` ([#140](https://github.com/dbt-labs/dbt-adapters/issues/140))
diff --git a/.changes/1.1.0.md b/.changes/1.1.0.md
new file mode 100644
index 00000000..224d8e85
--- /dev/null
+++ b/.changes/1.1.0.md
@@ -0,0 +1,29 @@
+## dbt-adapters 1.1.0 - May 01, 2024
+
+### Features
+
+* Debug log when `type_code` fails to convert to a `data_type` ([#135](https://github.com/dbt-labs/dbt-adapters/issues/135))
+* Introduce TableLastModifiedMetadataBatch and implement BaseAdapter.calculate_freshness_from_metadata_batch ([#127](https://github.com/dbt-labs/dbt-adapters/issues/127))
+* Support for sql fixtures in unit testing ([#146](https://github.com/dbt-labs/dbt-adapters/issues/146))
+* Cross-database `cast` macro ([#173](https://github.com/dbt-labs/dbt-adapters/issues/173))
+* Allow adapters to opt out of aliasing the subquery generated by render_limited ([#179](https://github.com/dbt-labs/dbt-adapters/issues/179))
+* subquery alias generated by render_limited now includes the relation name to mitigate duplicate aliasing ([#179](https://github.com/dbt-labs/dbt-adapters/issues/179))
+
+### Fixes
+
+* Fix adapter-specific cast handling for constraint enforcement ([#165](https://github.com/dbt-labs/dbt-adapters/issues/165))
+
+### Docs
+
+* Use `dbt-adapters` throughout the contributing guide ([#137](https://github.com/dbt-labs/dbt-adapters/issues/137))
+
+### Under the Hood
+
+* Add the option to set the log level of the AdapterRegistered event ([#141](https://github.com/dbt-labs/dbt-adapters/issues/141))
+* Update dependabot config to cover GHA ([#161](https://github.com/dbt-labs/dbt-adapters/issues/161))
+* Validate that dbt-core and dbt-adapters remain de-coupled ([#174](https://github.com/dbt-labs/dbt-adapters/issues/174))
+* remove dbt_version from query comment test fixture ([#184](https://github.com/dbt-labs/dbt-adapters/issues/184))
+
+### Dependencies
+
+* add support for py3.12 ([#185](https://github.com/dbt-labs/dbt-adapters/issues/185))
diff --git a/.changes/1.1.1.md b/.changes/1.1.1.md
new file mode 100644
index 00000000..9e590f94
--- /dev/null
+++ b/.changes/1.1.1.md
@@ -0,0 +1,5 @@
+## dbt-adapters 1.1.1 - May 07, 2024
+
+### Features
+
+* Enable serialization contexts ([#197](https://github.com/dbt-labs/dbt-adapters/issues/197))
diff --git a/.changes/1.2.1.md b/.changes/1.2.1.md
new file mode 100644
index 00000000..e554b90b
--- /dev/null
+++ b/.changes/1.2.1.md
@@ -0,0 +1,15 @@
+## dbt-adapters 1.2.1 - May 21, 2024
+
+### Features
+
+* Improve the compile error message in get_fixture-sql.sql when the relation or the model does not exist ([#203](https://github.com/dbt-labs/dbt-adapters/issues/203))
+* Cross-database `date` macro ([#191](https://github.com/dbt-labs/dbt-adapters/issues/191))
+
+### Fixes
+
+* Update Clone test to reflect core change removing `deferred` attribute from nodes ([#194](https://github.com/dbt-labs/dbt-adapters/issues/194))
+
+### Under the Hood
+
+* Add query recording for adapters which use SQLConnectionManager ([#195](https://github.com/dbt-labs/dbt-adapters/issues/195))
+* Improve memory efficiency of process_results() ([#217](https://github.com/dbt-labs/dbt-adapters/issues/217))
diff --git a/.changes/1.3.0.md b/.changes/1.3.0.md
new file mode 100644
index 00000000..6a23c3ba
--- /dev/null
+++ b/.changes/1.3.0.md
@@ -0,0 +1,5 @@
+## dbt-adapters 1.3.0 - June 18, 2024
+
+### Features
+
+* Add get_catalog_for_single_relation macro and capability to enable adapters to optimize catalog generation ([#231](https://github.com/dbt-labs/dbt-adapters/issues/231))
diff --git a/.changes/1.3.1.md b/.changes/1.3.1.md
new file mode 100644
index 00000000..b8ec7374
--- /dev/null
+++ b/.changes/1.3.1.md
@@ -0,0 +1 @@
+## dbt-adapters 1.3.1 - June 20, 2024
diff --git a/.changes/1.3.2.md b/.changes/1.3.2.md
new file mode 100644
index 00000000..6963a4c3
--- /dev/null
+++ b/.changes/1.3.2.md
@@ -0,0 +1,6 @@
+## dbt-adapters 1.3.2 - July 02, 2024
+
+### Under the Hood
+
+* Fix query timer resolution ([#246](https://github.com/dbt-labs/dbt-adapters/issues/246))
+* Add optional release_connection parameter to connection_named method ([#247](https://github.com/dbt-labs/dbt-adapters/issues/247))
diff --git a/.changes/1.3.3.md b/.changes/1.3.3.md
new file mode 100644
index 00000000..c62a0562
--- /dev/null
+++ b/.changes/1.3.3.md
@@ -0,0 +1,9 @@
+## dbt-adapters 1.3.3 - July 09, 2024
+
+### Fixes
+
+* Fix scenario where using the `--empty` flag causes metadata queries to contain limit clauses ([#213](https://github.com/dbt-labs/dbt-adapters/issues/213))
+
+### Under the Hood
+
+* --limit flag no longer subshells the query. This resolves the dbt Cloud experience issue where limit prevents ordering elements. ([#207](https://github.com/dbt-labs/dbt-adapters/issues/207))
diff --git a/.changes/1.4.0.md b/.changes/1.4.0.md
new file mode 100644
index 00000000..fc6279db
--- /dev/null
+++ b/.changes/1.4.0.md
@@ -0,0 +1,13 @@
+## dbt-adapters 1.4.0 - July 30, 2024
+
+### Features
+
+- render 'to' and 'to_columns' fields on foreign key constraints, and bump dbt-common lower bound to 1.6 ([#271](https://github.com/dbt-labs/dbt-adapters/issues/271))
+
+### Fixes
+
+- Incremental table varchar column definition changed ([#276](https://github.com/dbt-labs/dbt-adapters/issues/276))
+
+### Under the Hood
+
+- Rework record/replay to record at the database connection level. ([#244](https://github.com/dbt-labs/dbt-adapters/issues/244))
diff --git a/.changes/unreleased/Fixes-20240610-195300.yaml b/.changes/unreleased/Fixes-20240610-195300.yaml
new file mode 100644
index 00000000..1f8cd5a5
--- /dev/null
+++ b/.changes/unreleased/Fixes-20240610-195300.yaml
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Use model alias for the CTE identifier generated during ephemeral materialization
+time: 2024-06-10T19:53:00.086488231Z
+custom:
+ Author: jeancochrane
+ Issue: "5273"
diff --git a/.changes/unreleased/Under the Hood-20240329-093307.yaml b/.changes/unreleased/Under the Hood-20240329-093307.yaml
deleted file mode 100644
index 85a00a09..00000000
--- a/.changes/unreleased/Under the Hood-20240329-093307.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-kind: Under the Hood
-body: Add the option to set the log level of the AdapterRegistered event
-time: 2024-03-29T09:33:07.737464-05:00
-custom:
- Author: emmyoop
- Issue: "141"
diff --git a/.changes/unreleased/Under the Hood-20240801-220551.yaml b/.changes/unreleased/Under the Hood-20240801-220551.yaml
new file mode 100644
index 00000000..25b54a65
--- /dev/null
+++ b/.changes/unreleased/Under the Hood-20240801-220551.yaml
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Updating changie.yaml to add contributors and PR links
+time: 2024-08-01T22:05:51.327652-04:00
+custom:
+ Author: leahwicz
+ Issue: "219"
diff --git a/.changie.yaml b/.changie.yaml
index 9f78b81e..8f1d8615 100644
--- a/.changie.yaml
+++ b/.changie.yaml
@@ -1,20 +1,65 @@
changesDir: .changes
unreleasedDir: unreleased
headerPath: header.tpl.md
+versionHeaderPath: ""
changelogPath: CHANGELOG.md
versionExt: md
-envPrefix: CHANGIE_
-versionFormat: '## dbt-adapter {{.Version}} - {{.Time.Format "January 02, 2006"}}'
+envPrefix: "CHANGIE_"
+versionFormat: '## dbt-adapters {{.Version}} - {{.Time.Format "January 02, 2006"}}'
kindFormat: '### {{.Kind}}'
-changeFormat: '* {{.Body}}'
+changeFormat: |-
+ {{- $IssueList := list }}
+ {{- $changes := splitList " " $.Custom.Issue }}
+ {{- range $issueNbr := $changes }}
+ {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-adapters/issues/nbr)" | replace "nbr" $issueNbr }}
+ {{- $IssueList = append $IssueList $changeLink }}
+ {{- end -}}
+ - {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}})
+
kinds:
- - label: Breaking Changes
- - label: Features
- - label: Fixes
- - label: Docs
- - label: Under the Hood
- - label: Dependencies
- - label: Security
+- label: Breaking Changes
+- label: Features
+- label: Fixes
+- label: Under the Hood
+- label: Dependencies
+ changeFormat: |-
+ {{- $PRList := list }}
+ {{- $changes := splitList " " $.Custom.PR }}
+ {{- range $pullrequest := $changes }}
+ {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-adapters/pull/nbr)" | replace "nbr" $pullrequest }}
+ {{- $PRList = append $PRList $changeLink }}
+ {{- end -}}
+ - {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}})
+ skipGlobalChoices: true
+ additionalChoices:
+ - key: Author
+ label: GitHub Username(s) (separated by a single space if multiple)
+ type: string
+ minLength: 3
+ - key: PR
+ label: GitHub Pull Request Number (separated by a single space if multiple)
+ type: string
+ minLength: 1
+- label: Security
+ changeFormat: |-
+ {{- $PRList := list }}
+ {{- $changes := splitList " " $.Custom.PR }}
+ {{- range $pullrequest := $changes }}
+ {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-adapters/pull/nbr)" | replace "nbr" $pullrequest }}
+ {{- $PRList = append $PRList $changeLink }}
+ {{- end -}}
+ - {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}})
+ skipGlobalChoices: true
+ additionalChoices:
+ - key: Author
+ label: GitHub Username(s) (separated by a single space if multiple)
+ type: string
+ minLength: 3
+ - key: PR
+ label: GitHub Pull Request Number (separated by a single space if multiple)
+ type: string
+ minLength: 1
+
newlines:
afterChangelogHeader: 1
afterKind: 1
@@ -31,3 +76,57 @@ custom:
label: GitHub Issue Number (separated by a single space if multiple)
type: string
minLength: 1
+
+
+footerFormat: |
+ {{- $contributorDict := dict }}
+ {{- /* ensure all names in this list are all lowercase for later matching purposes */}}
+ {{- $core_team := splitList " " .Env.CORE_TEAM }}
+ {{- /* ensure we always skip snyk and dependabot in addition to the core team */}}
+ {{- $maintainers := list "dependabot[bot]" "snyk-bot"}}
+ {{- range $team_member := $core_team }}
+ {{- $team_member_lower := lower $team_member }}
+ {{- $maintainers = append $maintainers $team_member_lower }}
+ {{- end }}
+ {{- range $change := .Changes }}
+ {{- $authorList := splitList " " $change.Custom.Author }}
+ {{- /* loop through all authors for a single changelog */}}
+ {{- range $author := $authorList }}
+ {{- $authorLower := lower $author }}
+ {{- /* we only want to include non-core team contributors */}}
+ {{- if not (has $authorLower $maintainers)}}
+ {{- $changeList := splitList " " $change.Custom.Author }}
+ {{- $IssueList := list }}
+ {{- $changeLink := $change.Kind }}
+ {{- if or (eq $change.Kind "Dependencies") (eq $change.Kind "Security") }}
+ {{- $changes := splitList " " $change.Custom.PR }}
+ {{- range $issueNbr := $changes }}
+ {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-adapters/pull/nbr)" | replace "nbr" $issueNbr }}
+ {{- $IssueList = append $IssueList $changeLink }}
+ {{- end -}}
+ {{- else }}
+ {{- $changes := splitList " " $change.Custom.Issue }}
+ {{- range $issueNbr := $changes }}
+ {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-adapters/issues/nbr)" | replace "nbr" $issueNbr }}
+ {{- $IssueList = append $IssueList $changeLink }}
+ {{- end -}}
+ {{- end }}
+ {{- /* check if this contributor has other changes associated with them already */}}
+ {{- if hasKey $contributorDict $author }}
+ {{- $contributionList := get $contributorDict $author }}
+ {{- $contributionList = concat $contributionList $IssueList }}
+ {{- $contributorDict := set $contributorDict $author $contributionList }}
+ {{- else }}
+ {{- $contributionList := $IssueList }}
+ {{- $contributorDict := set $contributorDict $author $contributionList }}
+ {{- end }}
+ {{- end}}
+ {{- end}}
+ {{- end }}
+ {{- /* no indentation here for formatting so the final markdown doesn't have unneeded indentations */}}
+ {{- if $contributorDict}}
+ ### Contributors
+ {{- range $k,$v := $contributorDict }}
+ - [@{{$k}}](https://github.com/{{$k}}) ({{ range $index, $element := $v }}{{if $index}}, {{end}}{{$element}}{{end}})
+ {{- end }}
+ {{- end }}
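The `changeFormat` and `footerFormat` templates above share one pattern: split a space-separated custom field (`Issue` or `PR`) and turn each number into a markdown link; `footerFormat` additionally groups those links by non-maintainer author to build a `### Contributors` section. Below is a minimal Python sketch of that rendering, for illustration only — changie does this in Go templates, and the function names here are hypothetical:

```python
# Illustration only: mirrors the link-building logic of the changie templates above.

def link_base(kind: str) -> str:
    # Dependencies and Security entries link to pull requests;
    # every other kind links to issues.
    return "pull" if kind in ("Dependencies", "Security") else "issues"

def render_change(body: str, numbers: str, kind: str = "Fixes") -> str:
    # `numbers` is the space-separated Custom.Issue (or Custom.PR) field
    links = [
        f"[#{n}](https://github.com/dbt-labs/dbt-adapters/{link_base(kind)}/{n})"
        for n in numbers.split(" ")
    ]
    return f"- {body} ({', '.join(links)})"

print(render_change("Create issue templates", "12"))
# - Create issue templates ([#12](https://github.com/dbt-labs/dbt-adapters/issues/12))
```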
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 00000000..02ed72d4
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1,3 @@
+# This codeowners file is used to ensure all PRs require reviews from the adapters team
+
+* @dbt-labs/adapters
diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml
index 3bd90bf7..27cb521e 100644
--- a/.github/ISSUE_TEMPLATE/bug-report.yml
+++ b/.github/ISSUE_TEMPLATE/bug-report.yml
@@ -1,5 +1,5 @@
name: 🐞 Bug
-description: Report a bug or an issue you've found with dbt-adapter
+description: Report a bug or an issue you've found with dbt-adapters
title: "[Bug]
"
labels: ["bug", "triage"]
body:
@@ -62,11 +62,11 @@ body:
examples:
- **OS**: Ubuntu 20.04
- **Python**: 3.11.6 (`python3 --version`)
- - **dbt-adapter**: 1.0.0
+ - **dbt-adapters**: 1.0.0
value: |
- OS:
- Python:
- - dbt-adapter:
+ - dbt-adapters:
render: markdown
validations:
required: false
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
index 2e23e0fd..a89889af 100644
--- a/.github/ISSUE_TEMPLATE/config.yml
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -7,5 +7,5 @@ contact_links:
url: mailto:support@getdbt.com
about: Are you using dbt Cloud? Contact our support team for help!
- name: Participate in Discussions
- url: https://github.com/dbt-labs/dbt-adapter/discussions
- about: Do you have a Big Idea for dbt-adapter? Read open discussions, or start a new one
+ url: https://github.com/dbt-labs/dbt-adapters/discussions
+ about: Do you have a Big Idea for dbt-adapters? Read open discussions, or start a new one
diff --git a/.github/ISSUE_TEMPLATE/feature-request.yml b/.github/ISSUE_TEMPLATE/feature-request.yml
index 25b28aae..22960c2d 100644
--- a/.github/ISSUE_TEMPLATE/feature-request.yml
+++ b/.github/ISSUE_TEMPLATE/feature-request.yml
@@ -1,5 +1,5 @@
name: ✨ Feature
-description: Propose a straightforward extension of dbt-adapter functionality
+description: Propose a straightforward extension of dbt-adapters functionality
title: "[Feature] "
labels: ["enhancement", "triage"]
body:
@@ -14,15 +14,15 @@ body:
We want to make sure that features are distinct and discoverable,
so that other members of the community can find them and offer their thoughts.
- Issues are the right place to request straightforward extensions of existing dbt-adapter functionality.
- For "big ideas" about future capabilities of dbt-adapter, we ask that you open a
- [discussion](https://github.com/dbt-labs/dbt-adapter/discussions/new?category=ideas) in the "Ideas" category instead.
+ Issues are the right place to request straightforward extensions of existing dbt-adapters functionality.
+ For "big ideas" about future capabilities of dbt-adapters, we ask that you open a
+ [discussion](https://github.com/dbt-labs/dbt-adapters/discussions/new?category=ideas) in the "Ideas" category instead.
options:
- label: I have read the [expectations for open source contributors](https://docs.getdbt.com/docs/contributing/oss-expectations)
required: true
- label: I have searched the existing issues, and I could not find an existing issue for this feature
required: true
- - label: I am requesting a straightforward extension of existing dbt-adapter functionality, rather than a Big Idea better suited to a discussion
+ - label: I am requesting a straightforward extension of existing dbt-adapters functionality, rather than a Big Idea better suited to a discussion
required: true
- type: textarea
attributes:
diff --git a/.github/ISSUE_TEMPLATE/internal-epic.yml b/.github/ISSUE_TEMPLATE/internal-epic.yml
index 2f3bfaa7..8cfb3aef 100644
--- a/.github/ISSUE_TEMPLATE/internal-epic.yml
+++ b/.github/ISSUE_TEMPLATE/internal-epic.yml
@@ -30,7 +30,7 @@ body:
label: Objectives
description: |
What are the high level goals we are trying to achieve? Provide use cases if available.
-
+
Example:
- [ ] Allow adapter maintainers to support custom materializations
- [ ] Reduce maintenance burden for incremental users by offering materialized views
@@ -48,7 +48,7 @@ body:
Provide a list of GH issues that will build out this functionality.
This may start empty, or as a checklist of items.
However, it should eventually become a list of Feature Implementation tickets.
-
+
Example:
- [ ] Create new macro to select warehouse
- [ ] https://github.com/dbt-labs/dbt-adapters/issues/42
@@ -66,7 +66,7 @@ body:
Provide a list of relevant documentation. Is there a proof of concept?
Does this require any RFCs, ADRs, etc.?
If the documentation exists, link it; if it does not exist yet, reference it descriptively.
-
+
Example:
- [ ] RFC for updating connection interface to accept new parameters
- [ ] POC: https://github.com/dbt-labs/dbt-adapters/pull/42
diff --git a/.github/ISSUE_TEMPLATE/internal-feature-implementation.yml b/.github/ISSUE_TEMPLATE/internal-feature-implementation.yml
index ab3c4ffc..7a99365b 100644
--- a/.github/ISSUE_TEMPLATE/internal-feature-implementation.yml
+++ b/.github/ISSUE_TEMPLATE/internal-feature-implementation.yml
@@ -41,7 +41,7 @@ body:
label: Acceptance criteria
description: |
What is the definition of done for this feature? Include any relevant edge cases and/or test cases.
-
+
Example:
- [ ] If there are no config changes, don't alter the materialized view
- [ ] If the materialized view is scheduled to refresh, a manual refresh should not be issued
@@ -58,7 +58,7 @@ body:
description: |
Provide scenarios to test. Include both positive and negative tests if possible.
Link to existing similar tests if appropriate.
-
+
Example:
- [ ] Test with no `materialized` field in the model config. Expect pass.
- [ ] Test with a `materialized` field in the model config that is not valid. Expect ConfigError.
@@ -68,7 +68,7 @@ body:
```
validations:
required: true
-
+
- type: textarea
attributes:
label: Security
diff --git a/.github/ISSUE_TEMPLATE/regression-report.yml b/.github/ISSUE_TEMPLATE/regression-report.yml
index 01775507..6831ede2 100644
--- a/.github/ISSUE_TEMPLATE/regression-report.yml
+++ b/.github/ISSUE_TEMPLATE/regression-report.yml
@@ -1,5 +1,5 @@
name: ☣️ Regression
-description: Report a regression you've observed in a newer version of dbt-adapter
+description: Report a regression you've observed in a newer version of dbt-adapters
title: "[Regression] "
labels: ["regression", "triage"]
body:
@@ -57,13 +57,13 @@ body:
examples:
- **OS**: Ubuntu 20.04
- **Python**: 3.11.6 (`python3 --version`)
- - **dbt-adapter (working version)**: 1.1.0
- - **dbt-adapter (regression version)**: 1.2.0
+ - **dbt-adapters (working version)**: 1.1.0
+ - **dbt-adapters (regression version)**: 1.2.0
value: |
- OS:
- Python:
- - dbt-adapter (working version):
- - dbt-adapter (regression version):
+ - dbt-adapters (working version):
+ - dbt-adapters (regression version):
render: markdown
validations:
required: true
diff --git a/.github/actions/build-hatch/action.yml b/.github/actions/build-hatch/action.yml
index fe9825d4..6d81339a 100644
--- a/.github/actions/build-hatch/action.yml
+++ b/.github/actions/build-hatch/action.yml
@@ -13,7 +13,7 @@ inputs:
default: "./"
archive-name:
description: Where to upload the artifacts
- required: true
+ default: ""
runs:
using: composite
@@ -30,7 +30,8 @@ runs:
working-directory: ${{ inputs.working-dir }}
- name: Upload artifacts
- uses: actions/upload-artifact@v3
+ if: ${{ inputs.archive-name != '' }}
+ uses: actions/upload-artifact@v4
with:
name: ${{ inputs.archive-name }}
path: ${{ inputs.working-dir }}dist/
diff --git a/.github/actions/publish-pypi/action.yml b/.github/actions/publish-pypi/action.yml
index deffc6e3..25bc3a8d 100644
--- a/.github/actions/publish-pypi/action.yml
+++ b/.github/actions/publish-pypi/action.yml
@@ -14,7 +14,7 @@ runs:
steps:
- name: Download artifacts
- uses: actions/download-artifact@v3
+ uses: actions/download-artifact@v4
with:
name: ${{ inputs.archive-name }}
path: dist/
diff --git a/.github/actions/publish-results/action.yml b/.github/actions/publish-results/action.yml
index d863d659..0d5cb7e6 100644
--- a/.github/actions/publish-results/action.yml
+++ b/.github/actions/publish-results/action.yml
@@ -19,7 +19,7 @@ runs:
run: echo "ts=$(date +'%Y-%m-%dT%H-%M-%S')" >> $GITHUB_OUTPUT #no colons allowed for artifacts
shell: bash
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
with:
name: ${{ inputs.file-name }}_python-${{ inputs.python-version }}_${{ steps.timestamp.outputs.ts }}.csv
path: ${{ inputs.source-file }}
diff --git a/.github/actions/setup-hatch/action.yml b/.github/actions/setup-hatch/action.yml
index 7b7780ef..6bf8ea10 100644
--- a/.github/actions/setup-hatch/action.yml
+++ b/.github/actions/setup-hatch/action.yml
@@ -13,10 +13,18 @@ runs:
using: composite
steps:
- name: Set up Python ${{ inputs.python-version }}
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: ${{ inputs.python-version }}
- name: Install dev dependencies
+ shell: bash
run: ${{ inputs.setup-command }}
+
+ - name: Add brew to the PATH
+ shell: bash
+ run: echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
+
+ - name: Install pre-commit
shell: bash
+ run: brew install pre-commit
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 2a6f3449..907926a3 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -1,8 +1,29 @@
version: 2
updates:
- # python dependencies
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "daily"
rebase-strategy: "disabled"
+ ignore:
+ - dependency-name: "*"
+ update-types:
+ - version-update:semver-patch
+ - package-ecosystem: "pip"
+ directory: "/dbt-tests-adapter"
+ schedule:
+ interval: "daily"
+ rebase-strategy: "disabled"
+ ignore:
+ - dependency-name: "*"
+ update-types:
+ - version-update:semver-patch
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ schedule:
+ interval: "weekly"
+ rebase-strategy: "disabled"
+ ignore:
+ - dependency-name: "*"
+ update-types:
+ - version-update:semver-patch
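In plain terms, all three ecosystems above share one ignore rule: skip patch-level bumps for every dependency. A one-line sketch of that policy follows (illustrative only; dependabot's real decision logic is far richer):

```python
# Illustration only: the shared ignore rule in the dependabot config above.

def would_open_pr(update_type: str) -> bool:
    # patch-level version bumps are ignored for every dependency ("*")
    return update_type != "version-update:semver-patch"

assert would_open_pr("version-update:semver-minor")
assert not would_open_pr("version-update:semver-patch")
```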
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index 4fc2fcf8..3879b653 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -29,7 +29,7 @@ resolves #
### Checklist
-- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-adapter/blob/main/CONTRIBUTING.md) and understand what's expected of me
+- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-adapters/blob/main/CONTRIBUTING.md) and understand what's expected of me
- [ ] I have run this code in development, and it appears to resolve the stated issue
- [ ] This PR includes tests, or tests are not required/relevant for this PR
- [ ] This PR has no interface changes (e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc.) or this PR has already received feedback and approval from Product or DX
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 33d94ff4..00afd704 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -20,11 +20,6 @@ on:
types: [checks_requested]
workflow_dispatch:
workflow_call:
- inputs:
- changelog_path:
- description: "Path to changelog file"
- required: true
- type: string
permissions: read-all
@@ -51,35 +46,9 @@ jobs:
uses: ./.github/actions/setup-hatch
- name: Build `dbt-adapters`
- if: ${{ inputs.package == 'dbt-adapters' }}
uses: ./.github/actions/build-hatch
- name: Build `dbt-tests-adapter`
- if: ${{ inputs.package == 'dbt-tests-adapter' }}
uses: ./.github/actions/build-hatch
with:
working-dir: "./dbt-tests-adapter/"
-
- - name: Setup `hatch`
- uses: ./.github/actions/setup-hatch
-
- - name: Build `dbt-adapters`
- if: ${{ inputs.package == 'dbt-adapters' }}
- uses: ./.github/actions/build-hatch
-
- - name: Build `dbt-tests-adapter`
- if: ${{ inputs.package == 'dbt-tests-adapter' }}
- uses: ./.github/actions/build-hatch
- with:
- working-dir: "./dbt-tests-adapter/"
-
- # this step is only needed for the release process
- - name: "Upload Build Artifact"
- if: ${{ github.event_name == 'workflow_call' }}
- uses: actions/upload-artifact@v3
- with:
- name: ${{ steps.version.outputs.version_number }}
- path: |
- ${{ inputs.changelog_path }}
- ./dist/
- retention-days: 3
diff --git a/.github/workflows/changelog-existence.yml b/.github/workflows/changelog-existence.yml
index d778f565..8732177f 100644
--- a/.github/workflows/changelog-existence.yml
+++ b/.github/workflows/changelog-existence.yml
@@ -19,9 +19,6 @@ name: Check Changelog Entry
on:
pull_request_target:
types: [opened, reopened, labeled, unlabeled, synchronize]
- paths-ignore: ['.changes/**', '.github/**', 'tests/**', 'third-party-stubs/**', '**.md', '**.yml']
-
- workflow_dispatch:
defaults:
run:
diff --git a/.github/workflows/code-quality.yml b/.github/workflows/code-quality.yml
index 4f5b392e..9c203847 100644
--- a/.github/workflows/code-quality.yml
+++ b/.github/workflows/code-quality.yml
@@ -10,17 +10,13 @@ on:
permissions: read-all
-defaults:
- run:
- shell: bash
-
# will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }}
cancel-in-progress: true
jobs:
- lint:
+ code-quality:
name: Code Quality
runs-on: ubuntu-latest
@@ -33,8 +29,6 @@ jobs:
- name: Setup `hatch`
uses: ./.github/actions/setup-hatch
- - name: Run linters
- run: hatch run lint:all
-
- - name: Run typechecks
- run: hatch run typecheck:all
+ - name: Run code quality
+ shell: bash
+ run: hatch run code-quality
diff --git a/.github/workflows/docs-issue.yml b/.github/workflows/docs-issue.yml
new file mode 100644
index 00000000..f49cf517
--- /dev/null
+++ b/.github/workflows/docs-issue.yml
@@ -0,0 +1,41 @@
+# **what?**
+# Open an issue in docs.getdbt.com when an issue is labeled `user docs` and closed as completed
+
+# **why?**
+# To reduce barriers for keeping docs up to date
+
+# **when?**
+# When an issue is labeled `user docs` and is closed as completed. Can be labeled before or after the issue is closed.
+
+
+name: Open issues in docs.getdbt.com repo when an issue is labeled
+run-name: "Open an issue in docs.getdbt.com for issue #${{ github.event.issue.number }}"
+
+on:
+ issues:
+ types: [labeled, closed]
+
+defaults:
+ run:
+ shell: bash
+
+permissions:
+ issues: write # comments on issues
+
+jobs:
+ open_issues:
+ # we only want to run this when the issue is closed as completed and the label `user docs` has been assigned.
+    # Without this guard, duplicate issues could be created: the close event and the label
+    # event can both trigger this workflow, with neither run generating the comment before
+    # the other. This lives here instead of the shared workflow because this is where we
+    # decide if it should run or not.
+ if: |
+ (github.event.issue.state == 'closed' && github.event.issue.state_reason == 'completed') && (
+ (github.event.action == 'closed' && contains(github.event.issue.labels.*.name, 'user docs')) ||
+ (github.event.action == 'labeled' && github.event.label.name == 'user docs'))
+ uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
+ with:
+ issue_repository: "dbt-labs/docs.getdbt.com"
+ issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} Issue #${{ github.event.issue.number }}"
+ issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated."
+ secrets: inherit
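The `if:` expression above encodes the deduplication guard described in the workflow's comments. Sketched in Python for readability (the helper and its parameters are illustrative, not part of the workflow):

```python
# Illustration only: the gating logic of the docs-issue workflow's `if:` expression.

def should_open_docs_issue(
    action: str,            # "labeled" or "closed"
    state: str,             # issue state at event time
    state_reason: str,      # "completed", "not_planned", ...
    labels: list[str],      # labels currently on the issue
    new_label: str | None,  # the label that triggered a "labeled" event, if any
) -> bool:
    closed_as_completed = state == "closed" and state_reason == "completed"
    closed_with_label = action == "closed" and "user docs" in labels
    labeled_after_close = action == "labeled" and new_label == "user docs"
    return closed_as_completed and (closed_with_label or labeled_after_close)
```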
diff --git a/.github/workflows/github-release.yml b/.github/workflows/github-release.yml
index fd20d9ab..ad0cc2d8 100644
--- a/.github/workflows/github-release.yml
+++ b/.github/workflows/github-release.yml
@@ -208,7 +208,7 @@ jobs:
ref: ${{ inputs.sha }}
- name: "Download Artifact ${{ inputs.archive_name }}"
- uses: actions/download-artifact@v3
+ uses: actions/download-artifact@v4
with:
name: ${{ inputs.archive_name }}
path: dist/
@@ -256,4 +256,4 @@ jobs:
RELEASE_NOTES: ${{ inputs.changelog_path }}
COMMIT: ${{ inputs.sha }}
PRERELEASE: ${{ steps.release_type.outputs.prerelease }}
- DRAFT: ${{ steps.draft.outputs.draft }}
\ No newline at end of file
+ DRAFT: ${{ steps.draft.outputs.draft }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 1135adb8..828350dd 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -151,7 +151,7 @@ jobs:
github-release:
name: "GitHub Release"
- # ToDo: update GH release to handle adding dbt-tests-adapter and dbt-adapter assets to the same release
+ # ToDo: update GH release to handle adding dbt-tests-adapter and dbt-adapters assets to the same release
if: ${{ !failure() && !cancelled() && inputs.package == 'dbt-adapters' }}
needs: [release-inputs, build-and-test, bump-version-generate-changelog]
uses: dbt-labs/dbt-adapters/.github/workflows/github-release.yml@main
diff --git a/.github/workflows/release_prep_hatch.yml b/.github/workflows/release_prep_hatch.yml
index 32a267e0..a6105786 100644
--- a/.github/workflows/release_prep_hatch.yml
+++ b/.github/workflows/release_prep_hatch.yml
@@ -34,7 +34,7 @@
#
name: Version Bump and Changelog Generation
-run-name: Bump ${{ inputs.package }}==${{ inputs.version_number }} for release to ${{ inputs.deploy_to }} and generate changelog
+run-name: Bump to ${{ inputs.version_number }} for release to ${{ inputs.deploy_to }} and generate changelog
on:
workflow_call:
inputs:
@@ -131,7 +131,7 @@ jobs:
- name: "Audit Version And Parse Into Parts"
id: semver
- uses: dbt-labs/actions/parse-semver@v1.1.0
+ uses: dbt-labs/actions/parse-semver@v1.1.1
with:
version: ${{ inputs.version_number }}
@@ -288,7 +288,7 @@ jobs:
steps:
- name: "Checkout ${{ github.repository }} Branch ${{ needs.create-temp-branch.outputs.branch_name }}"
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
ref: ${{ needs.create-temp-branch.outputs.branch_name }}
- name: Setup `hatch`
@@ -392,13 +392,13 @@ jobs:
steps:
- name: "Checkout ${{ github.repository }} Branch ${{ needs.create-temp-branch.outputs.branch_name }}"
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
ref: ${{ needs.create-temp-branch.outputs.branch_name }}
- name: "Setup `hatch`"
uses: ./.github/actions/setup-hatch
- name: "Run Unit Tests"
- run: hatch run unit-tests:all
+ run: hatch run unit-tests
run-integration-tests:
runs-on: ubuntu-20.04
@@ -407,7 +407,7 @@ jobs:
steps:
- name: "Checkout ${{ github.repository }} Branch ${{ needs.create-temp-branch.outputs.branch_name }}"
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
ref: ${{ needs.create-temp-branch.outputs.branch_name }}
@@ -447,7 +447,7 @@ jobs:
python-version: ${{ env.PYTHON_TARGET_VERSION }}
- name: Run tests
- run: hatch run integration-tests:all
+ run: hatch run integration-tests
merge-changes-into-target-branch:
runs-on: ubuntu-latest
@@ -467,7 +467,7 @@ jobs:
echo needs.audit-changelog.outputs.exists: ${{ needs.audit-changelog.outputs.exists }}
echo needs.audit-version-in-code.outputs.up_to_date: ${{ needs.audit-version-in-code.outputs.up_to_date }}
- name: "Checkout Repo ${{ github.repository }}"
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: "Merge Changes Into ${{ inputs.target_branch }}"
uses: everlytic/branch-merge@1.1.5
@@ -524,7 +524,7 @@ jobs:
message="The ${{ steps.resolve_branch.outputs.target_branch }} branch will be used for release"
echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message"
- name: "Checkout Resolved Branch - ${{ steps.resolve_branch.outputs.target_branch }}"
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
ref: ${{ steps.resolve_branch.outputs.target_branch }}
@@ -539,4 +539,4 @@ jobs:
- name: "Remove Temp Branch - ${{ needs.create-temp-branch.outputs.branch_name }}"
if: ${{ inputs.deploy_to == 'prod' && inputs.nightly_release == 'false' && needs.create-temp-branch.outputs.branch_name != '' }}
run: |
- git push origin -d ${{ needs.create-temp-branch.outputs.branch_name }}
\ No newline at end of file
+ git push origin -d ${{ needs.create-temp-branch.outputs.branch_name }}
diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml
index 26ff4aaa..b61c83d7 100644
--- a/.github/workflows/unit-tests.yml
+++ b/.github/workflows/unit-tests.yml
@@ -23,7 +23,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ["3.8", "3.9", "3.10", "3.11"]
+ python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
steps:
- name: Check out repository
@@ -37,7 +37,7 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Run unit tests
- run: hatch run unit-tests:all
+ run: hatch run unit-tests
shell: bash
- name: Publish results
diff --git a/.gitignore b/.gitignore
index cf98fcf8..29c470c5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -153,4 +153,14 @@ dmypy.json
cython_debug/
# PyCharm
-.idea/
\ No newline at end of file
+.idea/
+
+# MacOS
+.DS_Store
+
+# VSCode
+.vscode/
+.venv/
+
+# Vim
+*.swp
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 3d80b955..caf34209 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,63 +1,57 @@
-# For more on configuring pre-commit hooks (see https://pre-commit.com/)
-
-# Force all unspecified python hooks to run python 3.8
default_language_version:
- python: python3
+ python: python3
repos:
-- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.4.0
- hooks:
- - id: check-yaml
- args: [--unsafe]
- - id: check-json
- - id: end-of-file-fixer
- - id: trailing-whitespace
- - id: check-case-conflict
-- repo: https://github.com/psf/black
- rev: 23.1.0
- hooks:
- - id: black
- additional_dependencies: ['click~=8.1']
- args:
- - "--line-length=99"
- - "--target-version=py38"
- - id: black
- alias: black-check
- stages: [manual]
- additional_dependencies: ['click~=8.1']
- args:
- - "--line-length=99"
- - "--target-version=py38"
- - "--check"
- - "--diff"
-- repo: https://github.com/pycqa/flake8
- rev: 6.0.0
- hooks:
- - id: flake8
- - id: flake8
- alias: flake8-check
- stages: [manual]
-- repo: https://github.com/pre-commit/mirrors-mypy
- rev: v1.1.1
- hooks:
- - id: mypy
- # N.B.: Mypy is... a bit fragile.
- #
- # By using `language: system` we run this hook in the local
- # environment instead of a pre-commit isolated one. This is needed
- # to ensure mypy correctly parses the project.
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.6.0
+ hooks:
+ - id: check-yaml
+ args: [--unsafe]
+ - id: check-json
+ - id: end-of-file-fixer
+ - id: trailing-whitespace
+ - id: check-case-conflict
+
+- repo: https://github.com/dbt-labs/pre-commit-hooks
+ rev: v0.1.0a1
+ hooks:
+ - id: dbt-core-in-adapters-check
+
+- repo: https://github.com/psf/black
+ rev: 24.4.0
+ hooks:
+ - id: black
+ args:
+ - --line-length=99
+ - --target-version=py38
+ - --target-version=py39
+ - --target-version=py310
+ - --target-version=py311
+ - --force-exclude=dbt/adapters/events/adapter_types_pb2.py
+
+- repo: https://github.com/pycqa/flake8
+ rev: 7.0.0
+ hooks:
+ - id: flake8
+ exclude: dbt/adapters/events/adapter_types_pb2.py|tests/functional/
+ args:
+ - --max-line-length=99
+ - --select=E,F,W
+ - --ignore=E203,E501,E704,E741,W503,W504
+ - --per-file-ignores=*/__init__.py:F401
- # It may cause trouble in that it adds environmental variables out
- # of our control to the mix. Unfortunately, there's nothing we can
- # do about per pre-commit's author.
- # See https://github.com/pre-commit/pre-commit/issues/730 for details.
- args: [--show-error-codes, --ignore-missing-imports, --explicit-package-bases]
- files: ^dbt/adapters/.*
- language: system
- - id: mypy
- alias: mypy-check
- stages: [manual]
- args: [--show-error-codes, --pretty, --ignore-missing-imports, --explicit-package-bases]
- files: ^dbt/adapters
- language: system
+- repo: https://github.com/pre-commit/mirrors-mypy
+ rev: v1.9.0
+ hooks:
+ - id: mypy
+ exclude: dbt/adapters/events/adapter_types_pb2.py|dbt-tests-adapter/dbt/__init__.py
+ args:
+ - --explicit-package-bases
+ - --ignore-missing-imports
+ - --pretty
+ - --show-error-codes
+ files: ^dbt/adapters/
+ additional_dependencies:
+ - types-PyYAML
+ - types-protobuf
+ - types-pytz
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9a024c80..4146e95e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,18 +5,126 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html),
and is generated by [Changie](https://github.com/miniscruff/changie).
-## dbt-adapter 1.0.0 - April 01, 2024
+## dbt-adapters 1.4.0 - July 30, 2024
+
+### Features
+
+- render 'to' and 'to_columns' fields on foreign key constraints, and bump dbt-common lower bound to 1.6 ([#271](https://github.com/dbt-labs/dbt-adapters/issues/271))
+
+### Fixes
+
+- Incremental table varchar column definition changed ([#276](https://github.com/dbt-labs/dbt-adapters/issues/276))
+
+### Under the Hood
+
+- Rework record/replay to record at the database connection level. ([#244](https://github.com/dbt-labs/dbt-adapters/issues/244))
+
+## dbt-adapters 1.3.3 - July 09, 2024
### Fixes
-* Add field wrapper to BaseRelation members that were missing it.
-* Add "description" and "meta" fields to RelationConfig protocol
+* Fix scenario where using the `--empty` flag causes metadata queries to contain limit clauses ([#213](https://github.com/dbt-labs/dbt-adapters/issues/213))
+
+### Under the Hood
+
+* --limit flag no longer subshells the query. This resolves the dbt Cloud experience issue where limit prevents ordering elements. ([#207](https://github.com/dbt-labs/dbt-adapters/issues/207))
+
+## dbt-adapters 1.3.2 - July 02, 2024
+
+### Under the Hood
+
+* Fix query timer resolution ([#246](https://github.com/dbt-labs/dbt-adapters/issues/246))
+* Add optional release_connection parameter to connection_named method ([#247](https://github.com/dbt-labs/dbt-adapters/issues/247))
+
+## dbt-adapters 1.3.1 - June 20, 2024
+
+## dbt-adapters 1.3.0 - June 18, 2024
+
+### Features
+
+* Add get_catalog_for_single_relation macro and capability to enable adapters to optimize catalog generation ([#231](https://github.com/dbt-labs/dbt-adapters/issues/231))
+
+## dbt-adapters 1.2.1 - May 21, 2024
+
+### Features
+
+* Improve the compile error message in get_fixture-sql.sql when the relation or the model does not exist ([#203](https://github.com/dbt-labs/dbt-adapters/issues/203))
+* Cross-database `date` macro ([#191](https://github.com/dbt-labs/dbt-adapters/issues/191))
+
+### Fixes
+
+* Update Clone test to reflect core change removing `deferred` attribute from nodes ([#194](https://github.com/dbt-labs/dbt-adapters/issues/194))
+
+### Under the Hood
+
+* Add query recording for adapters which use SQLConnectionManager ([#195](https://github.com/dbt-labs/dbt-adapters/issues/195))
+* Improve memory efficiency of process_results() ([#217](https://github.com/dbt-labs/dbt-adapters/issues/217))
+
+## dbt-adapters 1.1.1 - May 07, 2024
+
+### Features
+
+* Enable serialization contexts ([#197](https://github.com/dbt-labs/dbt-adapters/issues/197))
+
+## dbt-adapters 1.1.0 - May 01, 2024
+
+### Features
+
+* Debug log when `type_code` fails to convert to a `data_type` ([#135](https://github.com/dbt-labs/dbt-adapters/issues/135))
+* Introduce TableLastModifiedMetadataBatch and implement BaseAdapter.calculate_freshness_from_metadata_batch ([#127](https://github.com/dbt-labs/dbt-adapters/issues/127))
+* Support for sql fixtures in unit testing ([#146](https://github.com/dbt-labs/dbt-adapters/issues/146))
+* Cross-database `cast` macro ([#173](https://github.com/dbt-labs/dbt-adapters/issues/173))
+* Allow adapters to opt out of aliasing the subquery generated by render_limited ([#179](https://github.com/dbt-labs/dbt-adapters/issues/179))
+* subquery alias generated by render_limited now includes the relation name to mitigate duplicate aliasing ([#179](https://github.com/dbt-labs/dbt-adapters/issues/179))
+
+### Fixes
+
+* Fix adapter-specific cast handling for constraint enforcement ([#165](https://github.com/dbt-labs/dbt-adapters/issues/165))
+
+### Docs
+
+* Use `dbt-adapters` throughout the contributing guide ([#137](https://github.com/dbt-labs/dbt-adapters/issues/137))
+
+### Under the Hood
+
+* Add the option to set the log level of the AdapterRegistered event ([#141](https://github.com/dbt-labs/dbt-adapters/issues/141))
+* Update dependabot config to cover GHA ([#161](https://github.com/dbt-labs/dbt-adapters/issues/161))
+* Validate that dbt-core and dbt-adapters remain de-coupled ([#174](https://github.com/dbt-labs/dbt-adapters/issues/174))
+* remove dbt_version from query comment test fixture ([#184](https://github.com/dbt-labs/dbt-adapters/issues/184))
+
+### Dependencies
+
+* add support for py3.12 ([#185](https://github.com/dbt-labs/dbt-adapters/issues/185))
+
+## dbt-adapters 1.0.0 - April 01, 2024
+
+### Features
+
+* Update RelationConfig to capture all fields used by adapters ([#30](https://github.com/dbt-labs/dbt-adapters/issues/30))
+
+### Fixes
+
+* Add field wrapper to BaseRelation members that were missing it. ([#108](https://github.com/dbt-labs/dbt-adapters/issues/108))
+* Add "description" and "meta" fields to RelationConfig protocol ([#119](https://github.com/dbt-labs/dbt-adapters/issues/119))
+* Ignore adapter-level support warnings for 'custom' constraints ([#90](https://github.com/dbt-labs/dbt-adapters/issues/90))
+* Make all adapter zone tests importable by removing "Test" prefix ([#93](https://github.com/dbt-labs/dbt-adapters/issues/93))
+
+### Docs
+
+* Configure `changie` ([#16](https://github.com/dbt-labs/dbt-adapters/issues/16))
+* Setup ADR tracking framework ([#11](https://github.com/dbt-labs/dbt-adapters/issues/11))
+* Create issue templates ([#12](https://github.com/dbt-labs/dbt-adapters/issues/12))
+* Create PR template ([#13](https://github.com/dbt-labs/dbt-adapters/issues/13))
### Under the Hood
-* Lazy load agate to improve dbt-core performance
-* add BaseAdapater.MAX_SCHEMA_METADATA_RELATIONS
+* Lazy load agate to improve dbt-core performance ([#125](https://github.com/dbt-labs/dbt-adapters/issues/125))
+* add BaseAdapter.MAX_SCHEMA_METADATA_RELATIONS ([#131](https://github.com/dbt-labs/dbt-adapters/issues/131))
+* Configure `dependabot` ([#14](https://github.com/dbt-labs/dbt-adapters/issues/14))
+* Implement unit testing in CI ([#22](https://github.com/dbt-labs/dbt-adapters/issues/22))
+* Allow version to be specified in either __version__.py or __about__.py ([#44](https://github.com/dbt-labs/dbt-adapters/issues/44))
+* Remove __init__.py file from dbt.tests ([#96](https://github.com/dbt-labs/dbt-adapters/issues/96))
### Security
-* Pin `black>=24.3` in `pyproject.toml`
+* Pin `black>=24.3` in `pyproject.toml` ([#140](https://github.com/dbt-labs/dbt-adapters/issues/140))
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index eb0002fa..1a6e92a2 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,4 +1,4 @@
-# Contributing to `dbt-adapter`
+# Contributing to `dbt-adapters`
- [About this document](#about-this-document)
- [Getting the code](#getting-the-code)
@@ -10,20 +10,17 @@
## About this document
-This document is a guide for anyone interested in contributing to `dbt-adapter`.
-It outlines how to install `dbt-adapter` for development,
+This document is a guide for anyone interested in contributing to `dbt-adapters`.
+It outlines how to install `dbt-adapters` for development,
run tests locally, update documentation, and submit pull requests.
This guide assumes users are developing on a Linux or MacOS system.
The following utilities are needed or will be installed in this guide:
- `pip`
-- `virturalenv`
+- `hatch`
- `git`
- `changie`
-If local functional testing is required, then a database instance
-and appropriate credentials are also required.
-
In addition to this guide, users are highly encouraged to read the `dbt-core`
[CONTRIBUTING.md](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md).
Almost all information there is applicable here.
@@ -31,7 +28,7 @@ Almost all information there is applicable here.
## Getting the code
-`git` is required to download, modify, and sync the `dbt-adapter` code.
+`git` is required to download, modify, and sync the `dbt-adapters` code.
There are several ways to install Git. For MacOS:
- Install [Xcode](https://developer.apple.com/support/xcode/)
@@ -39,52 +36,66 @@ There are several ways to install Git. For MacOS:
### External contributors
-Contributors external to the `dbt-labs` GitHub organization can contribute to `dbt-adapter`
-by forking the `dbt-adapter` repository. For more on forking, check out the
+Contributors external to the `dbt-labs` GitHub organization can contribute to `dbt-adapters`
+by forking the `dbt-adapters` repository. For more on forking, check out the
[GitHub docs on forking](https://help.github.com/en/articles/fork-a-repo). To contribute:
-1. Fork the `dbt-labs/dbt-adapter` repository (e.g. `{forked-org}/dbt-adapter`)
-2. Clone `{forked-org}/dbt-adapter` locally
+1. Fork the `dbt-labs/dbt-adapters` repository (e.g. `{forked-org}/dbt-adapters`)
+2. Clone `{forked-org}/dbt-adapters` locally
3. Check out a new branch locally
4. Make changes in the new branch
-5. Push the new branch to `{forked-org}/dbt-adapter`
-6. Open a pull request in `dbt-labs/dbt-adapter` to merge `{forked-org}/dbt-adapter/{new-branch}` into `main`
+5. Push the new branch to `{forked-org}/dbt-adapters`
+6. Open a pull request in `dbt-labs/dbt-adapters` to merge `{forked-org}/dbt-adapters/{new-branch}` into `main`
### dbt Labs contributors
-Contributors in the `dbt Labs` GitHub organization have push access to the `dbt-adapter` repo.
-Rather than forking `dbt-labs/dbt-adapter`, use `dbt-labs/dbt-adapter` directly. To contribute:
+Contributors in the `dbt Labs` GitHub organization have push access to the `dbt-adapters` repo.
+Rather than forking `dbt-labs/dbt-adapters`, use `dbt-labs/dbt-adapters` directly. To contribute:
-1. Clone `dbt-labs/dbt-adapter` locally
+1. Clone `dbt-labs/dbt-adapters` locally
2. Check out a new branch locally
3. Make changes in the new branch
-4. Push the new branch to `dbt-labs/dbt-adapter`
-5. Open a pull request in `dbt-labs/dbt-adapter` to merge `{new-branch}` into `main`
+4. Push the new branch to `dbt-labs/dbt-adapters`
+5. Open a pull request in `dbt-labs/dbt-adapters` to merge `{new-branch}` into `main`
## Developing
### Installation
-1. Ensure the latest version of `pip` is installed:
+1. Ensure the latest versions of `pip` and `hatch` are installed:
```shell
- pip install --upgrade pip
+ pip install --user --upgrade pip hatch
+ ```
+2. This step is optional, but it's recommended. Configure `hatch` to create its virtual environments in the project. Add this block to your `hatch` `config.toml` file:
+ ```toml
+ # MacOS: ~/Library/Application Support/hatch/config.toml
+ [dirs.env]
+ virtual = ".hatch"
```
-2. Configure and activate a virtual environment using `virtualenv` as described in
-[Setting up an environment](https://github.com/dbt-labs/dbt-core/blob/HEAD/CONTRIBUTING.md#setting-up-an-environment)
-3. Install `dbt-adapter` and development dependencies in the virtual environment
+ This makes `hatch` create all virtual environments in the project root inside of the directory `/.hatch`, similar to `/.tox` for `tox`.
+ It also makes it easier to add this environment as a runner in common IDEs like VSCode and PyCharm.
+3. Create a `hatch` environment with all of the development dependencies and activate it:
```shell
- pip install -e .[dev]
+ hatch run setup
+ hatch shell
+ ```
+4. Run any commands within the virtual environment by prefixing the command with `hatch run`:
+ ```shell
+    hatch run <command>
```
-When `dbt-adapter` is installed this way, any changes made to the `dbt-adapter` source code
-will be reflected in the virtual environment immediately.
+## Testing
+`dbt-adapters` contains [code quality checks](https://github.com/dbt-labs/dbt-adapters/tree/main/.pre-commit-config.yaml) and [unit tests](https://github.com/dbt-labs/dbt-adapters/tree/main/tests/unit).
+While `dbt-tests-adapter` is also hosted in this repo, it requires a concrete adapter to run.
-## Testing
+### Code quality
-`dbt-adapter` contains [unit](https://github.com/dbt-labs/dbt-adapter/tree/main/tests/unit)
-and [functional](https://github.com/dbt-labs/dbt-adapter/tree/main/tests/functional) tests.
+Code quality checks can run with a single command:
+```shell
+hatch run code-quality
+```
### Unit tests
@@ -93,48 +104,43 @@ Unit tests can be run locally without setting up a database connection:
```shell
# Note: replace $strings with valid names
+# run all unit tests
+hatch run unit-tests
+
# run all unit tests in a module
-python -m pytest tests/unit/$test_file_name.py
+hatch run unit-tests tests/unit/$test_file_name.py
+
# run a specific unit test
-python -m pytest tests/unit/$test_file_name.py::$test_class_name::$test_method_name
+hatch run unit-tests tests/unit/$test_file_name.py::$test_class_name::$test_method_name
```
-### Functional tests
-
-Functional tests require a database to test against. There are two primary ways to run functional tests:
+### Testing against a development branch
-- Tests will run automatically against a dbt Labs owned database during PR checks
-- Tests can be run locally by configuring a `test.env` file with appropriate `ENV` variables:
- ```shell
- cp test.env.example test.env
- $EDITOR test.env
- ```
+Some changes require coordinated updates to both `dbt-common` and `dbt-adapters`.
+In that case, update the `dbt-common` dependency to point at the development branch. For example:
-> **_WARNING:_** The parameters in `test.env` must link to a valid database.
-> `test.env` is git-ignored, but be _extra_ careful to never check in credentials
-> or other sensitive information when developing.
+```toml
+[tool.hatch.envs.default]
+dependencies = [
+ "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git@my-dev-branch",
+ ...,
+]
+```
-Functional tests can be run locally with a valid database connection configured in `test.env`:
+This will install `dbt-common` as a snapshot; if `my-dev-branch` is later updated on GitHub, those updates will not be reflected locally.
+To pick up those updates, rebuild the `hatch` environment(s):
```shell
-# Note: replace $strings with valid names
-
-# run all functional tests in a directory
-python -m pytest tests/functional/$test_directory
-# run all functional tests in a module
-python -m pytest tests/functional/$test_dir_and_filename.py
-# run all functional tests in a class
-python -m pytest tests/functional/$test_dir_and_filename.py::$test_class_name
-# run a specific functional test
-python -m pytest tests/functional/$test_dir_and_filename.py::$test_class_name::$test__method_name
+exit
+hatch env prune
+hatch shell
```
-
## Documentation
### User documentation
-Many changes will require an update to `dbt-adapter` user documentation.
+Many changes will require an update to `dbt-adapters` user documentation.
All contributors, whether internal or external, are encouraged to open an issue or PR
in the docs repo when submitting user-facing changes. Here are some relevant links:
@@ -145,7 +151,7 @@ in the docs repo when submitting user-facing changes. Here are some relevant lin
### CHANGELOG entry
-`dbt-adapter` uses [changie](https://changie.dev) to generate `CHANGELOG` entries.
+`dbt-adapters` uses [changie](https://changie.dev) to generate `CHANGELOG` entries.
Follow the steps to [install `changie`](https://changie.dev/guide/installation/).
Once changie is installed and the PR is created, run:
@@ -163,7 +169,7 @@ Remember to commit and push the file that's created.
### Signing the CLA
-> **_NOTE:_** All contributors to `dbt-adapter` must sign the
+> **_NOTE:_** All contributors to `dbt-adapters` must sign the
> [Contributor License Agreement](https://docs.getdbt.com/docs/contributor-license-agreements) (CLA).
Maintainers will be unable to merge contributions until the contributor signs the CLA.
@@ -172,11 +178,11 @@ Even without a CLA, anyone is welcome to open issues and comment on existing iss
### Opening a pull request
-A `dbt-adapter` maintainer will be assigned to review each PR based on priority and capacity.
+A `dbt-adapters` maintainer will be assigned to review each PR based on priority and capacity.
They may suggest code revisions for style and clarity or they may request additional tests.
These are good things! dbt Labs believes that contributing high-quality code is a collaborative effort.
-The same process is followed whether the contributor is external or another `dbt-adapter` maintainer.
+The same process is followed whether the contributor is external or another `dbt-adapters` maintainer.
Once all tests are passing and the PR has been approved by the appropriate code owners,
-a `dbt-adapter` maintainer will merge the changes into `main`.
+a `dbt-adapters` maintainer will merge the changes into `main`.
And that's it! Happy developing :tada:
diff --git a/dbt-tests-adapter/dbt/tests/__about__.py b/dbt-tests-adapter/dbt/tests/__about__.py
index 6496f3e2..1b022739 100644
--- a/dbt-tests-adapter/dbt/tests/__about__.py
+++ b/dbt-tests-adapter/dbt/tests/__about__.py
@@ -1 +1 @@
-version = "1.8.0b1"
+version = "1.9.2"
diff --git a/dbt-tests-adapter/dbt/tests/adapter/basic/test_get_catalog_for_single_relation.py b/dbt-tests-adapter/dbt/tests/adapter/basic/test_get_catalog_for_single_relation.py
new file mode 100644
index 00000000..78055cc5
--- /dev/null
+++ b/dbt-tests-adapter/dbt/tests/adapter/basic/test_get_catalog_for_single_relation.py
@@ -0,0 +1,87 @@
+import pytest
+
+from dbt.tests.util import run_dbt, get_connection
+
+models__my_table_model_sql = """
+select * from {{ ref('my_seed') }}
+"""
+
+
+models__my_view_model_sql = """
+{{
+ config(
+ materialized='view',
+ )
+}}
+
+select * from {{ ref('my_seed') }}
+"""
+
+seed__my_seed_csv = """id,first_name,email,ip_address,updated_at
+1,Larry,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31
+"""
+
+
+class BaseGetCatalogForSingleRelation:
+ @pytest.fixture(scope="class")
+ def project_config_update(self):
+ return {"name": "get_catalog_for_single_relation"}
+
+ @pytest.fixture(scope="class")
+ def seeds(self):
+ return {
+ "my_seed.csv": seed__my_seed_csv,
+ }
+
+ @pytest.fixture(scope="class")
+ def models(self):
+ return {
+ "my_view_model.sql": models__my_view_model_sql,
+ "my_table_model.sql": models__my_table_model_sql,
+ }
+
+ @pytest.fixture(scope="class")
+ def expected_catalog_my_seed(self, project):
+ raise NotImplementedError(
+ "To use this test, please implement `get_catalog_for_single_relation`, inherited from `SQLAdapter`."
+ )
+
+ @pytest.fixture(scope="class")
+    def expected_catalog_my_view_model(self, project):
+ raise NotImplementedError(
+ "To use this test, please implement `get_catalog_for_single_relation`, inherited from `SQLAdapter`."
+ )
+
+ def get_relation_for_identifier(self, project, identifier):
+ return project.adapter.get_relation(
+ database=project.database,
+ schema=project.test_schema,
+ identifier=identifier,
+ )
+
+ def test_get_catalog_for_single_relation(
+ self, project, expected_catalog_my_seed, expected_catalog_my_view_model
+ ):
+ results = run_dbt(["seed"])
+ assert len(results) == 1
+
+ my_seed_relation = self.get_relation_for_identifier(project, "my_seed")
+
+ with get_connection(project.adapter):
+ actual_catalog_my_seed = project.adapter.get_catalog_for_single_relation(
+ my_seed_relation
+ )
+
+ assert actual_catalog_my_seed == expected_catalog_my_seed
+
+ results = run_dbt(["run"])
+ assert len(results) == 2
+
+ my_view_model_relation = self.get_relation_for_identifier(project, "my_view_model")
+
+ with get_connection(project.adapter):
+ actual_catalog_my_view_model = project.adapter.get_catalog_for_single_relation(
+ my_view_model_relation
+ )
+
+ assert actual_catalog_my_view_model == expected_catalog_my_view_model
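+
+
+# A minimal sketch of how a concrete adapter might satisfy the expected-catalog
+# fixtures above, kept as a comment since the exact metadata depends on the
+# warehouse. The classes come from `dbt_common.contracts.metadata`; the class
+# name, relation type, and column metadata below are hypothetical.
+#
+# from dbt_common.contracts.metadata import CatalogTable, ColumnMetadata, TableMetadata
+#
+# class TestGetCatalogForSingleRelationMyAdapter(BaseGetCatalogForSingleRelation):
+#     @pytest.fixture(scope="class")
+#     def expected_catalog_my_seed(self, project):
+#         return CatalogTable(
+#             metadata=TableMetadata(
+#                 type="BASE TABLE",
+#                 schema=project.test_schema,
+#                 name="my_seed",
+#                 database=project.database,
+#             ),
+#             columns={"id": ColumnMetadata(type="integer", index=1, name="id")},
+#             stats={},
+#         )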
diff --git a/dbt-tests-adapter/dbt/tests/adapter/constraints/fixtures.py b/dbt-tests-adapter/dbt/tests/adapter/constraints/fixtures.py
index cfbd5379..2a4f089b 100644
--- a/dbt-tests-adapter/dbt/tests/adapter/constraints/fixtures.py
+++ b/dbt-tests-adapter/dbt/tests/adapter/constraints/fixtures.py
@@ -363,7 +363,8 @@
- type: check
expression: id >= 1
- type: foreign_key
- expression: {schema}.foreign_key_model (id)
+ to: ref('foreign_key_model')
+ to_columns: ["id"]
- type: unique
data_tests:
- unique
diff --git a/dbt-tests-adapter/dbt/tests/adapter/dbt_clone/test_dbt_clone.py b/dbt-tests-adapter/dbt/tests/adapter/dbt_clone/test_dbt_clone.py
index 61472d1a..f59956c1 100644
--- a/dbt-tests-adapter/dbt/tests/adapter/dbt_clone/test_dbt_clone.py
+++ b/dbt-tests-adapter/dbt/tests/adapter/dbt_clone/test_dbt_clone.py
@@ -73,17 +73,14 @@ def copy_state(self, project_root):
def run_and_save_state(self, project_root, with_snapshot=False):
results = run_dbt(["seed"])
assert len(results) == 1
- assert not any(r.node.deferred for r in results)
results = run_dbt(["run"])
assert len(results) == 2
- assert not any(r.node.deferred for r in results)
results = run_dbt(["test"])
assert len(results) == 2
if with_snapshot:
results = run_dbt(["snapshot"])
assert len(results) == 1
- assert not any(r.node.deferred for r in results)
# copy files
self.copy_state(project_root)
@@ -214,6 +211,7 @@ def test_clone_same_target_and_state(self, project, unique_schema, other_schema)
clone_args = [
"clone",
+ "--defer",
"--state",
"target",
]
diff --git a/dbt-tests-adapter/dbt/tests/adapter/dbt_show/test_dbt_show.py b/dbt-tests-adapter/dbt/tests/adapter/dbt_show/test_dbt_show.py
index c00491aa..3f3d36c5 100644
--- a/dbt-tests-adapter/dbt/tests/adapter/dbt_show/test_dbt_show.py
+++ b/dbt-tests-adapter/dbt/tests/adapter/dbt_show/test_dbt_show.py
@@ -1,5 +1,6 @@
import pytest
+from dbt_common.exceptions import DbtRuntimeError
from dbt.tests.adapter.dbt_show import fixtures
from dbt.tests.util import run_dbt
@@ -47,9 +48,25 @@ def test_sql_header(self, project):
run_dbt(["show", "--select", "sql_header", "--vars", "timezone: Asia/Kolkata"])
+class BaseShowDoesNotHandleDoubleLimit:
+ """see issue: https://github.com/dbt-labs/dbt-adapters/issues/207"""
+
+ DATABASE_ERROR_MESSAGE = 'syntax error at or near "limit"'
+
+ def test_double_limit_throws_syntax_error(self, project):
+ with pytest.raises(DbtRuntimeError) as e:
+ run_dbt(["show", "--limit", "1", "--inline", "select 1 limit 1"])
+
+ assert self.DATABASE_ERROR_MESSAGE in str(e)
+
+
class TestPostgresShowSqlHeader(BaseShowSqlHeader):
pass
class TestPostgresShowLimit(BaseShowLimit):
pass
+
+
+class TestShowDoesNotHandleDoubleLimit(BaseShowDoesNotHandleDoubleLimit):
+ pass
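+
+
+# Sketch: an adapter whose database reports a different parser error for the
+# doubled limit can reuse the base test by overriding the expected message
+# (the class name and message below are hypothetical):
+#
+# class TestMyAdapterShowDoesNotHandleDoubleLimit(BaseShowDoesNotHandleDoubleLimit):
+#     DATABASE_ERROR_MESSAGE = "unexpected token: limit"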
diff --git a/dbt-tests-adapter/dbt/tests/adapter/empty/_models.py b/dbt-tests-adapter/dbt/tests/adapter/empty/_models.py
new file mode 100644
index 00000000..f5e684f7
--- /dev/null
+++ b/dbt-tests-adapter/dbt/tests/adapter/empty/_models.py
@@ -0,0 +1,111 @@
+model_input_sql = """
+select 1 as id
+"""
+
+ephemeral_model_input_sql = """
+{{ config(materialized='ephemeral') }}
+select 2 as id
+"""
+
+raw_source_csv = """id
+3
+"""
+
+
+model_sql = """
+select *
+from {{ ref('model_input') }}
+union all
+select *
+from {{ ref('ephemeral_model_input') }}
+union all
+select *
+from {{ source('seed_sources', 'raw_source') }}
+"""
+
+
+model_inline_sql = """
+select * from {{ source('seed_sources', 'raw_source') }} as raw_source
+"""
+
+schema_sources_yml = """
+sources:
+ - name: seed_sources
+ schema: "{{ target.schema }}"
+ tables:
+ - name: raw_source
+"""
+
+
+SEED = """
+my_id,my_value
+1,a
+2,b
+3,c
+""".strip()
+
+
+SCHEMA = """
+version: 2
+
+seeds:
+ - name: my_seed
+ description: "This is my_seed"
+ columns:
+ - name: id
+ description: "This is my_seed.my_id"
+"""
+
+CONTROL = """
+select * from {{ ref("my_seed") }}
+"""
+
+
+GET_COLUMNS_IN_RELATION = """
+{{ config(materialized="table") }}
+{% set columns = adapter.get_columns_in_relation(ref("my_seed")) %}
+select * from {{ ref("my_seed") }}
+"""
+
+
+ALTER_COLUMN_TYPE = """
+{{ config(materialized="table") }}
+{{ alter_column_type(ref("my_seed"), "MY_VALUE", "varchar") }}
+select * from {{ ref("my_seed") }}
+"""
+
+
+ALTER_RELATION_COMMENT = """
+{{ config(
+ materialized="table",
+ persist_docs={"relations": True},
+) }}
+select * from {{ ref("my_seed") }}
+"""
+
+
+ALTER_COLUMN_COMMENT = """
+{{ config(
+ materialized="table",
+ persist_docs={"columns": True},
+) }}
+select * from {{ ref("my_seed") }}
+"""
+
+
+ALTER_RELATION_ADD_REMOVE_COLUMNS = """
+{{ config(materialized="table") }}
+{% set my_seed = adapter.Relation.create(this.database, this.schema, "my_seed", "table") %}
+{% set my_column = api.Column("my_column", "varchar") %}
+{% do alter_relation_add_remove_columns(my_seed, [my_column], none) %}
+{% do alter_relation_add_remove_columns(my_seed, none, [my_column]) %}
+select * from {{ ref("my_seed") }}
+"""
+
+
+TRUNCATE_RELATION = """
+{{ config(materialized="table") }}
+{% set my_seed = adapter.Relation.create(this.database, this.schema, "my_seed", "table") %}
+{{ truncate_relation(my_seed) }}
+select * from {{ ref("my_seed") }}
+"""
diff --git a/dbt-tests-adapter/dbt/tests/adapter/empty/test_empty.py b/dbt-tests-adapter/dbt/tests/adapter/empty/test_empty.py
index 373a13ee..de15bd5b 100644
--- a/dbt-tests-adapter/dbt/tests/adapter/empty/test_empty.py
+++ b/dbt-tests-adapter/dbt/tests/adapter/empty/test_empty.py
@@ -1,57 +1,23 @@
-import pytest
-
from dbt.tests.util import relation_from_name, run_dbt
+import pytest
-
-model_input_sql = """
-select 1 as id
-"""
-
-ephemeral_model_input_sql = """
-{{ config(materialized='ephemeral') }}
-select 2 as id
-"""
-
-raw_source_csv = """id
-3
-"""
-
-
-model_sql = """
-select *
-from {{ ref('model_input') }}
-union all
-select *
-from {{ ref('ephemeral_model_input') }}
-union all
-select *
-from {{ source('seed_sources', 'raw_source') }}
-"""
-
-
-schema_sources_yml = """
-sources:
- - name: seed_sources
- schema: "{{ target.schema }}"
- tables:
- - name: raw_source
-"""
+from dbt.tests.adapter.empty import _models
class BaseTestEmpty:
@pytest.fixture(scope="class")
def seeds(self):
return {
- "raw_source.csv": raw_source_csv,
+ "raw_source.csv": _models.raw_source_csv,
}
@pytest.fixture(scope="class")
def models(self):
return {
- "model_input.sql": model_input_sql,
- "ephemeral_model_input.sql": ephemeral_model_input_sql,
- "model.sql": model_sql,
- "sources.yml": schema_sources_yml,
+ "model_input.sql": _models.model_input_sql,
+ "ephemeral_model_input.sql": _models.ephemeral_model_input_sql,
+ "model.sql": _models.model_sql,
+ "sources.yml": _models.schema_sources_yml,
}
def assert_row_count(self, project, relation_name: str, expected_row_count: int):
@@ -72,5 +38,63 @@ def test_run_with_empty(self, project):
self.assert_row_count(project, "model", 0)
+class BaseTestEmptyInlineSourceRef(BaseTestEmpty):
+ @pytest.fixture(scope="class")
+ def models(self):
+ return {
+ "model.sql": _models.model_inline_sql,
+ "sources.yml": _models.schema_sources_yml,
+ }
+
+ def test_run_with_empty(self, project):
+ # create source from seed
+ run_dbt(["seed"])
+ run_dbt(["run", "--empty", "--debug"])
+ self.assert_row_count(project, "model", 0)
+
+
class TestEmpty(BaseTestEmpty):
+ """
+    Though we don't create these classes anymore, we need to keep this one in case an adapter wants to import the test as-is and run it automatically.
+ We should consider adding a deprecation warning that suggests moving this into the concrete adapter and importing `BaseTestEmpty` instead.
+ """
+
pass
+
+
+class MetadataWithEmptyFlag:
+ @pytest.fixture(scope="class")
+ def seeds(self):
+ return {"my_seed.csv": _models.SEED}
+
+ @pytest.fixture(scope="class")
+ def models(self):
+ return {
+ "schema.yml": _models.SCHEMA,
+ "control.sql": _models.CONTROL,
+ "get_columns_in_relation.sql": _models.GET_COLUMNS_IN_RELATION,
+ "alter_column_type.sql": _models.ALTER_COLUMN_TYPE,
+ "alter_relation_comment.sql": _models.ALTER_RELATION_COMMENT,
+ "alter_column_comment.sql": _models.ALTER_COLUMN_COMMENT,
+ "alter_relation_add_remove_columns.sql": _models.ALTER_RELATION_ADD_REMOVE_COLUMNS,
+ "truncate_relation.sql": _models.TRUNCATE_RELATION,
+ }
+
+ @pytest.fixture(scope="class", autouse=True)
+ def setup(self, project):
+ run_dbt(["seed"])
+
+ @pytest.mark.parametrize(
+ "model",
+ [
+ "control",
+ "get_columns_in_relation",
+ "alter_column_type",
+ "alter_relation_comment",
+ "alter_column_comment",
+ "alter_relation_add_remove_columns",
+ "truncate_relation",
+ ],
+ )
+ def test_run(self, project, model):
+ run_dbt(["run", "--empty", "--select", model])
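+
+
+# Sketch: a concrete adapter opts into these metadata checks by subclassing
+# (the class name below is hypothetical):
+#
+# class TestMyAdapterMetadataWithEmptyFlag(MetadataWithEmptyFlag):
+#     pass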
diff --git a/dbt-tests-adapter/dbt/tests/adapter/hooks/test_model_hooks.py b/dbt-tests-adapter/dbt/tests/adapter/hooks/test_model_hooks.py
index 6a544af0..8423c9ca 100644
--- a/dbt-tests-adapter/dbt/tests/adapter/hooks/test_model_hooks.py
+++ b/dbt-tests-adapter/dbt/tests/adapter/hooks/test_model_hooks.py
@@ -1,6 +1,7 @@
from pathlib import Path
from dbt_common.exceptions import CompilationError
+
# TODO: does this belong in dbt-tests-adapter?
from dbt.exceptions import ParsingError
import pytest
diff --git a/dbt-tests-adapter/dbt/tests/adapter/query_comment/fixtures.py b/dbt-tests-adapter/dbt/tests/adapter/query_comment/fixtures.py
index d8848dc0..ccaf3292 100644
--- a/dbt-tests-adapter/dbt/tests/adapter/query_comment/fixtures.py
+++ b/dbt-tests-adapter/dbt/tests/adapter/query_comment/fixtures.py
@@ -10,7 +10,6 @@
{%- set comment_dict = dict(
app='dbt++',
macro_version='0.1.0',
- dbt_version=dbt_version,
message='blah: '~ message) -%}
{{ return(comment_dict) }}
{%- endmacro -%}
diff --git a/dbt-tests-adapter/dbt/tests/adapter/query_comment/test_query_comment.py b/dbt-tests-adapter/dbt/tests/adapter/query_comment/test_query_comment.py
index 66251c1d..4453c273 100644
--- a/dbt-tests-adapter/dbt/tests/adapter/query_comment/test_query_comment.py
+++ b/dbt-tests-adapter/dbt/tests/adapter/query_comment/test_query_comment.py
@@ -1,5 +1,4 @@
import json
-from importlib import import_module
import pytest
from dbt_common.exceptions import DbtRuntimeError
@@ -53,19 +52,15 @@ def test_matches_comment(self, project):
class BaseMacroArgsQueryComments(BaseDefaultQueryComments):
- @pytest.fixture(scope="class")
- def get_package_version(self, project):
- return import_module("." + project.adapter_type, "dbt.adapters").__version__.version
@pytest.fixture(scope="class")
def project_config_update(self):
return {"query-comment": "{{ return(ordered_to_json(query_header_args(target.name))) }}"}
- def test_matches_comment(self, project, get_package_version):
+ def test_matches_comment(self, project):
logs = self.run_get_json()
expected_dct = {
"app": "dbt++",
- "dbt_version": get_package_version,
"macro_version": "0.1.0",
"message": f"blah: {project.adapter.config.target_name}",
}
diff --git a/dbt-tests-adapter/dbt/tests/adapter/unit_testing/test_invalid_input.py b/dbt-tests-adapter/dbt/tests/adapter/unit_testing/test_invalid_input.py
index 6c41ceb9..c5bf2a09 100644
--- a/dbt-tests-adapter/dbt/tests/adapter/unit_testing/test_invalid_input.py
+++ b/dbt-tests-adapter/dbt/tests/adapter/unit_testing/test_invalid_input.py
@@ -46,16 +46,22 @@ def models(self):
def test_invalid_input(self, project):
results = run_dbt(["run"])
assert len(results) == 2
-
+
_, out = run_dbt_and_capture(
["test", "--select", "test_name:test_invalid_input_column_name"], expect_pass=False
)
- assert "Invalid column name: 'invalid_column_name' in unit test fixture for 'my_upstream_model'." in out
-
+ assert (
+ "Invalid column name: 'invalid_column_name' in unit test fixture for 'my_upstream_model'."
+ in out
+ )
+
_, out = run_dbt_and_capture(
["test", "--select", "test_name:test_invalid_expect_column_name"], expect_pass=False
)
- assert "Invalid column name: 'invalid_column_name' in unit test fixture for expected output." in out
+ assert (
+ "Invalid column name: 'invalid_column_name' in unit test fixture for expected output."
+ in out
+ )
class TestPostgresUnitTestInvalidInput(BaseUnitTestInvalidInput):
diff --git a/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_cast.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_cast.py
new file mode 100644
index 00000000..fe29c706
--- /dev/null
+++ b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_cast.py
@@ -0,0 +1,33 @@
+# cast
+
+seeds__data_cast_csv = """field,output
+abc,abc
+123,123
+,
+"""
+
+
+models__test_cast_sql = """
+with data as (
+
+ select * from {{ ref('data_cast') }}
+
+)
+
+select
+ {{ cast('field', api.Column.translate_type('string')) }} as actual,
+ output as expected
+
+from data
+"""
+
+
+models__test_cast_yml = """
+version: 2
+models:
+ - name: test_cast
+ data_tests:
+ - assert_equal:
+ actual: actual
+ expected: expected
+"""
diff --git a/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_date.py b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_date.py
new file mode 100644
index 00000000..7c9b2084
--- /dev/null
+++ b/dbt-tests-adapter/dbt/tests/adapter/utils/fixture_date.py
@@ -0,0 +1,45 @@
+# date
+
+models__test_date_sql = """
+with generated_dates as (
+
+ {{
+ dbt.date_spine(
+ "day",
+ date(2023, 9, 7),
+ date(2023, 9, 10),
+ )
+ }}
+
+),
+
+expected_dates as (
+
+ select cast('2023-09-07' as date) as expected
+ union all
+ select cast('2023-09-08' as date) as expected
+ union all
+ select cast('2023-09-09' as date) as expected
+
+),
+
+joined as (
+ select
+ generated_dates.date_day,
+ expected_dates.expected
+ from generated_dates
+ full outer join expected_dates on generated_dates.date_day = expected_dates.expected
+)
+
+select * from joined
+"""
+
+models__test_date_yml = """
+version: 2
+models:
+ - name: test_date
+ data_tests:
+ - assert_equal:
+ actual: date_day
+ expected: expected
+"""
diff --git a/dbt-tests-adapter/dbt/tests/adapter/utils/test_cast.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_cast.py
new file mode 100644
index 00000000..686b7697
--- /dev/null
+++ b/dbt-tests-adapter/dbt/tests/adapter/utils/test_cast.py
@@ -0,0 +1,23 @@
+import pytest
+
+from dbt.tests.adapter.utils import base_utils, fixture_cast
+
+
+class BaseCast(base_utils.BaseUtils):
+ @pytest.fixture(scope="class")
+ def seeds(self):
+ return {"data_cast.csv": fixture_cast.seeds__data_cast_csv}
+
+ @pytest.fixture(scope="class")
+ def models(self):
+ return {
+ "test_cast.yml": fixture_cast.models__test_cast_yml,
+ "test_cast.sql": self.interpolate_macro_namespace(
+ self.interpolate_macro_namespace(fixture_cast.models__test_cast_sql, "cast"),
+ "type_string",
+ ),
+ }
+
+
+class TestCast(BaseCast):
+ pass
diff --git a/dbt-tests-adapter/dbt/tests/adapter/utils/test_date.py b/dbt-tests-adapter/dbt/tests/adapter/utils/test_date.py
new file mode 100644
index 00000000..a096b5af
--- /dev/null
+++ b/dbt-tests-adapter/dbt/tests/adapter/utils/test_date.py
@@ -0,0 +1,18 @@
+import pytest
+
+from dbt.tests.adapter.utils import base_utils, fixture_date
+
+
+class BaseDate(base_utils.BaseUtils):
+ @pytest.fixture(scope="class")
+ def models(self):
+ return {
+ "test_date.yml": fixture_date.models__test_date_yml,
+ "test_date.sql": self.interpolate_macro_namespace(
+ fixture_date.models__test_date_sql, "date"
+ ),
+ }
+
+
+class TestDate(BaseDate):
+ pass
diff --git a/dbt-tests-adapter/pyproject.toml b/dbt-tests-adapter/pyproject.toml
index c5df5998..73bce49e 100644
--- a/dbt-tests-adapter/pyproject.toml
+++ b/dbt-tests-adapter/pyproject.toml
@@ -21,24 +21,17 @@ classifiers = [
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
]
dependencies = [
# TODO: remove `dbt-core` dependency
- "dbt-core>=1.8.0a1,<1.9.0",
+ "dbt-core>=1.8.0a1",
# `dbt-tests-adapter` will ultimately depend on the packages below
# `dbt-tests-adapter` temporarily uses `dbt-core` for a dbt runner
# `dbt-core` takes the packages below as dependencies, so they are unpinned to avoid conflicts
"dbt-adapters",
"pyyaml",
]
-
-[project.optional-dependencies]
-build = [
- "wheel",
- "twine",
- "check-wheel-contents",
-]
-
[project.urls]
Homepage = "https://github.com/dbt-labs/dbt-adapters"
Documentation = "https://docs.getdbt.com"
@@ -61,7 +54,11 @@ include = ["dbt/tests", "dbt/__init__.py"]
[tool.hatch.envs.build]
detached = true
-features = ["build"]
+dependencies = [
+ "wheel",
+ "twine",
+ "check-wheel-contents",
+]
[tool.hatch.envs.build.scripts]
check-all = [
"- check-wheel",
diff --git a/dbt/adapters/__about__.py b/dbt/adapters/__about__.py
index 11a716ec..d619c757 100644
--- a/dbt/adapters/__about__.py
+++ b/dbt/adapters/__about__.py
@@ -1 +1 @@
-version = "1.0.0"
+version = "1.4.0"
diff --git a/dbt/adapters/__init__.py b/dbt/adapters/__init__.py
index 65e1d483..1713e032 100644
--- a/dbt/adapters/__init__.py
+++ b/dbt/adapters/__init__.py
@@ -2,6 +2,7 @@
This adds all subdirectories of directories on `sys.path` to this package’s `__path__` .
It effectively combines all adapters into a single namespace (dbt.adapter).
"""
+
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
diff --git a/dbt/adapters/base/column.py b/dbt/adapters/base/column.py
index e2e6e1e0..195684a4 100644
--- a/dbt/adapters/base/column.py
+++ b/dbt/adapters/base/column.py
@@ -123,9 +123,6 @@ def numeric_type(cls, dtype: str, precision: Any, scale: Any) -> str:
else:
return "{}({},{})".format(dtype, precision, scale)
- def __repr__(self) -> str:
-        return "<Column {} ({})>".format(self.name, self.data_type)
-
@classmethod
def from_description(cls, name: str, raw_data_type: str) -> "Column":
match = re.match(r"([^(]+)(\([^)]+\))?", raw_data_type)
diff --git a/dbt/adapters/base/connections.py b/dbt/adapters/base/connections.py
index a3a4d98d..6e038297 100644
--- a/dbt/adapters/base/connections.py
+++ b/dbt/adapters/base/connections.py
@@ -165,7 +165,9 @@ def set_connection_name(self, name: Optional[str] = None) -> Connection:
conn.handle = LazyHandle(self.open)
# Add the connection to thread_connections for this thread
self.set_thread_connection(conn)
- fire_event(NewConnection(conn_name=conn_name, conn_type=self.TYPE, node_info=get_node_info()))
+ fire_event(
+ NewConnection(conn_name=conn_name, conn_type=self.TYPE, node_info=get_node_info())
+ )
else: # existing connection either wasn't open or didn't have the right name
if conn.state != "open":
conn.handle = LazyHandle(self.open)
diff --git a/dbt/adapters/base/impl.py b/dbt/adapters/base/impl.py
index 5b4b8080..e0627c47 100644
--- a/dbt/adapters/base/impl.py
+++ b/dbt/adapters/base/impl.py
@@ -1,10 +1,10 @@
import abc
+import time
from concurrent.futures import as_completed, Future
from contextlib import contextmanager
from datetime import datetime
from enum import Enum
from multiprocessing.context import SpawnContext
-import time
from typing import (
Any,
Callable,
@@ -23,12 +23,15 @@
TYPE_CHECKING,
)
+import pytz
from dbt_common.clients.jinja import CallableMacroGenerator
from dbt_common.contracts.constraints import (
ColumnLevelConstraint,
ConstraintType,
ModelLevelConstraint,
)
+from dbt_common.contracts.metadata import CatalogTable
+from dbt_common.events.functions import fire_event, warn_or_error
from dbt_common.exceptions import (
DbtInternalError,
DbtRuntimeError,
@@ -38,14 +41,12 @@
NotImplementedError,
UnexpectedNullError,
)
-from dbt_common.events.functions import fire_event, warn_or_error
from dbt_common.utils import (
AttrDict,
cast_to_str,
executor,
filter_null_values,
)
-import pytz
from dbt.adapters.base.column import Column as BaseColumn
from dbt.adapters.base.connections import (
@@ -222,6 +223,7 @@ class BaseAdapter(metaclass=AdapterMeta):
- truncate_relation
- rename_relation
- get_columns_in_relation
+ - get_catalog_for_single_relation
- get_column_schema_from_query
- expand_column_types
- list_relations_without_caching
@@ -317,14 +319,18 @@ def nice_connection_name(self) -> str:
return conn.name
@contextmanager
- def connection_named(self, name: str, query_header_context: Any = None) -> Iterator[None]:
+ def connection_named(
+ self, name: str, query_header_context: Any = None, should_release_connection=True
+ ) -> Iterator[None]:
try:
if self.connections.query_header is not None:
self.connections.query_header.set(name, query_header_context)
self.acquire_connection(name)
yield
finally:
- self.release_connection()
+ if should_release_connection:
+ self.release_connection()
+
if self.connections.query_header is not None:
self.connections.query_header.reset()
@@ -627,6 +633,12 @@ def get_columns_in_relation(self, relation: BaseRelation) -> List[BaseColumn]:
"""Get a list of the columns in the given Relation."""
raise NotImplementedError("`get_columns_in_relation` is not implemented for this adapter!")
+ def get_catalog_for_single_relation(self, relation: BaseRelation) -> Optional[CatalogTable]:
+ """Get catalog information including table-level and column-level metadata for a single relation."""
+ raise NotImplementedError(
+ "`get_catalog_for_single_relation` is not implemented for this adapter!"
+ )
+
@available.deprecated("get_columns_in_relation", lambda *a, **k: [])
def get_columns_in_table(self, schema: str, identifier: str) -> List[BaseColumn]:
"""DEPRECATED: Get a list of the columns in the given table."""
@@ -1062,6 +1074,7 @@ def execute_macro(
project: Optional[str] = None,
context_override: Optional[Dict[str, Any]] = None,
kwargs: Optional[Dict[str, Any]] = None,
+ needs_conn: bool = False,
) -> AttrDict:
"""Look macro_name up in the manifest and execute its results.
@@ -1074,6 +1087,10 @@ def execute_macro(
execution context.
:param kwargs: An optional dict of keyword args used to pass to the
macro.
+        :param needs_conn: A boolean that indicates whether the specified macro
+ requires an open connection to execute. If needs_conn is True, a
+ connection is expected and opened if necessary. Otherwise (and by default),
+ no connection is expected prior to executing the macro.
"""
if kwargs is None:
@@ -1106,6 +1123,10 @@ def execute_macro(
macro_function = CallableMacroGenerator(macro, macro_context)
+ if needs_conn:
+ connection = self.connections.get_thread_connection()
+ self.connections.open(connection)
+
with self.connections.exception_handler(f"macro {macro_name}"):
result = macro_function(**kwargs)
return result
@@ -1297,48 +1318,111 @@ def calculate_freshness(
}
return adapter_response, freshness
+ def calculate_freshness_from_metadata_batch(
+ self,
+ sources: List[BaseRelation],
+ macro_resolver: Optional[MacroResolverProtocol] = None,
+ ) -> Tuple[List[Optional[AdapterResponse]], Dict[BaseRelation, FreshnessResponse]]:
+ """
+ Given a list of sources (BaseRelations), calculate the metadata-based freshness in batch.
+ This method should _not_ execute a warehouse query per source, but rather batch up
+ the sources into as few requests as possible to minimize the number of roundtrips required
+ to compute metadata-based freshness for each input source.
+
+ :param sources: The list of sources to calculate metadata-based freshness for
+ :param macro_resolver: An optional macro_resolver to use for get_relation_last_modified
+ :return: a tuple where:
+ * the first element is a list of optional AdapterResponses indicating the response
+ for each request the method made to compute the freshness for the provided sources.
+ * the second element is a dictionary mapping an input source BaseRelation to a FreshnessResponse,
+ if it was possible to calculate a FreshnessResponse for the source.
+ """
+ # Track schema, identifiers of sources for lookup from batch query
+ schema_identifier_to_source = {
+ (
+ source.path.get_lowered_part(ComponentName.Schema), # type: ignore
+ source.path.get_lowered_part(ComponentName.Identifier), # type: ignore
+ ): source
+ for source in sources
+ }
+
+ # Group metadata sources by information schema -- one query per information schema will be necessary
+ sources_by_info_schema: Dict[InformationSchema, List[BaseRelation]] = (
+ self._get_catalog_relations_by_info_schema(sources)
+ )
+
+ freshness_responses: Dict[BaseRelation, FreshnessResponse] = {}
+ adapter_responses: List[Optional[AdapterResponse]] = []
+ for (
+ information_schema,
+ sources_for_information_schema,
+ ) in sources_by_info_schema.items():
+ result = self.execute_macro(
+ GET_RELATION_LAST_MODIFIED_MACRO_NAME,
+ kwargs={
+ "information_schema": information_schema,
+ "relations": sources_for_information_schema,
+ },
+ macro_resolver=macro_resolver,
+ needs_conn=True,
+ )
+ adapter_response, table = result.response, result.table # type: ignore[attr-defined]
+ adapter_responses.append(adapter_response)
+
+ for row in table:
+ raw_relation, freshness_response = self._parse_freshness_row(row, table)
+ source_relation_for_result = schema_identifier_to_source[raw_relation]
+ freshness_responses[source_relation_for_result] = freshness_response
+
+ return adapter_responses, freshness_responses
+
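+    # Usage sketch (hypothetical relations; assumes the adapter declares the
+    # TableLastModifiedMetadataBatch capability):
+    #
+    #     responses, freshness = adapter.calculate_freshness_from_metadata_batch(
+    #         sources=[orders_source, customers_source],
+    #     )
+    #     for source, result in freshness.items():
+    #         print(source.render(), result["age"])
+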
def calculate_freshness_from_metadata(
self,
source: BaseRelation,
macro_resolver: Optional[MacroResolverProtocol] = None,
) -> Tuple[Optional[AdapterResponse], FreshnessResponse]:
- kwargs: Dict[str, Any] = {
- "information_schema": source.information_schema_only(),
- "relations": [source],
- }
- result = self.execute_macro(
- GET_RELATION_LAST_MODIFIED_MACRO_NAME,
- kwargs=kwargs,
+ adapter_responses, freshness_responses = self.calculate_freshness_from_metadata_batch(
+ sources=[source],
macro_resolver=macro_resolver,
)
- adapter_response, table = result.response, result.table # type: ignore[attr-defined]
-
- try:
- from dbt_common.clients.agate_helper import get_column_value_uncased
+ adapter_response = adapter_responses[0] if adapter_responses else None
+ return adapter_response, freshness_responses[source]
- row = table[0]
- last_modified_val = get_column_value_uncased("last_modified", row)
- snapshotted_at_val = get_column_value_uncased("snapshotted_at", row)
- except Exception:
- raise MacroResultError(GET_RELATION_LAST_MODIFIED_MACRO_NAME, table)
-
- if last_modified_val is None:
+ def _create_freshness_response(
+ self, last_modified: Optional[datetime], snapshotted_at: Optional[datetime]
+ ) -> FreshnessResponse:
+ if last_modified is None:
# Interpret missing value as "infinitely long ago"
max_loaded_at = datetime(1, 1, 1, 0, 0, 0, tzinfo=pytz.UTC)
else:
- max_loaded_at = _utc(last_modified_val, None, "last_modified")
-
- snapshotted_at = _utc(snapshotted_at_val, None, "snapshotted_at")
+ max_loaded_at = _utc(last_modified, None, "last_modified")
+ snapshotted_at = _utc(snapshotted_at, None, "snapshotted_at")
age = (snapshotted_at - max_loaded_at).total_seconds()
-
freshness: FreshnessResponse = {
"max_loaded_at": max_loaded_at,
"snapshotted_at": snapshotted_at,
"age": age,
}
- return adapter_response, freshness
+ return freshness
+
+ def _parse_freshness_row(
+ self, row: "agate.Row", table: "agate.Table"
+ ) -> Tuple[Any, FreshnessResponse]:
+ from dbt_common.clients.agate_helper import get_column_value_uncased
+
+ try:
+ last_modified_val = get_column_value_uncased("last_modified", row)
+ snapshotted_at_val = get_column_value_uncased("snapshotted_at", row)
+ identifier = get_column_value_uncased("identifier", row)
+ schema = get_column_value_uncased("schema", row)
+ except Exception:
+ raise MacroResultError(GET_RELATION_LAST_MODIFIED_MACRO_NAME, table)
+
+ freshness_response = self._create_freshness_response(last_modified_val, snapshotted_at_val)
+ raw_relation = schema.lower().strip(), identifier.lower().strip()
+ return raw_relation, freshness_response
def pre_model_hook(self, config: Mapping[str, Any]) -> Any:
"""A hook for running some operation before the model materialization
@@ -1518,8 +1602,13 @@ def render_column_constraint(cls, constraint: ColumnLevelConstraint) -> Optional
rendered_column_constraint = f"unique {constraint_expression}"
elif constraint.type == ConstraintType.primary_key:
rendered_column_constraint = f"primary key {constraint_expression}"
- elif constraint.type == ConstraintType.foreign_key and constraint_expression:
- rendered_column_constraint = f"references {constraint_expression}"
+ elif constraint.type == ConstraintType.foreign_key:
+ if constraint.to and constraint.to_columns:
+ rendered_column_constraint = (
+ f"references {constraint.to} ({', '.join(constraint.to_columns)})"
+ )
+ elif constraint_expression:
+ rendered_column_constraint = f"references {constraint_expression}"
elif constraint.type == ConstraintType.custom and constraint_expression:
rendered_column_constraint = constraint_expression
@@ -1598,20 +1687,29 @@ def render_model_constraint(cls, constraint: ModelLevelConstraint) -> Optional[s
rendering."""
constraint_prefix = f"constraint {constraint.name} " if constraint.name else ""
column_list = ", ".join(constraint.columns)
+ rendered_model_constraint = None
+
if constraint.type == ConstraintType.check and constraint.expression:
- return f"{constraint_prefix}check ({constraint.expression})"
+ rendered_model_constraint = f"{constraint_prefix}check ({constraint.expression})"
elif constraint.type == ConstraintType.unique:
constraint_expression = f" {constraint.expression}" if constraint.expression else ""
- return f"{constraint_prefix}unique{constraint_expression} ({column_list})"
+ rendered_model_constraint = (
+ f"{constraint_prefix}unique{constraint_expression} ({column_list})"
+ )
elif constraint.type == ConstraintType.primary_key:
constraint_expression = f" {constraint.expression}" if constraint.expression else ""
- return f"{constraint_prefix}primary key{constraint_expression} ({column_list})"
- elif constraint.type == ConstraintType.foreign_key and constraint.expression:
- return f"{constraint_prefix}foreign key ({column_list}) references {constraint.expression}"
+ rendered_model_constraint = (
+ f"{constraint_prefix}primary key{constraint_expression} ({column_list})"
+ )
+ elif constraint.type == ConstraintType.foreign_key:
+ if constraint.to and constraint.to_columns:
+ rendered_model_constraint = f"{constraint_prefix}foreign key ({column_list}) references {constraint.to} ({', '.join(constraint.to_columns)})"
+ elif constraint.expression:
+ rendered_model_constraint = f"{constraint_prefix}foreign key ({column_list}) references {constraint.expression}"
elif constraint.type == ConstraintType.custom and constraint.expression:
- return f"{constraint_prefix}{constraint.expression}"
- else:
- return None
+ rendered_model_constraint = f"{constraint_prefix}{constraint.expression}"
+
+ return rendered_model_constraint
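+
+    # For example (a sketch): a model-level foreign key constraint with
+    # columns=["parent_id"], to="other_schema.parent", and to_columns=["id"]
+    # renders as:
+    #     foreign key (parent_id) references other_schema.parent (id)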
@classmethod
def capabilities(cls) -> CapabilityDict:
diff --git a/dbt/adapters/base/relation.py b/dbt/adapters/base/relation.py
index d74b29bd..1aab7b2f 100644
--- a/dbt/adapters/base/relation.py
+++ b/dbt/adapters/base/relation.py
@@ -47,6 +47,9 @@ class BaseRelation(FakeAPIObject, Hashable):
quote_policy: Policy = field(default_factory=lambda: Policy())
dbt_created: bool = False
limit: Optional[int] = None
+ require_alias: bool = (
+ True # used to govern whether to add an alias when render_limited is called
+ )
# register relation types that can be renamed for the purpose of replacing relations using stages and backups
# adding a relation type here also requires defining the associated rename macro
@@ -205,14 +208,22 @@ def render(self) -> str:
# if there is nothing set, this will return the empty string.
return ".".join(part for _, part in self._render_iterator() if part is not None)
+ def _render_limited_alias(self) -> str:
+        """Some databases (e.g. postgres, mysql) require an alias for subqueries; for all others we want to avoid
+        adding an alias, as it has the potential to introduce issues with the query if the user also defines an alias.
+ """
+ if self.require_alias:
+ return f" _dbt_limit_subq_{self.table}"
+ return ""
+
def render_limited(self) -> str:
rendered = self.render()
if self.limit is None:
return rendered
elif self.limit == 0:
- return f"(select * from {rendered} where false limit 0) _dbt_limit_subq"
+ return f"(select * from {rendered} where false limit 0){self._render_limited_alias()}"
else:
- return f"(select * from {rendered} limit {self.limit}) _dbt_limit_subq"
+ return f"(select * from {rendered} limit {self.limit}){self._render_limited_alias()}"
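+
+    # Sketch: with limit=5 and require_alias=True, a relation that renders as
+    # "my_db"."my_schema"."my_table" is rendered limited as:
+    #     (select * from "my_db"."my_schema"."my_table" limit 5) _dbt_limit_subq_my_table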
def quoted(self, identifier):
return "{quote_char}{identifier}{quote_char}".format(
@@ -230,8 +241,11 @@ def create_ephemeral_from(
relation_config: RelationConfig,
limit: Optional[int] = None,
) -> Self:
- # Note that ephemeral models are based on the name.
- identifier = cls.add_ephemeral_prefix(relation_config.name)
+ # Note that ephemeral models are based on the identifier, which will
+ # point to the model's alias if one exists and otherwise fall back to
+ # the filename. This is intended to give the user more control over
+ # the way that the CTE name is constructed
+ identifier = cls.add_ephemeral_prefix(relation_config.identifier)
return cls.create(
type=cls.CTE,
identifier=identifier,
diff --git a/dbt/adapters/capability.py b/dbt/adapters/capability.py
index 745cb27a..2bd49112 100644
--- a/dbt/adapters/capability.py
+++ b/dbt/adapters/capability.py
@@ -13,6 +13,14 @@ class Capability(str, Enum):
TableLastModifiedMetadata = "TableLastModifiedMetadata"
"""Indicates support for determining the time of the last table modification by querying database metadata."""
+ TableLastModifiedMetadataBatch = "TableLastModifiedMetadataBatch"
+    """Indicates support for efficiently determining the time of the last table modification by querying database
+    metadata in batch."""
+
+ GetCatalogForSingleRelation = "GetCatalogForSingleRelation"
+ """Indicates support for getting catalog information including table-level and column-level metadata for a single
+ relation."""
+
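+
+# Sketch: adapters typically advertise these capabilities from `BaseAdapter.capabilities()`
+# via a CapabilityDict; the support level below is hypothetical:
+#
+#     CapabilityDict(
+#         {Capability.TableLastModifiedMetadataBatch: CapabilitySupport(support=Support.Full)}
+#     )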
class Support(str, Enum):
Unknown = "Unknown"
diff --git a/dbt/adapters/contracts/connection.py b/dbt/adapters/contracts/connection.py
index 2ff4e5af..e3baf284 100644
--- a/dbt/adapters/contracts/connection.py
+++ b/dbt/adapters/contracts/connection.py
@@ -170,7 +170,7 @@ def __pre_deserialize__(cls, data):
def translate_aliases(cls, kwargs: Dict[str, Any], recurse: bool = False) -> Dict[str, Any]:
return translate_aliases(kwargs, cls._ALIASES, recurse)
- def __post_serialize__(self, dct):
+ def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None):
# no super() -- do we need it?
if self._ALIASES:
dct.update(
diff --git a/dbt/adapters/contracts/relation.py b/dbt/adapters/contracts/relation.py
index 6a88d074..42beb579 100644
--- a/dbt/adapters/contracts/relation.py
+++ b/dbt/adapters/contracts/relation.py
@@ -18,8 +18,9 @@ class RelationType(StrEnum):
View = "view"
CTE = "cte"
MaterializedView = "materialized_view"
- External = "external"
Ephemeral = "ephemeral"
+    # a "catch-all" that is better than `None`; "external" means external to anything dbt is aware of
+ External = "external"
class MaterializationContract(Protocol):
@@ -40,11 +41,9 @@ class MaterializationConfig(Mapping, ABC):
contract: MaterializationContract
extra: Dict[str, Any]
- def __contains__(self, item):
- ...
+ def __contains__(self, item): ...
- def __delitem__(self, key):
- ...
+ def __delitem__(self, key): ...
class RelationConfig(Protocol):
diff --git a/dbt/adapters/events/README.md b/dbt/adapters/events/README.md
index fe39a18e..c98488db 100644
--- a/dbt/adapters/events/README.md
+++ b/dbt/adapters/events/README.md
@@ -14,7 +14,7 @@ When events are processed via `fire_event`, nearly everything is logged. Whether
We have switched from using betterproto to using google protobuf, because of a lack of support for Struct fields in betterproto.
-The google protobuf interface is janky and very much non-Pythonic. The "generated" classes in types_pb2.py do not resemble regular Python classes. They do not have normal constructors; they can only be constructed empty. They can be "filled" by setting fields individually or using a json_format method like ParseDict. We have wrapped the logging events with a class (in types.py) which allows using a constructor -- keywords only, no positional parameters.
+The google protobuf interface is janky and very much non-Pythonic. The "generated" classes in types_pb2.py do not resemble regular Python classes. They do not have normal constructors; they can only be constructed empty. They can be "filled" by setting fields individually or using a json_format method like ParseDict. We have wrapped the logging events with a class (in types.py) which allows using a constructor -- keywords only, no positional parameters.
## Required for Every Event
diff --git a/dbt/adapters/events/adapter_types.proto b/dbt/adapters/events/adapter_types.proto
index aa0b507c..69d64325 100644
--- a/dbt/adapters/events/adapter_types.proto
+++ b/dbt/adapters/events/adapter_types.proto
@@ -515,3 +515,13 @@ message ConstraintNotSupportedMsg {
AdapterCommonEventInfo info = 1;
ConstraintNotSupported data = 2;
}
+
+// E050
+message TypeCodeNotFound {
+ int32 type_code = 1;
+}
+
+message TypeCodeNotFoundMsg {
+ AdapterCommonEventInfo info = 1;
+ TypeCodeNotFound data = 2;
+}
diff --git a/dbt/adapters/events/adapter_types_pb2.py b/dbt/adapters/events/adapter_types_pb2.py
index 5d41b719..bfd44080 100644
--- a/dbt/adapters/events/adapter_types_pb2.py
+++ b/dbt/adapters/events/adapter_types_pb2.py
@@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: adapter_types.proto
-# Protobuf Python Version: 4.25.2
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
@@ -16,7 +15,7 @@
from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x61\x64\x61pter_types.proto\x12\x0bproto_types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xab\x02\n\x16\x41\x64\x61pterCommonEventInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\x0b\n\x03msg\x18\x03 \x01(\t\x12\r\n\x05level\x18\x04 \x01(\t\x12\x15\n\rinvocation_id\x18\x05 \x01(\t\x12\x0b\n\x03pid\x18\x06 \x01(\x05\x12\x0e\n\x06thread\x18\x07 \x01(\t\x12&\n\x02ts\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12=\n\x05\x65xtra\x18\t \x03(\x0b\x32..proto_types.AdapterCommonEventInfo.ExtraEntry\x12\x10\n\x08\x63\x61tegory\x18\n \x01(\t\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"]\n\x13\x41\x64\x61pterNodeRelation\x12\x10\n\x08\x64\x61tabase\x18\n \x01(\t\x12\x0e\n\x06schema\x18\x0b \x01(\t\x12\r\n\x05\x61lias\x18\x0c \x01(\t\x12\x15\n\rrelation_name\x18\r \x01(\t\"\x9f\x02\n\x0f\x41\x64\x61pterNodeInfo\x12\x11\n\tnode_path\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x11\n\tunique_id\x18\x03 \x01(\t\x12\x15\n\rresource_type\x18\x04 \x01(\t\x12\x14\n\x0cmaterialized\x18\x05 \x01(\t\x12\x13\n\x0bnode_status\x18\x06 \x01(\t\x12\x17\n\x0fnode_started_at\x18\x07 \x01(\t\x12\x18\n\x10node_finished_at\x18\x08 \x01(\t\x12%\n\x04meta\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x37\n\rnode_relation\x18\n \x01(\x0b\x32 .proto_types.AdapterNodeRelation\"G\n\x0fReferenceKeyMsg\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12\x12\n\nidentifier\x18\x03 \x01(\t\"?\n\x19\x41\x64\x61pterDeprecationWarning\x12\x10\n\x08old_name\x18\x01 \x01(\t\x12\x10\n\x08new_name\x18\x02 \x01(\t\"\x87\x01\n\x1c\x41\x64\x61pterDeprecationWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.AdapterDeprecationWarning\"!\n\x1f\x43ollectFreshnessReturnSignature\"\x93\x01\n\"CollectFreshnessReturnSignatureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.CollectFreshnessReturnSignature\"\x8e\x01\n\x11\x41\x64\x61pterEventDebug\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"w\n\x14\x41\x64\x61pterEventDebugMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventDebug\"\x8d\x01\n\x10\x41\x64\x61pterEventInfo\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"u\n\x13\x41\x64\x61pterEventInfoMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.AdapterEventInfo\"\x90\x01\n\x13\x41\x64\x61pterEventWarning\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"{\n\x16\x41\x64\x61pterEventWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 
.proto_types.AdapterEventWarning\"\xa0\x01\n\x11\x41\x64\x61pterEventError\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x10\n\x08\x65xc_info\x18\x05 \x01(\t\"w\n\x14\x41\x64\x61pterEventErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventError\"f\n\rNewConnection\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"o\n\x10NewConnectionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.NewConnection\"=\n\x10\x43onnectionReused\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x16\n\x0eorig_conn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionReusedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionReused\"0\n\x1b\x43onnectionLeftOpenInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x8b\x01\n\x1e\x43onnectionLeftOpenInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.ConnectionLeftOpenInCleanup\".\n\x19\x43onnectionClosedInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x87\x01\n\x1c\x43onnectionClosedInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.ConnectionClosedInCleanup\"f\n\x0eRollbackFailed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"q\n\x11RollbackFailedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.RollbackFailed\"V\n\x10\x43onnectionClosed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionClosedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionClosed\"X\n\x12\x43onnectionLeftOpen\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"y\n\x15\x43onnectionLeftOpenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.ConnectionLeftOpen\"N\n\x08Rollback\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"e\n\x0bRollbackMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.Rollback\"@\n\tCacheMiss\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\"g\n\x0c\x43\x61\x63heMissMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.CacheMiss\"b\n\rListRelations\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12/\n\trelations\x18\x03 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10ListRelationsMsg\x12\x31\n\x04info\x18\x01 
\x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ListRelations\"g\n\x0e\x43onnectionUsed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"q\n\x11\x43onnectionUsedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.ConnectionUsed\"[\n\x08SQLQuery\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x0b\n\x03sql\x18\x03 \x01(\t\"e\n\x0bSQLQueryMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.SQLQuery\"b\n\x0eSQLQueryStatus\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0e\n\x06status\x18\x02 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x03 \x01(\x02\"q\n\x11SQLQueryStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SQLQueryStatus\"O\n\tSQLCommit\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"g\n\x0cSQLCommitMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.SQLCommit\"a\n\rColTypeChange\x12\x11\n\torig_type\x18\x01 \x01(\t\x12\x10\n\x08new_type\x18\x02 \x01(\t\x12+\n\x05table\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10\x43olTypeChangeMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ColTypeChange\"@\n\x0eSchemaCreation\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"q\n\x11SchemaCreationMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SchemaCreation\"<\n\nSchemaDrop\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"i\n\rSchemaDropMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.SchemaDrop\"\xde\x01\n\x0b\x43\x61\x63heAction\x12\x0e\n\x06\x61\x63tion\x18\x01 \x01(\t\x12-\n\x07ref_key\x18\x02 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_2\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_3\x18\x04 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12.\n\x08ref_list\x18\x05 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"k\n\x0e\x43\x61\x63heActionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.CacheAction\"\x98\x01\n\x0e\x43\x61\x63heDumpGraph\x12\x33\n\x04\x64ump\x18\x01 \x03(\x0b\x32%.proto_types.CacheDumpGraph.DumpEntry\x12\x14\n\x0c\x62\x65\x66ore_after\x18\x02 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\x1a+\n\tDumpEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"q\n\x11\x43\x61\x63heDumpGraphMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CacheDumpGraph\"B\n\x11\x41\x64\x61pterRegistered\x12\x14\n\x0c\x61\x64\x61pter_name\x18\x01 \x01(\t\x12\x17\n\x0f\x61\x64\x61pter_version\x18\x02 \x01(\t\"w\n\x14\x41\x64\x61pterRegisteredMsg\x12\x31\n\x04info\x18\x01 
\x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterRegistered\"!\n\x12\x41\x64\x61pterImportError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"y\n\x15\x41\x64\x61pterImportErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.AdapterImportError\"#\n\x0fPluginLoadError\x12\x10\n\x08\x65xc_info\x18\x01 \x01(\t\"s\n\x12PluginLoadErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.PluginLoadError\"a\n\x14NewConnectionOpening\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x18\n\x10\x63onnection_state\x18\x02 \x01(\t\"}\n\x17NewConnectionOpeningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.NewConnectionOpening\"8\n\rCodeExecution\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x14\n\x0c\x63ode_content\x18\x02 \x01(\t\"o\n\x10\x43odeExecutionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.CodeExecution\"6\n\x13\x43odeExecutionStatus\x12\x0e\n\x06status\x18\x01 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x02 \x01(\x02\"{\n\x16\x43odeExecutionStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.CodeExecutionStatus\"%\n\x16\x43\x61talogGenerationError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"\x81\x01\n\x19\x43\x61talogGenerationErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.CatalogGenerationError\"-\n\x13WriteCatalogFailure\x12\x16\n\x0enum_exceptions\x18\x01 \x01(\x05\"{\n\x16WriteCatalogFailureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.WriteCatalogFailure\"\x1e\n\x0e\x43\x61talogWritten\x12\x0c\n\x04path\x18\x01 \x01(\t\"q\n\x11\x43\x61talogWrittenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CatalogWritten\"\x14\n\x12\x43\x61nnotGenerateDocs\"y\n\x15\x43\x61nnotGenerateDocsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.CannotGenerateDocs\"\x11\n\x0f\x42uildingCatalog\"s\n\x12\x42uildingCatalogMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.BuildingCatalog\"-\n\x18\x44\x61tabaseErrorRunningHook\x12\x11\n\thook_type\x18\x01 \x01(\t\"\x85\x01\n\x1b\x44\x61tabaseErrorRunningHookMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.DatabaseErrorRunningHook\"4\n\x0cHooksRunning\x12\x11\n\tnum_hooks\x18\x01 \x01(\x05\x12\x11\n\thook_type\x18\x02 \x01(\t\"m\n\x0fHooksRunningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.HooksRunning\"T\n\x14\x46inishedRunningStats\x12\x11\n\tstat_line\x18\x01 \x01(\t\x12\x11\n\texecution\x18\x02 \x01(\t\x12\x16\n\x0e\x65xecution_time\x18\x03 \x01(\x02\"}\n\x17\x46inishedRunningStatsMsg\x12\x31\n\x04info\x18\x01 
\x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.FinishedRunningStats\"<\n\x15\x43onstraintNotEnforced\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x7f\n\x18\x43onstraintNotEnforcedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.ConstraintNotEnforced\"=\n\x16\x43onstraintNotSupported\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x81\x01\n\x19\x43onstraintNotSupportedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.ConstraintNotSupportedb\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x61\x64\x61pter_types.proto\x12\x0bproto_types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xab\x02\n\x16\x41\x64\x61pterCommonEventInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\x0b\n\x03msg\x18\x03 \x01(\t\x12\r\n\x05level\x18\x04 \x01(\t\x12\x15\n\rinvocation_id\x18\x05 \x01(\t\x12\x0b\n\x03pid\x18\x06 \x01(\x05\x12\x0e\n\x06thread\x18\x07 \x01(\t\x12&\n\x02ts\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12=\n\x05\x65xtra\x18\t \x03(\x0b\x32..proto_types.AdapterCommonEventInfo.ExtraEntry\x12\x10\n\x08\x63\x61tegory\x18\n \x01(\t\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"]\n\x13\x41\x64\x61pterNodeRelation\x12\x10\n\x08\x64\x61tabase\x18\n \x01(\t\x12\x0e\n\x06schema\x18\x0b \x01(\t\x12\r\n\x05\x61lias\x18\x0c \x01(\t\x12\x15\n\rrelation_name\x18\r \x01(\t\"\x9f\x02\n\x0f\x41\x64\x61pterNodeInfo\x12\x11\n\tnode_path\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x11\n\tunique_id\x18\x03 \x01(\t\x12\x15\n\rresource_type\x18\x04 \x01(\t\x12\x14\n\x0cmaterialized\x18\x05 \x01(\t\x12\x13\n\x0bnode_status\x18\x06 \x01(\t\x12\x17\n\x0fnode_started_at\x18\x07 \x01(\t\x12\x18\n\x10node_finished_at\x18\x08 \x01(\t\x12%\n\x04meta\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x37\n\rnode_relation\x18\n \x01(\x0b\x32 .proto_types.AdapterNodeRelation\"G\n\x0fReferenceKeyMsg\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12\x12\n\nidentifier\x18\x03 \x01(\t\"?\n\x19\x41\x64\x61pterDeprecationWarning\x12\x10\n\x08old_name\x18\x01 \x01(\t\x12\x10\n\x08new_name\x18\x02 \x01(\t\"\x87\x01\n\x1c\x41\x64\x61pterDeprecationWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.AdapterDeprecationWarning\"!\n\x1f\x43ollectFreshnessReturnSignature\"\x93\x01\n\"CollectFreshnessReturnSignatureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.CollectFreshnessReturnSignature\"\x8e\x01\n\x11\x41\x64\x61pterEventDebug\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"w\n\x14\x41\x64\x61pterEventDebugMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventDebug\"\x8d\x01\n\x10\x41\x64\x61pterEventInfo\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"u\n\x13\x41\x64\x61pterEventInfoMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.AdapterEventInfo\"\x90\x01\n\x13\x41\x64\x61pterEventWarning\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"{\n\x16\x41\x64\x61pterEventWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 
.proto_types.AdapterEventWarning\"\xa0\x01\n\x11\x41\x64\x61pterEventError\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x10\n\x08\x65xc_info\x18\x05 \x01(\t\"w\n\x14\x41\x64\x61pterEventErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventError\"f\n\rNewConnection\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"o\n\x10NewConnectionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.NewConnection\"=\n\x10\x43onnectionReused\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x16\n\x0eorig_conn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionReusedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionReused\"0\n\x1b\x43onnectionLeftOpenInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x8b\x01\n\x1e\x43onnectionLeftOpenInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.ConnectionLeftOpenInCleanup\".\n\x19\x43onnectionClosedInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x87\x01\n\x1c\x43onnectionClosedInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.ConnectionClosedInCleanup\"f\n\x0eRollbackFailed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"q\n\x11RollbackFailedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.RollbackFailed\"V\n\x10\x43onnectionClosed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionClosedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionClosed\"X\n\x12\x43onnectionLeftOpen\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"y\n\x15\x43onnectionLeftOpenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.ConnectionLeftOpen\"N\n\x08Rollback\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"e\n\x0bRollbackMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.Rollback\"@\n\tCacheMiss\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\"g\n\x0c\x43\x61\x63heMissMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.CacheMiss\"b\n\rListRelations\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12/\n\trelations\x18\x03 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10ListRelationsMsg\x12\x31\n\x04info\x18\x01 
\x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ListRelations\"g\n\x0e\x43onnectionUsed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"q\n\x11\x43onnectionUsedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.ConnectionUsed\"[\n\x08SQLQuery\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x0b\n\x03sql\x18\x03 \x01(\t\"e\n\x0bSQLQueryMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.SQLQuery\"b\n\x0eSQLQueryStatus\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0e\n\x06status\x18\x02 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x03 \x01(\x02\"q\n\x11SQLQueryStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SQLQueryStatus\"O\n\tSQLCommit\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"g\n\x0cSQLCommitMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.SQLCommit\"a\n\rColTypeChange\x12\x11\n\torig_type\x18\x01 \x01(\t\x12\x10\n\x08new_type\x18\x02 \x01(\t\x12+\n\x05table\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10\x43olTypeChangeMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ColTypeChange\"@\n\x0eSchemaCreation\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"q\n\x11SchemaCreationMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SchemaCreation\"<\n\nSchemaDrop\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"i\n\rSchemaDropMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.SchemaDrop\"\xde\x01\n\x0b\x43\x61\x63heAction\x12\x0e\n\x06\x61\x63tion\x18\x01 \x01(\t\x12-\n\x07ref_key\x18\x02 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_2\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_3\x18\x04 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12.\n\x08ref_list\x18\x05 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"k\n\x0e\x43\x61\x63heActionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.CacheAction\"\x98\x01\n\x0e\x43\x61\x63heDumpGraph\x12\x33\n\x04\x64ump\x18\x01 \x03(\x0b\x32%.proto_types.CacheDumpGraph.DumpEntry\x12\x14\n\x0c\x62\x65\x66ore_after\x18\x02 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\x1a+\n\tDumpEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"q\n\x11\x43\x61\x63heDumpGraphMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CacheDumpGraph\"B\n\x11\x41\x64\x61pterRegistered\x12\x14\n\x0c\x61\x64\x61pter_name\x18\x01 \x01(\t\x12\x17\n\x0f\x61\x64\x61pter_version\x18\x02 \x01(\t\"w\n\x14\x41\x64\x61pterRegisteredMsg\x12\x31\n\x04info\x18\x01 
\x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterRegistered\"!\n\x12\x41\x64\x61pterImportError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"y\n\x15\x41\x64\x61pterImportErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.AdapterImportError\"#\n\x0fPluginLoadError\x12\x10\n\x08\x65xc_info\x18\x01 \x01(\t\"s\n\x12PluginLoadErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.PluginLoadError\"a\n\x14NewConnectionOpening\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x18\n\x10\x63onnection_state\x18\x02 \x01(\t\"}\n\x17NewConnectionOpeningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.NewConnectionOpening\"8\n\rCodeExecution\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x14\n\x0c\x63ode_content\x18\x02 \x01(\t\"o\n\x10\x43odeExecutionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.CodeExecution\"6\n\x13\x43odeExecutionStatus\x12\x0e\n\x06status\x18\x01 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x02 \x01(\x02\"{\n\x16\x43odeExecutionStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.CodeExecutionStatus\"%\n\x16\x43\x61talogGenerationError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"\x81\x01\n\x19\x43\x61talogGenerationErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.CatalogGenerationError\"-\n\x13WriteCatalogFailure\x12\x16\n\x0enum_exceptions\x18\x01 \x01(\x05\"{\n\x16WriteCatalogFailureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.WriteCatalogFailure\"\x1e\n\x0e\x43\x61talogWritten\x12\x0c\n\x04path\x18\x01 \x01(\t\"q\n\x11\x43\x61talogWrittenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CatalogWritten\"\x14\n\x12\x43\x61nnotGenerateDocs\"y\n\x15\x43\x61nnotGenerateDocsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.CannotGenerateDocs\"\x11\n\x0f\x42uildingCatalog\"s\n\x12\x42uildingCatalogMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.BuildingCatalog\"-\n\x18\x44\x61tabaseErrorRunningHook\x12\x11\n\thook_type\x18\x01 \x01(\t\"\x85\x01\n\x1b\x44\x61tabaseErrorRunningHookMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.DatabaseErrorRunningHook\"4\n\x0cHooksRunning\x12\x11\n\tnum_hooks\x18\x01 \x01(\x05\x12\x11\n\thook_type\x18\x02 \x01(\t\"m\n\x0fHooksRunningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.HooksRunning\"T\n\x14\x46inishedRunningStats\x12\x11\n\tstat_line\x18\x01 \x01(\t\x12\x11\n\texecution\x18\x02 \x01(\t\x12\x16\n\x0e\x65xecution_time\x18\x03 \x01(\x02\"}\n\x17\x46inishedRunningStatsMsg\x12\x31\n\x04info\x18\x01 
\x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.FinishedRunningStats\"<\n\x15\x43onstraintNotEnforced\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x7f\n\x18\x43onstraintNotEnforcedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.ConstraintNotEnforced\"=\n\x16\x43onstraintNotSupported\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x81\x01\n\x19\x43onstraintNotSupportedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.ConstraintNotSupported\"%\n\x10TypeCodeNotFound\x12\x11\n\ttype_code\x18\x01 \x01(\x05\"u\n\x13TypeCodeNotFoundMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.TypeCodeNotFoundb\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -203,4 +202,8 @@
_globals['_CONSTRAINTNOTSUPPORTED']._serialized_end=8961
_globals['_CONSTRAINTNOTSUPPORTEDMSG']._serialized_start=8964
_globals['_CONSTRAINTNOTSUPPORTEDMSG']._serialized_end=9093
+ _globals['_TYPECODENOTFOUND']._serialized_start=9095
+ _globals['_TYPECODENOTFOUND']._serialized_end=9132
+ _globals['_TYPECODENOTFOUNDMSG']._serialized_start=9134
+ _globals['_TYPECODENOTFOUNDMSG']._serialized_end=9251
# @@protoc_insertion_point(module_scope)
diff --git a/dbt/adapters/events/types.py b/dbt/adapters/events/types.py
index c49fef9b..47c48da6 100644
--- a/dbt/adapters/events/types.py
+++ b/dbt/adapters/events/types.py
@@ -190,7 +190,7 @@ def code(self) -> str:
return "E017"
def message(self) -> str:
- return f"SQL status: {self.status} in {self.elapsed} seconds"
+ return f"SQL status: {self.status} in {self.elapsed:.3f} seconds"
class SQLCommit(DebugLevel):
@@ -422,3 +422,16 @@ def message(self) -> str:
"be ignored. Set 'warn_unsupported: false' on this constraint to ignore this warning."
)
return line_wrap_message(warning_tag(msg))
+
+
+class TypeCodeNotFound(DebugLevel):
+ def code(self) -> str:
+ return "E050"
+
+ def message(self) -> str:
+ msg = (
+ f"The `type_code` {self.type_code} was not recognized, which may affect error "
+ "messages for enforced contracts that fail as well as `Column.data_type` values "
+ "returned by `get_column_schema_from_query`"
+ )
+ return line_wrap_message(warning_tag(msg))
diff --git a/dbt/adapters/factory.py b/dbt/adapters/factory.py
index e63d43e1..b1854f67 100644
--- a/dbt/adapters/factory.py
+++ b/dbt/adapters/factory.py
@@ -101,17 +101,14 @@ def register_adapter(
self,
config: AdapterRequiredConfig,
mp_context: SpawnContext,
- adapter_registered_log_level: Optional[EventLevel] = EventLevel.INFO
+ adapter_registered_log_level: Optional[EventLevel] = EventLevel.INFO,
) -> None:
adapter_name = config.credentials.type
adapter_type = self.get_adapter_class_by_name(adapter_name)
adapter_version = self._adapter_version(adapter_name)
fire_event(
- AdapterRegistered(
- adapter_name=adapter_name,
- adapter_version=adapter_version
- ),
- level=adapter_registered_log_level
+ AdapterRegistered(adapter_name=adapter_name, adapter_version=adapter_version),
+ level=adapter_registered_log_level,
)
with self.lock:
if adapter_name in self.adapters:
@@ -199,9 +196,9 @@ def get_adapter_constraint_support(self, name: Optional[str]) -> List[str]:
def register_adapter(
- config: AdapterRequiredConfig,
- mp_context: SpawnContext,
- adapter_registered_log_level: Optional[EventLevel] = EventLevel.INFO
+ config: AdapterRequiredConfig,
+ mp_context: SpawnContext,
+ adapter_registered_log_level: Optional[EventLevel] = EventLevel.INFO,
) -> None:
FACTORY.register_adapter(config, mp_context, adapter_registered_log_level)
diff --git a/dbt/adapters/protocol.py b/dbt/adapters/protocol.py
index bbfdd330..35219866 100644
--- a/dbt/adapters/protocol.py
+++ b/dbt/adapters/protocol.py
@@ -47,8 +47,7 @@ class ColumnProtocol(Protocol):
class RelationProtocol(Protocol):
@classmethod
- def get_default_quote_policy(cls) -> Policy:
- ...
+ def get_default_quote_policy(cls) -> Policy: ...
@classmethod
def create_from(
@@ -56,8 +55,7 @@ def create_from(
quoting: HasQuoting,
relation_config: RelationConfig,
**kwargs: Any,
- ) -> Self:
- ...
+ ) -> Self: ...
AdapterConfig_T = TypeVar("AdapterConfig_T", bound=AdapterConfig)
@@ -73,8 +71,7 @@ def __call__(
config: AdapterRequiredConfig,
macro_resolver: MacroResolverProtocol,
package_name: Optional[str],
- ) -> Dict[str, Any]:
- ...
+ ) -> Dict[str, Any]: ...
# TODO CT-211
@@ -96,81 +93,58 @@ class AdapterProtocol( # type: ignore[misc]
ConnectionManager: Type[ConnectionManager_T]
connections: ConnectionManager_T
- def __init__(self, config: AdapterRequiredConfig) -> None:
- ...
+ def __init__(self, config: AdapterRequiredConfig) -> None: ...
- def set_macro_resolver(self, macro_resolver: MacroResolverProtocol) -> None:
- ...
+ def set_macro_resolver(self, macro_resolver: MacroResolverProtocol) -> None: ...
- def get_macro_resolver(self) -> Optional[MacroResolverProtocol]:
- ...
+ def get_macro_resolver(self) -> Optional[MacroResolverProtocol]: ...
- def clear_macro_resolver(self) -> None:
- ...
+ def clear_macro_resolver(self) -> None: ...
def set_macro_context_generator(
self,
macro_context_generator: MacroContextGeneratorCallable,
- ) -> None:
- ...
+ ) -> None: ...
@classmethod
def type(cls) -> str:
pass
- def set_query_header(self, query_header_context: Dict[str, Any]) -> None:
- ...
+ def set_query_header(self, query_header_context: Dict[str, Any]) -> None: ...
@staticmethod
- def get_thread_identifier() -> Hashable:
- ...
+ def get_thread_identifier() -> Hashable: ...
- def get_thread_connection(self) -> Connection:
- ...
+ def get_thread_connection(self) -> Connection: ...
- def set_thread_connection(self, conn: Connection) -> None:
- ...
+ def set_thread_connection(self, conn: Connection) -> None: ...
- def get_if_exists(self) -> Optional[Connection]:
- ...
+ def get_if_exists(self) -> Optional[Connection]: ...
- def clear_thread_connection(self) -> None:
- ...
+ def clear_thread_connection(self) -> None: ...
- def clear_transaction(self) -> None:
- ...
+ def clear_transaction(self) -> None: ...
- def exception_handler(self, sql: str) -> ContextManager:
- ...
+ def exception_handler(self, sql: str) -> ContextManager: ...
- def set_connection_name(self, name: Optional[str] = None) -> Connection:
- ...
+ def set_connection_name(self, name: Optional[str] = None) -> Connection: ...
- def cancel_open(self) -> Optional[List[str]]:
- ...
+ def cancel_open(self) -> Optional[List[str]]: ...
- def open(cls, connection: Connection) -> Connection:
- ...
+ def open(cls, connection: Connection) -> Connection: ...
- def release(self) -> None:
- ...
+ def release(self) -> None: ...
- def cleanup_all(self) -> None:
- ...
+ def cleanup_all(self) -> None: ...
- def begin(self) -> None:
- ...
+ def begin(self) -> None: ...
- def commit(self) -> None:
- ...
+ def commit(self) -> None: ...
- def close(cls, connection: Connection) -> Connection:
- ...
+ def close(cls, connection: Connection) -> Connection: ...
- def commit_if_has_connection(self) -> None:
- ...
+ def commit_if_has_connection(self) -> None: ...
def execute(
self, sql: str, auto_begin: bool = False, fetch: bool = False
- ) -> Tuple[AdapterResponse, "agate.Table"]:
- ...
+ ) -> Tuple[AdapterResponse, "agate.Table"]: ...
diff --git a/dbt/adapters/record/__init__.py b/dbt/adapters/record/__init__.py
new file mode 100644
index 00000000..afde4a01
--- /dev/null
+++ b/dbt/adapters/record/__init__.py
@@ -0,0 +1,2 @@
+from dbt.adapters.record.handle import RecordReplayHandle
+from dbt.adapters.record.cursor.cursor import RecordReplayCursor
diff --git a/dbt/adapters/record/cursor/cursor.py b/dbt/adapters/record/cursor/cursor.py
new file mode 100644
index 00000000..577178db
--- /dev/null
+++ b/dbt/adapters/record/cursor/cursor.py
@@ -0,0 +1,54 @@
+from typing import Any, Optional
+
+from dbt_common.record import record_function
+
+from dbt.adapters.contracts.connection import Connection
+from dbt.adapters.record.cursor.description import CursorGetDescriptionRecord
+from dbt.adapters.record.cursor.execute import CursorExecuteRecord
+from dbt.adapters.record.cursor.fetchone import CursorFetchOneRecord
+from dbt.adapters.record.cursor.fetchmany import CursorFetchManyRecord
+from dbt.adapters.record.cursor.fetchall import CursorFetchAllRecord
+from dbt.adapters.record.cursor.rowcount import CursorGetRowCountRecord
+
+
+class RecordReplayCursor:
+ """A proxy object used to wrap native database cursors under record/replay
+    modes. In record mode, this proxy records the parameters and return values
+    of the methods and properties it implements, which closely match the Python
+    DB API 2.0 cursor methods used by many dbt adapters to interact with the
+    database or DWH. In replay mode, it mocks out those calls using previously
+    recorded results, so that no interaction with a database actually occurs."""
+
+ def __init__(self, native_cursor: Any, connection: Connection) -> None:
+ self.native_cursor = native_cursor
+ self.connection = connection
+
+ @record_function(CursorExecuteRecord, method=True, id_field_name="connection_name")
+ def execute(self, operation, parameters=None) -> None:
+ self.native_cursor.execute(operation, parameters)
+
+ @record_function(CursorFetchOneRecord, method=True, id_field_name="connection_name")
+ def fetchone(self) -> Any:
+ return self.native_cursor.fetchone()
+
+ @record_function(CursorFetchManyRecord, method=True, id_field_name="connection_name")
+ def fetchmany(self, size: int) -> Any:
+ return self.native_cursor.fetchmany(size)
+
+ @record_function(CursorFetchAllRecord, method=True, id_field_name="connection_name")
+ def fetchall(self) -> Any:
+ return self.native_cursor.fetchall()
+
+ @property
+ def connection_name(self) -> Optional[str]:
+ return self.connection.name
+
+ @property
+ @record_function(CursorGetRowCountRecord, method=True, id_field_name="connection_name")
+ def rowcount(self) -> int:
+ return self.native_cursor.rowcount
+
+ @property
+ @record_function(CursorGetDescriptionRecord, method=True, id_field_name="connection_name")
+    def description(self) -> Any:
+ return self.native_cursor.description
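+
+
+# Illustrative usage (hypothetical names): adapters normally obtain this proxy
+# from RecordReplayHandle.cursor() rather than constructing it directly, but a
+# direct construction shows the proxy's behavior:
+#
+#   cursor = RecordReplayCursor(native_connection.cursor(), connection)
+#   cursor.execute("select 1")
+#   rows = cursor.fetchall()  # recorded in record mode, replayed in replay mode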
diff --git a/dbt/adapters/record/cursor/description.py b/dbt/adapters/record/cursor/description.py
new file mode 100644
index 00000000..d6ba15d9
--- /dev/null
+++ b/dbt/adapters/record/cursor/description.py
@@ -0,0 +1,37 @@
+import dataclasses
+from typing import Any, Iterable, Mapping
+
+from dbt_common.record import Record, Recorder
+
+
+@dataclasses.dataclass
+class CursorGetDescriptionParams:
+ connection_name: str
+
+
+@dataclasses.dataclass
+class CursorGetDescriptionResult:
+ columns: Iterable[Any]
+
+ def _to_dict(self) -> Any:
+ column_dicts = []
+ for c in self.columns:
+ # This captures the mandatory column information, but we might need
+ # more for some adapters.
+ # See https://peps.python.org/pep-0249/#description
+ column_dicts.append((c[0], c[1]))
+
+ return {"columns": column_dicts}
+
+ @classmethod
+ def _from_dict(cls, dct: Mapping) -> "CursorGetDescriptionResult":
+ return CursorGetDescriptionResult(columns=dct["columns"])
+
+
+@Recorder.register_record_type
+class CursorGetDescriptionRecord(Record):
+ """Implements record/replay support for the cursor.description property."""
+
+ params_cls = CursorGetDescriptionParams
+ result_cls = CursorGetDescriptionResult
+ group = "Database"
diff --git a/dbt/adapters/record/cursor/execute.py b/dbt/adapters/record/cursor/execute.py
new file mode 100644
index 00000000..e7e69859
--- /dev/null
+++ b/dbt/adapters/record/cursor/execute.py
@@ -0,0 +1,20 @@
+import dataclasses
+from typing import Any, Iterable, Union, Mapping
+
+from dbt_common.record import Record, Recorder
+
+
+@dataclasses.dataclass
+class CursorExecuteParams:
+ connection_name: str
+ operation: str
+ parameters: Union[Iterable[Any], Mapping[str, Any]]
+
+
+@Recorder.register_record_type
+class CursorExecuteRecord(Record):
+ """Implements record/replay support for the cursor.execute() method."""
+
+ params_cls = CursorExecuteParams
+ result_cls = None
+ group = "Database"
diff --git a/dbt/adapters/record/cursor/fetchall.py b/dbt/adapters/record/cursor/fetchall.py
new file mode 100644
index 00000000..090cc160
--- /dev/null
+++ b/dbt/adapters/record/cursor/fetchall.py
@@ -0,0 +1,66 @@
+import dataclasses
+import datetime
+from typing import Any, Dict, List, Mapping
+
+from dbt_common.record import Record, Recorder
+
+
+@dataclasses.dataclass
+class CursorFetchAllParams:
+ connection_name: str
+
+
+@dataclasses.dataclass
+class CursorFetchAllResult:
+ results: List[Any]
+
+ def _to_dict(self) -> Dict[str, Any]:
+ processed_results = []
+ for result in self.results:
+ result = tuple(map(self._process_value, result))
+ processed_results.append(result)
+
+ return {"results": processed_results}
+
+ @classmethod
+ def _from_dict(cls, dct: Mapping) -> "CursorFetchAllResult":
+ unprocessed_results = []
+ for result in dct["results"]:
+ result = tuple(map(cls._unprocess_value, result))
+ unprocessed_results.append(result)
+
+ return CursorFetchAllResult(unprocessed_results)
+
+ @classmethod
+ def _process_value(cls, value: Any) -> Any:
+ if type(value) is datetime.date:
+ return {"type": "date", "value": value.isoformat()}
+ elif type(value) is datetime.datetime:
+ return {"type": "datetime", "value": value.isoformat()}
+ else:
+ return value
+
+ @classmethod
+ def _unprocess_value(cls, value: Any) -> Any:
+ if type(value) is dict:
+ value_type = value.get("type")
+ if value_type == "date":
+ date_string = value.get("value")
+ assert isinstance(date_string, str)
+ return datetime.date.fromisoformat(date_string)
+ elif value_type == "datetime":
+ date_string = value.get("value")
+ assert isinstance(date_string, str)
+ return datetime.datetime.fromisoformat(date_string)
+ return value
+ else:
+ return value
+
+
+@Recorder.register_record_type
+class CursorFetchAllRecord(Record):
+ """Implements record/replay support for the cursor.fetchall() method."""
+
+ params_cls = CursorFetchAllParams
+ result_cls = CursorFetchAllResult
+ group = "Database"
diff --git a/dbt/adapters/record/cursor/fetchmany.py b/dbt/adapters/record/cursor/fetchmany.py
new file mode 100644
index 00000000..86f15440
--- /dev/null
+++ b/dbt/adapters/record/cursor/fetchmany.py
@@ -0,0 +1,23 @@
+import dataclasses
+from typing import Any, List
+
+from dbt_common.record import Record, Recorder
+
+
+@dataclasses.dataclass
+class CursorFetchManyParams:
+ connection_name: str
+
+
+@dataclasses.dataclass
+class CursorFetchManyResult:
+ results: List[Any]
+
+
+@Recorder.register_record_type
+class CursorFetchManyRecord(Record):
+ """Implements record/replay support for the cursor.fetchmany() method."""
+
+ params_cls = CursorFetchManyParams
+ result_cls = CursorFetchManyResult
+ group = "Database"
diff --git a/dbt/adapters/record/cursor/fetchone.py b/dbt/adapters/record/cursor/fetchone.py
new file mode 100644
index 00000000..42ffe210
--- /dev/null
+++ b/dbt/adapters/record/cursor/fetchone.py
@@ -0,0 +1,23 @@
+import dataclasses
+from typing import Any
+
+from dbt_common.record import Record, Recorder
+
+
+@dataclasses.dataclass
+class CursorFetchOneParams:
+ connection_name: str
+
+
+@dataclasses.dataclass
+class CursorFetchOneResult:
+ result: Any
+
+
+@Recorder.register_record_type
+class CursorFetchOneRecord(Record):
+ """Implements record/replay support for the cursor.fetchone() method."""
+
+ params_cls = CursorFetchOneParams
+ result_cls = CursorFetchOneResult
+ group = "Database"
diff --git a/dbt/adapters/record/cursor/rowcount.py b/dbt/adapters/record/cursor/rowcount.py
new file mode 100644
index 00000000..c024817e
--- /dev/null
+++ b/dbt/adapters/record/cursor/rowcount.py
@@ -0,0 +1,23 @@
+import dataclasses
+from typing import Optional
+
+from dbt_common.record import Record, Recorder
+
+
+@dataclasses.dataclass
+class CursorGetRowCountParams:
+ connection_name: str
+
+
+@dataclasses.dataclass
+class CursorGetRowCountResult:
+ rowcount: Optional[int]
+
+
+@Recorder.register_record_type
+class CursorGetRowCountRecord(Record):
+ """Implements record/replay support for the cursor.rowcount property."""
+
+ params_cls = CursorGetRowCountParams
+ result_cls = CursorGetRowCountResult
+ group = "Database"
diff --git a/dbt/adapters/record/handle.py b/dbt/adapters/record/handle.py
new file mode 100644
index 00000000..31817c37
--- /dev/null
+++ b/dbt/adapters/record/handle.py
@@ -0,0 +1,24 @@
+from typing import Any
+
+from dbt.adapters.contracts.connection import Connection
+
+from dbt.adapters.record.cursor.cursor import RecordReplayCursor
+
+
+class RecordReplayHandle:
+ """A proxy object used for record/replay modes. What adapters call a
+    'handle' is typically a native database connection; it should not be
+    confused with the Connection protocol, which is a dbt-adapters concept.
+
+    Currently, the only function of the handle proxy is to provide a
+    record/replay-aware cursor object when cursor() is called."""
+
+ def __init__(self, native_handle: Any, connection: Connection) -> None:
+ self.native_handle = native_handle
+ self.connection = connection
+
+ def cursor(self) -> Any:
+ # The native handle could be None if we are in replay mode, because no
+ # actual database access should be performed in that mode.
+ cursor = None if self.native_handle is None else self.native_handle.cursor()
+ return RecordReplayCursor(cursor, self.connection)
diff --git a/dbt/adapters/relation_configs/README.md b/dbt/adapters/relation_configs/README.md
index 6be3bc59..22d6bf78 100644
--- a/dbt/adapters/relation_configs/README.md
+++ b/dbt/adapters/relation_configs/README.md
@@ -1,6 +1,6 @@
# RelationConfig
This package serves as an initial abstraction for managing the inspection of existing relations and determining
-changes on those relations. It arose from the materialized view work and is currently only supporting
+changes on those relations. It arose from the materialized view work and is currently only supporting
materialized views for Postgres and Redshift as well as dynamic tables for Snowflake. There are three main
classes in this package.
diff --git a/dbt/adapters/relation_configs/config_change.py b/dbt/adapters/relation_configs/config_change.py
index 9d3c8e01..a776dc6b 100644
--- a/dbt/adapters/relation_configs/config_change.py
+++ b/dbt/adapters/relation_configs/config_change.py
@@ -16,7 +16,9 @@ class RelationConfigChangeAction(StrEnum):
@dataclass(frozen=True, eq=True, unsafe_hash=True)
class RelationConfigChange(RelationConfigBase, ABC):
action: RelationConfigChangeAction
- context: Hashable # this is usually a RelationConfig, e.g. IndexConfig, but shouldn't be limited
+ context: (
+ Hashable # this is usually a RelationConfig, e.g. IndexConfig, but shouldn't be limited
+ )
@property
@abstractmethod
diff --git a/dbt/adapters/sql/connections.py b/dbt/adapters/sql/connections.py
index 78cd3c9b..4d450c88 100644
--- a/dbt/adapters/sql/connections.py
+++ b/dbt/adapters/sql/connections.py
@@ -1,6 +1,6 @@
import abc
import time
-from typing import Any, Dict, Iterable, List, Optional, Tuple, TYPE_CHECKING
+from typing import Any, Dict, Iterable, Iterator, List, Optional, Tuple, TYPE_CHECKING
from dbt_common.events.contextvars import get_node_info
from dbt_common.events.functions import fire_event
@@ -86,7 +86,8 @@ def add_query(
node_info=get_node_info(),
)
)
- pre = time.time()
+
+ pre = time.perf_counter()
cursor = connection.handle.cursor()
cursor.execute(sql, bindings)
@@ -94,7 +95,7 @@ def add_query(
fire_event(
SQLQueryStatus(
status=str(self.get_response(cursor)),
- elapsed=round((time.time() - pre)),
+ elapsed=time.perf_counter() - pre,
node_info=get_node_info(),
)
)
@@ -110,27 +111,24 @@ def get_response(cls, cursor: Any) -> AdapterResponse:
@classmethod
def process_results(
cls, column_names: Iterable[str], rows: Iterable[Any]
- ) -> List[Dict[str, Any]]:
- # TODO CT-211
+ ) -> Iterator[Dict[str, Any]]:
unique_col_names = dict() # type: ignore[var-annotated]
- # TODO CT-211
for idx in range(len(column_names)): # type: ignore[arg-type]
- # TODO CT-211
col_name = column_names[idx] # type: ignore[index]
if col_name in unique_col_names:
unique_col_names[col_name] += 1
- # TODO CT-211
column_names[idx] = f"{col_name}_{unique_col_names[col_name]}" # type: ignore[index] # noqa
else:
- # TODO CT-211
unique_col_names[column_names[idx]] = 1 # type: ignore[index]
- return [dict(zip(column_names, row)) for row in rows]
+
+ for row in rows:
+ yield dict(zip(column_names, row))
@classmethod
def get_result_from_cursor(cls, cursor: Any, limit: Optional[int]) -> "agate.Table":
from dbt_common.clients.agate_helper import table_from_data_flat
- data: List[Any] = []
+ data: Iterable[Any] = []
column_names: List[str] = []
if cursor.description is not None:
diff --git a/dbt/adapters/sql/impl.py b/dbt/adapters/sql/impl.py
index 8c6e0e8e..8a8473f2 100644
--- a/dbt/adapters/sql/impl.py
+++ b/dbt/adapters/sql/impl.py
@@ -9,7 +9,6 @@
from dbt.adapters.exceptions import RelationTypeNullError
from dbt.adapters.sql.connections import SQLConnectionManager
-
LIST_RELATIONS_MACRO_NAME = "list_relations_without_caching"
GET_COLUMNS_IN_RELATION_MACRO_NAME = "get_columns_in_relation"
LIST_SCHEMAS_MACRO_NAME = "list_schemas"
@@ -41,6 +40,7 @@ class SQLAdapter(BaseAdapter):
- get_catalog
- list_relations_without_caching
- get_columns_in_relation
+ - get_catalog_for_single_relation
"""
ConnectionManager: Type[SQLConnectionManager]
diff --git a/dbt/include/global_project/macros/adapters/apply_grants.sql b/dbt/include/global_project/macros/adapters/apply_grants.sql
index 10906e7f..c75eef89 100644
--- a/dbt/include/global_project/macros/adapters/apply_grants.sql
+++ b/dbt/include/global_project/macros/adapters/apply_grants.sql
@@ -61,7 +61,7 @@
{% endmacro %}
{% macro default__get_show_grant_sql(relation) %}
- show grants on {{ relation }}
+ show grants on {{ relation.render() }}
{% endmacro %}
@@ -70,7 +70,7 @@
{% endmacro %}
{%- macro default__get_grant_sql(relation, privilege, grantees) -%}
- grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}
+ grant {{ privilege }} on {{ relation.render() }} to {{ grantees | join(', ') }}
{%- endmacro -%}
@@ -79,7 +79,7 @@
{% endmacro %}
{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}
- revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}
+ revoke {{ privilege }} on {{ relation.render() }} from {{ grantees | join(', ') }}
{%- endmacro -%}
@@ -147,7 +147,7 @@
{% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}
{% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}
{% if not (needs_granting or needs_revoking) %}
- {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}
+ {{ log('On ' ~ relation.render() ~': All grants are in place, no revocation or granting needed.')}}
{% endif %}
{% else %}
{#-- We don't think there's any chance of previous grants having carried over. --#}
diff --git a/dbt/include/global_project/macros/adapters/columns.sql b/dbt/include/global_project/macros/adapters/columns.sql
index e1099649..96e6f3f2 100644
--- a/dbt/include/global_project/macros/adapters/columns.sql
+++ b/dbt/include/global_project/macros/adapters/columns.sql
@@ -53,7 +53,7 @@
{%- do col_naked_numeric.append(col['name']) -%}
{%- endif -%}
{% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %}
- cast(null as {{ col['data_type'] }}) as {{ col_name }}{{ ", " if not loop.last }}
+ {{ cast('null', col['data_type']) }} as {{ col_name }}{{ ", " if not loop.last }}
{%- endfor -%}
{%- if (col_err | length) > 0 -%}
{{ exceptions.column_type_missing(column_names=col_err) }}
@@ -96,10 +96,10 @@
{%- set tmp_column = column_name + "__dbt_alter" -%}
{% call statement('alter_column_type') %}
- alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};
- update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};
- alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;
- alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}
+ alter table {{ relation.render() }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};
+ update {{ relation.render() }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};
+ alter table {{ relation.render() }} drop column {{ adapter.quote(column_name) }} cascade;
+ alter table {{ relation.render() }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}
{% endcall %}
{% endmacro %}
@@ -120,7 +120,7 @@
{% set sql -%}
- alter {{ relation.type }} {{ relation }}
+ alter {{ relation.type }} {{ relation.render() }}
{% for column in add_columns %}
add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}
diff --git a/dbt/include/global_project/macros/adapters/metadata.sql b/dbt/include/global_project/macros/adapters/metadata.sql
index c8e8a414..0aa7aabb 100644
--- a/dbt/include/global_project/macros/adapters/metadata.sql
+++ b/dbt/include/global_project/macros/adapters/metadata.sql
@@ -77,6 +77,15 @@
'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}
{% endmacro %}
+{% macro get_catalog_for_single_relation(relation) %}
+ {{ return(adapter.dispatch('get_catalog_for_single_relation', 'dbt')(relation)) }}
+{% endmacro %}
+
+{% macro default__get_catalog_for_single_relation(relation) %}
+ {{ exceptions.raise_not_implemented(
+ 'get_catalog_for_single_relation macro not implemented for adapter '+adapter.type()) }}
+{% endmacro %}
+
{% macro get_relations() %}
{{ return(adapter.dispatch('get_relations', 'dbt')()) }}
{% endmacro %}
diff --git a/dbt/include/global_project/macros/adapters/relation.sql b/dbt/include/global_project/macros/adapters/relation.sql
index 1c2bd880..b9af4969 100644
--- a/dbt/include/global_project/macros/adapters/relation.sql
+++ b/dbt/include/global_project/macros/adapters/relation.sql
@@ -38,7 +38,7 @@
{% macro default__truncate_relation(relation) -%}
{% call statement('truncate_relation') -%}
- truncate table {{ relation }}
+ truncate table {{ relation.render() }}
{%- endcall %}
{% endmacro %}
diff --git a/dbt/include/global_project/macros/adapters/show.sql b/dbt/include/global_project/macros/adapters/show.sql
index 33a93f3d..3a5faa98 100644
--- a/dbt/include/global_project/macros/adapters/show.sql
+++ b/dbt/include/global_project/macros/adapters/show.sql
@@ -1,22 +1,26 @@
+{#
+  We expect a syntax error if dbt show is invoked both with a --limit flag
+  and with a limit clause embedded in its inline query. No special handling
+  is provided out of the box.
+#}
{% macro get_show_sql(compiled_code, sql_header, limit) -%}
- {%- if sql_header -%}
+ {%- if sql_header is not none -%}
{{ sql_header }}
- {%- endif -%}
- {%- if limit is not none -%}
+ {%- endif %}
{{ get_limit_subquery_sql(compiled_code, limit) }}
- {%- else -%}
- {{ compiled_code }}
- {%- endif -%}
{% endmacro %}
-{% macro get_limit_subquery_sql(sql, limit) %}
- {{ adapter.dispatch('get_limit_subquery_sql', 'dbt')(sql, limit) }}
-{% endmacro %}
+{#
+ Not necessarily a true subquery anymore. Now, merely a query subordinate
+ to the calling macro.
+#}
+{%- macro get_limit_subquery_sql(sql, limit) -%}
+ {{ adapter.dispatch('get_limit_sql', 'dbt')(sql, limit) }}
+{%- endmacro -%}
-{% macro default__get_limit_subquery_sql(sql, limit) %}
- select *
- from (
- {{ sql }}
- ) as model_limit_subq
- limit {{ limit }}
+{% macro default__get_limit_sql(sql, limit) %}
+  {{ sql }}
+ {% if limit is not none %}
+ limit {{ limit }}
+ {%- endif -%}
{% endmacro %}
diff --git a/dbt/include/global_project/macros/materializations/models/clone/clone.sql b/dbt/include/global_project/macros/materializations/models/clone/clone.sql
index 01c8c393..56d80082 100644
--- a/dbt/include/global_project/macros/materializations/models/clone/clone.sql
+++ b/dbt/include/global_project/macros/materializations/models/clone/clone.sql
@@ -27,14 +27,14 @@
{%- set target_relation = this.incorporate(type='table') -%}
{% if existing_relation is not none and not existing_relation.is_table %}
- {{ log("Dropping relation " ~ existing_relation ~ " because it is of type " ~ existing_relation.type) }}
+ {{ log("Dropping relation " ~ existing_relation.render() ~ " because it is of type " ~ existing_relation.type) }}
{{ drop_relation_if_exists(existing_relation) }}
{% endif %}
-- as a general rule, data platforms that can clone tables can also do atomic 'create or replace'
{% call statement('main') %}
{% if target_relation and defer_relation and target_relation == defer_relation %}
- {{ log("Target relation and defer relation are the same, skipping clone for relation: " ~ target_relation) }}
+ {{ log("Target relation and defer relation are the same, skipping clone for relation: " ~ target_relation.render()) }}
{% else %}
{{ create_or_replace_clone(target_relation, defer_relation) }}
{% endif %}
diff --git a/dbt/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql b/dbt/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql
index 204e9e87..cdb2559c 100644
--- a/dbt/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql
+++ b/dbt/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql
@@ -3,5 +3,5 @@
{% endmacro %}
{% macro default__create_or_replace_clone(this_relation, defer_relation) %}
- create or replace table {{ this_relation }} clone {{ defer_relation }}
+ create or replace table {{ this_relation.render() }} clone {{ defer_relation.render() }}
{% endmacro %}
diff --git a/dbt/include/global_project/macros/materializations/models/incremental/incremental.sql b/dbt/include/global_project/macros/materializations/models/incremental/incremental.sql
index e8ff5c1e..f932751a 100644
--- a/dbt/include/global_project/macros/materializations/models/incremental/incremental.sql
+++ b/dbt/include/global_project/macros/materializations/models/incremental/incremental.sql
@@ -39,9 +39,12 @@
{% set need_swap = true %}
{% else %}
{% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}
- {% do adapter.expand_target_column_types(
- from_relation=temp_relation,
- to_relation=target_relation) %}
+ {% set contract_config = config.get('contract') %}
+ {% if not contract_config or not contract_config.enforced %}
+ {% do adapter.expand_target_column_types(
+ from_relation=temp_relation,
+ to_relation=target_relation) %}
+ {% endif %}
{#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}
{% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}
{% if not dest_columns %}
diff --git a/dbt/include/global_project/macros/materializations/models/materialized_view.sql b/dbt/include/global_project/macros/materializations/models/materialized_view.sql
index 6dc30bf9..a39f8aa2 100644
--- a/dbt/include/global_project/macros/materializations/models/materialized_view.sql
+++ b/dbt/include/global_project/macros/materializations/models/materialized_view.sql
@@ -71,9 +71,9 @@
{% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %}
{% elif on_configuration_change == 'continue' %}
{% set build_sql = '' %}
- {{ exceptions.warn("Configuration changes were identified and `on_configuration_change` was set to `continue` for `" ~ target_relation ~ "`") }}
+ {{ exceptions.warn("Configuration changes were identified and `on_configuration_change` was set to `continue` for `" ~ target_relation.render() ~ "`") }}
{% elif on_configuration_change == 'fail' %}
- {{ exceptions.raise_fail_fast_error("Configuration changes were identified and `on_configuration_change` was set to `fail` for `" ~ target_relation ~ "`") }}
+ {{ exceptions.raise_fail_fast_error("Configuration changes were identified and `on_configuration_change` was set to `fail` for `" ~ target_relation.render() ~ "`") }}
{% else %}
-- this only happens if the user provides a value other than `apply`, 'skip', 'fail'
diff --git a/dbt/include/global_project/macros/materializations/seeds/helpers.sql b/dbt/include/global_project/macros/materializations/seeds/helpers.sql
index 44dbf370..d87c258b 100644
--- a/dbt/include/global_project/macros/materializations/seeds/helpers.sql
+++ b/dbt/include/global_project/macros/materializations/seeds/helpers.sql
@@ -37,7 +37,7 @@
{% set sql = create_csv_table(model, agate_table) %}
{% else %}
{{ adapter.truncate_relation(old_relation) }}
- {% set sql = "truncate table " ~ old_relation %}
+ {% set sql = "truncate table " ~ old_relation.render() %}
{% endif %}
{{ return(sql) }}
diff --git a/dbt/include/global_project/macros/materializations/seeds/seed.sql b/dbt/include/global_project/macros/materializations/seeds/seed.sql
index 3b66252d..4ee4fb80 100644
--- a/dbt/include/global_project/macros/materializations/seeds/seed.sql
+++ b/dbt/include/global_project/macros/materializations/seeds/seed.sql
@@ -22,7 +22,7 @@
-- build model
{% set create_table_sql = "" %}
{% if exists_as_view %}
- {{ exceptions.raise_compiler_error("Cannot seed to '{}', it is a view".format(old_relation)) }}
+ {{ exceptions.raise_compiler_error("Cannot seed to '{}', it is a view".format(old_relation.render())) }}
{% elif exists_as_table %}
{% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}
{% else %}
diff --git a/dbt/include/global_project/macros/materializations/snapshots/helpers.sql b/dbt/include/global_project/macros/materializations/snapshots/helpers.sql
index 7fd4bfd5..bb71974c 100644
--- a/dbt/include/global_project/macros/materializations/snapshots/helpers.sql
+++ b/dbt/include/global_project/macros/materializations/snapshots/helpers.sql
@@ -8,7 +8,7 @@
{% macro default__create_columns(relation, columns) %}
{% for column in columns %}
{% call statement() %}
- alter table {{ relation }} add column "{{ column.name }}" {{ column.data_type }};
+ alter table {{ relation.render() }} add column "{{ column.name }}" {{ column.data_type }};
{% endcall %}
{% endfor %}
{% endmacro %}
diff --git a/dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql b/dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql
index 6bc50fd3..56798811 100644
--- a/dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql
+++ b/dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql
@@ -7,7 +7,7 @@
{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}
{%- set insert_cols_csv = insert_cols | join(', ') -%}
- merge into {{ target }} as DBT_INTERNAL_DEST
+ merge into {{ target.render() }} as DBT_INTERNAL_DEST
using {{ source }} as DBT_INTERNAL_SOURCE
on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id
diff --git a/dbt/include/global_project/macros/materializations/tests/helpers.sql b/dbt/include/global_project/macros/materializations/tests/helpers.sql
index ead727d9..a385d1ea 100644
--- a/dbt/include/global_project/macros/materializations/tests/helpers.sql
+++ b/dbt/include/global_project/macros/materializations/tests/helpers.sql
@@ -41,4 +41,4 @@ dbt_internal_unit_test_expected as (
select * from dbt_internal_unit_test_actual
union all
select * from dbt_internal_unit_test_expected
-{%- endmacro %}
\ No newline at end of file
+{%- endmacro %}
diff --git a/dbt/include/global_project/macros/materializations/tests/unit.sql b/dbt/include/global_project/macros/materializations/tests/unit.sql
index 6d7b632c..78c6f6bc 100644
--- a/dbt/include/global_project/macros/materializations/tests/unit.sql
+++ b/dbt/include/global_project/macros/materializations/tests/unit.sql
@@ -3,6 +3,7 @@
{% set relations = [] %}
{% set expected_rows = config.get('expected_rows') %}
+ {% set expected_sql = config.get('expected_sql') %}
  {% set tested_expected_column_names = expected_rows[0].keys() if (expected_rows | length ) > 0 else get_columns_in_query(sql) %}
{%- set target_relation = this.incorporate(type='table') -%}
@@ -11,10 +12,13 @@
{%- set columns_in_relation = adapter.get_columns_in_relation(temp_relation) -%}
{%- set column_name_to_data_types = {} -%}
{%- for column in columns_in_relation -%}
- {%- do column_name_to_data_types.update({column.name|lower: column.data_type}) -%}
+ {%- do column_name_to_data_types.update({column.name|lower: column.data_type}) -%}
{%- endfor -%}
- {% set unit_test_sql = get_unit_test_sql(sql, get_expected_sql(expected_rows, column_name_to_data_types), tested_expected_column_names) %}
+ {% if not expected_sql %}
+ {% set expected_sql = get_expected_sql(expected_rows, column_name_to_data_types) %}
+ {% endif %}
+ {% set unit_test_sql = get_unit_test_sql(sql, expected_sql, tested_expected_column_names) %}
{% call statement('main', fetch_result=True) -%}
diff --git a/dbt/include/global_project/macros/relations/drop.sql b/dbt/include/global_project/macros/relations/drop.sql
index 58abd14d..e66511da 100644
--- a/dbt/include/global_project/macros/relations/drop.sql
+++ b/dbt/include/global_project/macros/relations/drop.sql
@@ -16,7 +16,7 @@
{{ drop_materialized_view(relation) }}
{%- else -%}
- drop {{ relation.type }} if exists {{ relation }} cascade
+ drop {{ relation.type }} if exists {{ relation.render() }} cascade
{%- endif -%}
diff --git a/dbt/include/global_project/macros/relations/materialized_view/drop.sql b/dbt/include/global_project/macros/relations/materialized_view/drop.sql
index b218d0f3..8235b1c6 100644
--- a/dbt/include/global_project/macros/relations/materialized_view/drop.sql
+++ b/dbt/include/global_project/macros/relations/materialized_view/drop.sql
@@ -10,5 +10,5 @@ actually executes the drop, and `get_drop_sql`, which returns the template.
{% macro default__drop_materialized_view(relation) -%}
- drop materialized view if exists {{ relation }} cascade
+ drop materialized view if exists {{ relation.render() }} cascade
{%- endmacro %}
diff --git a/dbt/include/global_project/macros/relations/rename.sql b/dbt/include/global_project/macros/relations/rename.sql
index d7f3a72e..4b913df3 100644
--- a/dbt/include/global_project/macros/relations/rename.sql
+++ b/dbt/include/global_project/macros/relations/rename.sql
@@ -30,6 +30,6 @@
{% macro default__rename_relation(from_relation, to_relation) -%}
{% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}
{% call statement('rename_relation') -%}
- alter table {{ from_relation }} rename to {{ target_name }}
+ alter table {{ from_relation.render() }} rename to {{ target_name }}
{%- endcall %}
{% endmacro %}
diff --git a/dbt/include/global_project/macros/relations/table/drop.sql b/dbt/include/global_project/macros/relations/table/drop.sql
index d7d5941c..038ded9e 100644
--- a/dbt/include/global_project/macros/relations/table/drop.sql
+++ b/dbt/include/global_project/macros/relations/table/drop.sql
@@ -10,5 +10,5 @@ actually executes the drop, and `get_drop_sql`, which returns the template.
{% macro default__drop_table(relation) -%}
- drop table if exists {{ relation }} cascade
+ drop table if exists {{ relation.render() }} cascade
{%- endmacro %}
diff --git a/dbt/include/global_project/macros/relations/view/create.sql b/dbt/include/global_project/macros/relations/view/create.sql
index 41cd196c..ee83befa 100644
--- a/dbt/include/global_project/macros/relations/view/create.sql
+++ b/dbt/include/global_project/macros/relations/view/create.sql
@@ -16,7 +16,7 @@
{%- set sql_header = config.get('sql_header', none) -%}
{{ sql_header if sql_header is not none }}
- create view {{ relation }}
+ create view {{ relation.render() }}
{% set contract_config = config.get('contract') %}
{% if contract_config.enforced %}
{{ get_assert_columns_equivalent(sql) }}
diff --git a/dbt/include/global_project/macros/relations/view/drop.sql b/dbt/include/global_project/macros/relations/view/drop.sql
index 7e1924fa..84c91a36 100644
--- a/dbt/include/global_project/macros/relations/view/drop.sql
+++ b/dbt/include/global_project/macros/relations/view/drop.sql
@@ -10,5 +10,5 @@ actually executes the drop, and `get_drop_sql`, which returns the template.
{% macro default__drop_view(relation) -%}
- drop view if exists {{ relation }} cascade
+ drop view if exists {{ relation.render() }} cascade
{%- endmacro %}
diff --git a/dbt/include/global_project/macros/relations/view/replace.sql b/dbt/include/global_project/macros/relations/view/replace.sql
index 1da06134..a0f0dc76 100644
--- a/dbt/include/global_project/macros/relations/view/replace.sql
+++ b/dbt/include/global_project/macros/relations/view/replace.sql
@@ -61,6 +61,6 @@
{% endmacro %}
{% macro default__handle_existing_table(full_refresh, old_relation) %}
- {{ log("Dropping relation " ~ old_relation ~ " because it is of type " ~ old_relation.type) }}
+ {{ log("Dropping relation " ~ old_relation.render() ~ " because it is of type " ~ old_relation.type) }}
{{ adapter.drop_relation(old_relation) }}
{% endmacro %}
diff --git a/dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql b/dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql
index ca39a39c..a3a8173b 100644
--- a/dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql
+++ b/dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql
@@ -15,13 +15,14 @@
{%- endif -%}
{%- if not column_name_to_data_types -%}
- {{ exceptions.raise_compiler_error("Not able to get columns for unit test '" ~ model.name ~ "' from relation " ~ this) }}
+ {{ exceptions.raise_compiler_error("Not able to get columns for unit test '" ~ model.name ~ "' from relation " ~ this ~ " because the relation doesn't exist") }}
{%- endif -%}
{%- for column_name, column_type in column_name_to_data_types.items() -%}
{%- do default_row.update({column_name: (safe_cast("null", column_type) | trim )}) -%}
{%- endfor -%}
+{{ validate_fixture_rows(rows, row_number) }}
{%- for row in rows -%}
{%- set formatted_row = format_row(row, column_name_to_data_types) -%}
@@ -79,7 +80,7 @@ union all
{%- endif -%}
{%- set column_type = column_name_to_data_types[column_name] %}
-
+
{#-- sanitize column_value: wrap yaml strings in quotes, apply cast --#}
{%- set column_value_clean = column_value -%}
{%- if column_value is string -%}
@@ -93,3 +94,11 @@ union all
{%- endfor -%}
{{ return(formatted_row) }}
{%- endmacro -%}
+
+{%- macro validate_fixture_rows(rows, row_number) -%}
+ {{ return(adapter.dispatch('validate_fixture_rows', 'dbt')(rows, row_number)) }}
+{%- endmacro -%}
+
+{%- macro default__validate_fixture_rows(rows, row_number) -%}
+ {# This is an abstract method for adapter overrides as needed #}
+{%- endmacro -%}
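
As a sketch of how an adapter package might use this new dispatch hook (not part of this diff; the `myadapter` prefix and the row-count check are hypothetical), the default no-op above could be overridden to reject fixtures the adapter cannot handle:

```sql
{#- Hypothetical adapter override of the validate_fixture_rows hook.
    The adapter prefix and the 100-row limit are illustrative assumptions. -#}
{%- macro myadapter__validate_fixture_rows(rows, row_number) -%}
  {%- if rows is not none and rows | length > 100 -%}
    {{ exceptions.raise_compiler_error(
        "Unit test fixtures are limited to 100 rows on this adapter (got " ~ rows | length ~ ")"
    ) }}
  {%- endif -%}
{%- endmacro -%}
```

Because the hook dispatches through `adapter.dispatch('validate_fixture_rows', 'dbt')`, an override like this is picked up automatically with no change to `get_fixture_sql`.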
diff --git a/dbt/include/global_project/macros/utils/date.sql b/dbt/include/global_project/macros/utils/date.sql
new file mode 100644
index 00000000..d41b4438
--- /dev/null
+++ b/dbt/include/global_project/macros/utils/date.sql
@@ -0,0 +1,10 @@
+{% macro date(year, month, day) %}
+ {{ return(adapter.dispatch('date', 'dbt') (year, month, day)) }}
+{% endmacro %}
+
+
+{% macro default__date(year, month, day) -%}
+ {%- set dt = modules.datetime.date(year, month, day) -%}
+ {%- set iso_8601_formatted_date = dt.strftime('%Y-%m-%d') -%}
+ to_date('{{ iso_8601_formatted_date }}', 'YYYY-MM-DD')
+{%- endmacro %}
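
For orientation, here is how a model might call the new cross-database `date` macro and the SQL the default implementation compiles it to (the surrounding model SQL is illustrative only):

```sql
-- Model SQL using the new macro:
select {{ dbt.date(2024, 9, 13) }} as order_date

-- Compiles via default__date to:
select to_date('2024-09-13', 'YYYY-MM-DD') as order_date
```

Adapters whose platforms lack `to_date` can supply their own `<adapter>__date` implementation through the same dispatch mechanism.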
diff --git a/docs/README.md b/docs/README.md
new file mode 100644
index 00000000..a1096d54
--- /dev/null
+++ b/docs/README.md
@@ -0,0 +1,5 @@
+## Documentation
+
+This folder collects internal documentation for the dbt-adapters codebase.
+
+The documentation is divided into the following sub-folders:
+* arch: Architecture Decision Records (ADRs) which explain and justify major architectural decisions
+* guides: Informal documents which describe the code or our development practices at a high level
diff --git a/docs/guides/record_replay.md b/docs/guides/record_replay.md
new file mode 100644
index 00000000..5bcbec06
--- /dev/null
+++ b/docs/guides/record_replay.md
@@ -0,0 +1,15 @@
+# Supporting Record/Replay in Adapters
+
+This document describes how to implement support for dbt's Record/Replay Subsystem in adapters. Before reading it, make sure you understand the fundamental ideas behind Record/Replay, which are [documented in the dbt-common repo](https://github.com/dbt-labs/dbt-common/blob/main/docs/guides/record_replay.md).
+
+## Recording and Replaying Warehouse Interaction
+
+The goal of the Record/Replay Subsystem is to record all interactions between dbt and external systems, of which the data warehouse is the most important. Since warehouse interaction is mediated by adapters, full Record/Replay support requires that adapters record all interactions they have with the warehouse. It also requires that they record access to the local filesystem or to external services, if that access is not mediated by dbt itself. This includes authentication steps, opening and closing connections, beginning and ending transactions, etc.
+
+A basic implementation of Record/Replay functionality, suitable for most adapters which extend the `SQLAdapter` class, can be found in `dbt-adapters/dbt/adapters/record`. The `RecordReplayHandle` and `RecordReplayCursor` classes defined there are used to intercept and record or replay all DWH interactions. They are an excellent starting point for adapters which extend `SQLAdapter` and use a database library which substantially conforms to Python's DB API v2.0 (PEP 249). Examples of how to handle library-specific deviations from that API can be found in the dbt-postgres and dbt-snowflake repositories.
+
+## Misc. Notes and Suggestions
+
+Not every interaction with an external system has to be recorded in full detail, and authentication might prove to be a place where we exclude sensitive secrets from the recording. For example, since replay will not actually be communicating with the warehouse, it may be possible to exclude passwords and auth keys from the parameters recorded, and to exclude auth tokens from the results.
+
+In addition to adding an appropriate decorator to functions which communicate with external systems, you should check those functions for side effects. Since those functions' calls will be mocked out in replay mode, their side effects will not be carried out during replay. At present, we are focusing on support for recording and comparing recordings, but this is worth keeping in mind.
diff --git a/pyproject.toml b/pyproject.toml
index 74c2374e..e794781c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -12,7 +12,7 @@ maintainers = [
{ name = "dbt Labs", email = "info@dbtlabs.com" },
]
classifiers = [
- "Development Status :: 2 - Pre-Alpha",
+ "Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: Apache Software License",
"Operating System :: MacOS :: MacOS X",
"Operating System :: Microsoft :: Windows",
@@ -21,9 +21,10 @@ classifiers = [
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
]
dependencies = [
- "dbt-common<2.0",
+ "dbt-common>=1.6,<2.0",
"pytz>=2015.7",
# installed via dbt-common but used directly
"agate>=1.0,<2.0",
@@ -54,41 +55,16 @@ include = ["dbt/adapters", "dbt/include", "dbt/__init__.py"]
[tool.hatch.envs.default]
dependencies = [
"dbt_common @ git+https://github.com/dbt-labs/dbt-common.git",
-]
-
-[tool.hatch.envs.lint]
-detached = true
-dependencies = [
- "black>=24.3",
- "flake8",
- "Flake8-pyproject",
-]
-[tool.hatch.envs.lint.scripts]
-all = [
- "- black-only",
- "- flake8-only",
-]
-black-only = "python -m black ."
-flake8-only = "python -m flake8 ."
-
-[tool.hatch.envs.typecheck]
-dependencies = [
- "mypy",
- "types-PyYAML",
- "types-protobuf",
- "types-pytz",
-]
-[tool.hatch.envs.typecheck.scripts]
-all = "python -m mypy ."
-
-[tool.hatch.envs.unit-tests]
-dependencies = [
+ 'pre-commit==3.7.0;python_version>="3.9"',
+ 'pre-commit==3.5.0;python_version=="3.8"',
"pytest",
"pytest-dotenv",
"pytest-xdist",
]
-[tool.hatch.envs.unit-tests.scripts]
-all = "python -m pytest {args:tests/unit}"
+[tool.hatch.envs.default.scripts]
+setup = "pre-commit install"
+code-quality = "pre-commit run --all-files"
+unit-tests = "python -m pytest {args:tests/unit}"
[tool.hatch.envs.build]
detached = true
@@ -114,37 +90,8 @@ check-sdist = [
]
protobuf = "protoc -I=./dbt/adapters/events --python_out=./dbt/adapters/events ./dbt/adapters/events/adapter_types.proto"
-[tool.black]
-extend-exclude = "dbt/adapters/events/adapter_types_pb2.py"
-line-length = 99
-target-version = ['py38']
-
-[tool.flake8]
-select = ["E", "W", "F"]
-ignore = ["E203", "E501", "E741", "W503", "W504"]
-exclude = [
- "dbt/adapters/events/adapter_types_pb2.py",
- "tests/functional",
- "venv",
-]
-per-file-ignores = ["*/__init__.py: F401"]
-
[tool.mypy]
-namespace_packages = true
-show_error_codes = true
-explicit_package_bases = true
-ignore_missing_imports = true
-pretty = true
mypy_path = "third-party-stubs/"
-files = [
- "dbt",
- "tests/unit",
-]
-exclude = [
- "dbt/adapters/events/adapter_types_pb2.py",
- "dbt-tests-adapter/dbt/__init__.py", # overlaps with `dbt/__init__.py` as expected for namespaces
- "venv",
-]
[[tool.mypy.overrides]]
module = ["dbt.adapters.events.adapter_types_pb2"]
follow_imports = "skip"
diff --git a/tests/unit/test_base_adapter.py b/tests/unit/test_base_adapter.py
index 95fe5ae2..5fa109b7 100644
--- a/tests/unit/test_base_adapter.py
+++ b/tests/unit/test_base_adapter.py
@@ -39,6 +39,14 @@ def connection_manager(self):
[{"type": "foreign_key", "expression": "other_table (c1)"}],
["column_name integer references other_table (c1)"],
),
+ (
+ [{"type": "foreign_key", "to": "other_table", "to_columns": ["c1"]}],
+ ["column_name integer references other_table (c1)"],
+ ),
+ (
+ [{"type": "foreign_key", "to": "other_table", "to_columns": ["c1", "c2"]}],
+ ["column_name integer references other_table (c1, c2)"],
+ ),
([{"type": "check"}, {"type": "unique"}], ["column_name integer unique"]),
([{"type": "custom", "expression": "-- noop"}], ["column_name integer -- noop"]),
]
@@ -176,6 +184,30 @@ def test_render_raw_columns_constraints_unsupported(
],
["constraint test_name foreign key (c1, c2) references other_table (c1)"],
),
+ (
+ [
+ {
+ "type": "foreign_key",
+ "columns": ["c1", "c2"],
+ "to": "other_table",
+ "to_columns": ["c1"],
+ "name": "test_name",
+ }
+ ],
+ ["constraint test_name foreign key (c1, c2) references other_table (c1)"],
+ ),
+ (
+ [
+ {
+ "type": "foreign_key",
+ "columns": ["c1", "c2"],
+ "to": "other_table",
+ "to_columns": ["c1", "c2"],
+ "name": "test_name",
+ }
+ ],
+ ["constraint test_name foreign key (c1, c2) references other_table (c1, c2)"],
+ ),
]
@pytest.mark.parametrize("constraints,expected_rendered_constraints", model_constraints)
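
To make the new test cases concrete (this rendering is inferred from the expected strings above, not new behavior introduced here), a foreign-key constraint specified with the structured `to`/`to_columns` fields renders the same DDL as the older free-form `expression` field:

```sql
-- column-level: {"type": "foreign_key", "to": "other_table", "to_columns": ["c1"]}
column_name integer references other_table (c1)

-- model-level, with a named constraint and a composite key:
constraint test_name foreign key (c1, c2) references other_table (c1, c2)
```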
diff --git a/tests/unit/test_events.py b/tests/unit/test_events.py
index bda7c6f4..22e71896 100644
--- a/tests/unit/test_events.py
+++ b/tests/unit/test_events.py
@@ -153,6 +153,7 @@ def test_event_codes(self):
types.FinishedRunningStats(stat_line="", execution="", execution_time=0),
types.ConstraintNotEnforced(constraint="", adapter=""),
types.ConstraintNotSupported(constraint="", adapter=""),
+ types.TypeCodeNotFound(type_code=0),
]
diff --git a/tests/unit/test_relation.py b/tests/unit/test_relation.py
index aa9cda25..97d56419 100644
--- a/tests/unit/test_relation.py
+++ b/tests/unit/test_relation.py
@@ -1,4 +1,4 @@
-from dataclasses import replace
+from dataclasses import dataclass, replace
import pytest
@@ -43,26 +43,52 @@ def test_can_be_replaced_default():
@pytest.mark.parametrize(
- "limit,expected_result",
+ "limit,require_alias,expected_result",
[
- (None, '"test_database"."test_schema"."test_identifier"'),
+ (None, False, '"test_database"."test_schema"."test_identifier"'),
(
0,
- '(select * from "test_database"."test_schema"."test_identifier" where false limit 0) _dbt_limit_subq',
+ True,
+ '(select * from "test_database"."test_schema"."test_identifier" where false limit 0) _dbt_limit_subq_test_identifier',
),
(
1,
- '(select * from "test_database"."test_schema"."test_identifier" limit 1) _dbt_limit_subq',
+ True,
+ '(select * from "test_database"."test_schema"."test_identifier" limit 1) _dbt_limit_subq_test_identifier',
+ ),
+ (
+ 0,
+ False,
+ '(select * from "test_database"."test_schema"."test_identifier" where false limit 0)',
+ ),
+ (
+ 1,
+ False,
+ '(select * from "test_database"."test_schema"."test_identifier" limit 1)',
),
],
)
-def test_render_limited(limit, expected_result):
+def test_render_limited(limit, require_alias, expected_result):
my_relation = BaseRelation.create(
database="test_database",
schema="test_schema",
identifier="test_identifier",
limit=limit,
+ require_alias=require_alias,
)
actual_result = my_relation.render_limited()
assert actual_result == expected_result
assert str(my_relation) == expected_result
+
+
+def test_create_ephemeral_from_uses_identifier():
+ @dataclass
+ class Node:
+ """Dummy implementation of RelationConfig protocol"""
+
+ name: str
+ identifier: str
+
+ node = Node(name="name_should_not_be_used", identifier="test")
+ ephemeral_relation = BaseRelation.create_ephemeral_from(node)
+ assert str(ephemeral_relation) == "__dbt__cte__test"
diff --git a/tests/unit/test_sql_result.py b/tests/unit/test_sql_result.py
index 12c173cb..454e6572 100644
--- a/tests/unit/test_sql_result.py
+++ b/tests/unit/test_sql_result.py
@@ -8,13 +8,13 @@ def test_duplicated_columns(self):
cols_with_one_dupe = ["a", "b", "a", "d"]
rows = [(1, 2, 3, 4)]
self.assertEqual(
- SQLConnectionManager.process_results(cols_with_one_dupe, rows),
+ list(SQLConnectionManager.process_results(cols_with_one_dupe, rows)),
[{"a": 1, "b": 2, "a_2": 3, "d": 4}],
)
cols_with_more_dupes = ["a", "a", "a", "b"]
rows = [(1, 2, 3, 4)]
self.assertEqual(
- SQLConnectionManager.process_results(cols_with_more_dupes, rows),
+ list(SQLConnectionManager.process_results(cols_with_more_dupes, rows)),
[{"a": 1, "a_2": 2, "a_3": 3, "b": 4}],
)