diff --git a/.changes/0.1.0/Docs-20240109-131629.yaml b/.changes/0.1.0/Docs-20240109-131629.yaml deleted file mode 100644 index 22b2ad3f..00000000 --- a/.changes/0.1.0/Docs-20240109-131629.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Docs -body: Configure `changie` -time: 2024-01-09T13:16:29.763021-05:00 -custom: - Author: mikealfare - Issue: 16 diff --git a/.changes/0.1.0/Docs-20240109-131736.yaml b/.changes/0.1.0/Docs-20240109-131736.yaml deleted file mode 100644 index 43186903..00000000 --- a/.changes/0.1.0/Docs-20240109-131736.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Docs -body: Setup ADR tracking framework -time: 2024-01-09T13:17:36.094147-05:00 -custom: - Author: mikealfare - Issue: "11" diff --git a/.changes/0.1.0/Docs-20240109-131858.yaml b/.changes/0.1.0/Docs-20240109-131858.yaml deleted file mode 100644 index decef9a7..00000000 --- a/.changes/0.1.0/Docs-20240109-131858.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Docs -body: Create issue templates -time: 2024-01-09T13:18:58.11819-05:00 -custom: - Author: mikealfare - Issue: "12" diff --git a/.changes/0.1.0/Docs-20240109-131917.yaml b/.changes/0.1.0/Docs-20240109-131917.yaml deleted file mode 100644 index 3c531060..00000000 --- a/.changes/0.1.0/Docs-20240109-131917.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Docs -body: Create PR template -time: 2024-01-09T13:19:17.749914-05:00 -custom: - Author: mikealfare - Issue: "13" diff --git a/.changes/0.1.0/Features-20240212-123544.yaml b/.changes/0.1.0/Features-20240212-123544.yaml deleted file mode 100644 index 239ad59f..00000000 --- a/.changes/0.1.0/Features-20240212-123544.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Update RelationConfig to capture all fields used by adapters -time: 2024-02-12T12:35:44.653555-08:00 -custom: - Author: colin-rogers-dbt - Issue: "30" diff --git a/.changes/0.1.0/Fixes-20240215-141545.yaml b/.changes/0.1.0/Fixes-20240215-141545.yaml deleted file mode 100644 index ced62f25..00000000 --- a/.changes/0.1.0/Fixes-20240215-141545.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Ignore adapter-level support warnings for 'custom' constraints -time: 2024-02-15T14:15:45.764145+01:00 -custom: - Author: jtcohen6 - Issue: "90" diff --git a/.changes/0.1.0/Fixes-20240216-135420.yaml b/.changes/0.1.0/Fixes-20240216-135420.yaml deleted file mode 100644 index a04cd26b..00000000 --- a/.changes/0.1.0/Fixes-20240216-135420.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Make all adapter zone tests importable by removing "Test" prefix -time: 2024-02-16T13:54:20.411864-05:00 -custom: - Author: mikealfare - Issue: "93" diff --git a/.changes/0.1.0/Under the Hood-20240109-131958.yaml b/.changes/0.1.0/Under the Hood-20240109-131958.yaml deleted file mode 100644 index a062a299..00000000 --- a/.changes/0.1.0/Under the Hood-20240109-131958.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Configure `dependabot` -time: 2024-01-09T13:19:58.060742-05:00 -custom: - Author: mikealfare - Issue: "14" diff --git a/.changes/0.1.0/Under the Hood-20240112-230236.yaml b/.changes/0.1.0/Under the Hood-20240112-230236.yaml deleted file mode 100644 index 1470ac6e..00000000 --- a/.changes/0.1.0/Under the Hood-20240112-230236.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Implement unit testing in CI -time: 2024-01-12T23:02:36.630106-05:00 -custom: - Author: mikealfare - Issue: "10" diff --git a/.changes/0.1.0/Under the Hood-20240123-121220.yaml b/.changes/0.1.0/Under the Hood-20240123-121220.yaml deleted file mode 100644 index 8d01f256..00000000 --- 
a/.changes/0.1.0/Under the Hood-20240123-121220.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Allow version to be specified in either __version__.py or __about__.py -time: 2024-01-23T12:12:20.529147-05:00 -custom: - Author: mikealfare - Issue: "44" diff --git a/.changes/0.1.0/Under the Hood-20240220-164223.yaml b/.changes/0.1.0/Under the Hood-20240220-164223.yaml deleted file mode 100644 index eefa441e..00000000 --- a/.changes/0.1.0/Under the Hood-20240220-164223.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Remove __init__.py file from dbt.tests -time: 2024-02-20T16:42:23.706-05:00 -custom: - Author: gshank - Issue: "96" diff --git a/.changes/1.0.0.md b/.changes/1.0.0.md index d381d635..c46c8148 100644 --- a/.changes/1.0.0.md +++ b/.changes/1.0.0.md @@ -1,15 +1,32 @@ ## dbt-adapters 1.0.0 - April 01, 2024 +### Features + +* Update RelationConfig to capture all fields used by adapters ([#30](https://github.com/dbt-labs/dbt-adapters/issues/30)) + ### Fixes -* Add field wrapper to BaseRelation members that were missing it. -* Add "description" and "meta" fields to RelationConfig protocol +* Add field wrapper to BaseRelation members that were missing it. ([#108](https://github.com/dbt-labs/dbt-adapters/issues/108)) +* Add "description" and "meta" fields to RelationConfig protocol ([#119](https://github.com/dbt-labs/dbt-adapters/issues/119)) +* Ignore adapter-level support warnings for 'custom' constraints ([#90](https://github.com/dbt-labs/dbt-adapters/issues/90)) +* Make all adapter zone tests importable by removing "Test" prefix ([#93](https://github.com/dbt-labs/dbt-adapters/issues/93)) + +### Docs + +* Configure `changie` ([#16](https://github.com/dbt-labs/dbt-adapters/issues/16)) +* Setup ADR tracking framework ([#11](https://github.com/dbt-labs/dbt-adapters/issues/11)) +* Create issue templates ([#12](https://github.com/dbt-labs/dbt-adapters/issues/12)) +* Create PR template ([#13](https://github.com/dbt-labs/dbt-adapters/issues/13)) ### Under the Hood -* Lazy load agate to improve dbt-core performance -* add BaseAdapater.MAX_SCHEMA_METADATA_RELATIONS +* Lazy load agate to improve dbt-core performance ([#125](https://github.com/dbt-labs/dbt-adapters/issues/125)) +* add BaseAdapater.MAX_SCHEMA_METADATA_RELATIONS ([#131](https://github.com/dbt-labs/dbt-adapters/issues/131)) +* Configure `dependabot` ([#14](https://github.com/dbt-labs/dbt-adapters/issues/14)) +* Implement unit testing in CI ([#22](https://github.com/dbt-labs/dbt-adapters/issues/22)) +* Allow version to be specified in either __version__.py or __about__.py ([#44](https://github.com/dbt-labs/dbt-adapters/issues/44)) +* Remove __init__.py file from dbt.tests ([#96](https://github.com/dbt-labs/dbt-adapters/issues/96)) ### Security -* Pin `black>=24.3` in `pyproject.toml` +* Pin `black>=24.3` in `pyproject.toml` ([#140](https://github.com/dbt-labs/dbt-adapters/issues/140)) diff --git a/.changes/1.1.0.md b/.changes/1.1.0.md index 9e7db78e..224d8e85 100644 --- a/.changes/1.1.0.md +++ b/.changes/1.1.0.md @@ -2,28 +2,28 @@ ### Features -* Debug log when `type_code` fails to convert to a `data_type` -* Introduce TableLastModifiedMetadataBatch and implement BaseAdapter.calculate_freshness_from_metadata_batch -* Support for sql fixtures in unit testing -* Cross-database `cast` macro -* Allow adapters to opt out of aliasing the subquery generated by render_limited -* subquery alias generated by render_limited now includes the relation name to mitigate duplicate aliasing +* Debug log when `type_code` 
fails to convert to a `data_type` ([#135](https://github.com/dbt-labs/dbt-adapters/issues/135)) +* Introduce TableLastModifiedMetadataBatch and implement BaseAdapter.calculate_freshness_from_metadata_batch ([#127](https://github.com/dbt-labs/dbt-adapters/issues/127)) +* Support for sql fixtures in unit testing ([#146](https://github.com/dbt-labs/dbt-adapters/issues/146)) +* Cross-database `cast` macro ([#173](https://github.com/dbt-labs/dbt-adapters/issues/173)) +* Allow adapters to opt out of aliasing the subquery generated by render_limited ([#179](https://github.com/dbt-labs/dbt-adapters/issues/179)) +* subquery alias generated by render_limited now includes the relation name to mitigate duplicate aliasing ([#179](https://github.com/dbt-labs/dbt-adapters/issues/179)) ### Fixes -* Fix adapter-specific cast handling for constraint enforcement +* Fix adapter-specific cast handling for constraint enforcement ([#165](https://github.com/dbt-labs/dbt-adapters/issues/165)) ### Docs -* Use `dbt-adapters` throughout the contributing guide +* Use `dbt-adapters` throughout the contributing guide ([#137](https://github.com/dbt-labs/dbt-adapters/issues/137)) ### Under the Hood -* Add the option to set the log level of the AdapterRegistered event -* Update dependabot config to cover GHA -* Validate that dbt-core and dbt-adapters remain de-coupled -* remove dbt_version from query comment test fixture +* Add the option to set the log level of the AdapterRegistered event ([#141](https://github.com/dbt-labs/dbt-adapters/issues/141)) +* Update dependabot config to cover GHA ([#161](https://github.com/dbt-labs/dbt-adapters/issues/161)) +* Validate that dbt-core and dbt-adapters remain de-coupled ([#174](https://github.com/dbt-labs/dbt-adapters/issues/174)) +* remove dbt_version from query comment test fixture ([#184](https://github.com/dbt-labs/dbt-adapters/issues/184)) ### Dependencies -* add support for py3.12 +* add support for py3.12 ([#185](https://github.com/dbt-labs/dbt-adapters/issues/185)) diff --git a/.changes/1.1.1.md b/.changes/1.1.1.md index a1c38c88..9e590f94 100644 --- a/.changes/1.1.1.md +++ b/.changes/1.1.1.md @@ -2,4 +2,4 @@ ### Features -* Enable serialization contexts +* Enable serialization contexts ([#197](https://github.com/dbt-labs/dbt-adapters/issues/197)) diff --git a/.changes/1.10.0.md b/.changes/1.10.0.md new file mode 100644 index 00000000..10fc93bc --- /dev/null +++ b/.changes/1.10.0.md @@ -0,0 +1 @@ +## dbt-adapters 1.10.0 - September 12, 2024 diff --git a/.changes/1.10.1.md b/.changes/1.10.1.md new file mode 100644 index 00000000..01d6fda2 --- /dev/null +++ b/.changes/1.10.1.md @@ -0,0 +1 @@ +## dbt-adapters 1.10.1 - September 16, 2024 diff --git a/.changes/1.10.2.md b/.changes/1.10.2.md new file mode 100644 index 00000000..09244521 --- /dev/null +++ b/.changes/1.10.2.md @@ -0,0 +1,5 @@ +## dbt-adapters 1.10.2 - October 01, 2024 + +### Under the Hood + +- dbt-tests-adapters: Add required begin to microbatch model config to BaseMicrobatch test ([#315](https://github.com/dbt-labs/dbt-adapters/issues/315)) diff --git a/.changes/1.10.3.md b/.changes/1.10.3.md new file mode 100644 index 00000000..29844ce2 --- /dev/null +++ b/.changes/1.10.3.md @@ -0,0 +1 @@ +## dbt-adapters 1.10.3 - October 29, 2024 diff --git a/.changes/1.10.4.md b/.changes/1.10.4.md new file mode 100644 index 00000000..f8bbd420 --- /dev/null +++ b/.changes/1.10.4.md @@ -0,0 +1 @@ +## dbt-adapters 1.10.4 - November 11, 2024 diff --git a/.changes/1.11.0.md b/.changes/1.11.0.md new file mode 100644 index 
00000000..fbe85222 --- /dev/null +++ b/.changes/1.11.0.md @@ -0,0 +1,12 @@ +## dbt-adapters 1.11.0 - November 11, 2024 + +### Features + +- Use a behavior flag to gate microbatch functionality (instead of an environment variable) ([#327](https://github.com/dbt-labs/dbt-adapters/issues/327)) + +### Under the Hood + +- Add `query_id` to SQLQueryStatus ([#342](https://github.com/dbt-labs/dbt-adapters/issues/342)) + +### Contributors +- [@cmcarthur](https://github.com/cmcarthur) ([#342](https://github.com/dbt-labs/dbt-adapters/issues/342)) diff --git a/.changes/1.2.1.md b/.changes/1.2.1.md index 2aac5674..e554b90b 100644 --- a/.changes/1.2.1.md +++ b/.changes/1.2.1.md @@ -2,9 +2,14 @@ ### Features -* Improvement of the compile error message in the get_fixture-sql.sql when the relation or the model not exist +* Improvement of the compile error message in the get_fixture-sql.sql when the relation or the model not exist ([#203](https://github.com/dbt-labs/dbt-adapters/issues/203)) +* Cross-database `date` macro ([#191](https://github.com/dbt-labs/dbt-adapters/issues/191)) + +### Fixes + +* Update Clone test to reflect core change removing `deferred` attribute from nodes ([#194](https://github.com/dbt-labs/dbt-adapters/issues/194)) ### Under the Hood -* Add query recording for adapters which use SQLConnectionManager -* Improve memory efficiency of process_results() +* Add query recording for adapters which use SQLConnectionManager ([#195](https://github.com/dbt-labs/dbt-adapters/issues/195)) +* Improve memory efficiency of process_results() ([#217](https://github.com/dbt-labs/dbt-adapters/issues/217)) diff --git a/.changes/1.3.0.md b/.changes/1.3.0.md index dae1f819..6a23c3ba 100644 --- a/.changes/1.3.0.md +++ b/.changes/1.3.0.md @@ -2,4 +2,4 @@ ### Features -* Add get_catalog_for_single_relation macro and capability to enable adapters to optimize catalog generation +* Add get_catalog_for_single_relation macro and capability to enable adapters to optimize catalog generation ([#231](https://github.com/dbt-labs/dbt-adapters/issues/231)) diff --git a/.changes/1.3.2.md b/.changes/1.3.2.md new file mode 100644 index 00000000..6963a4c3 --- /dev/null +++ b/.changes/1.3.2.md @@ -0,0 +1,6 @@ +## dbt-adapters 1.3.2 - July 02, 2024 + +### Under the Hood + +* Fix query timer resolution ([#246](https://github.com/dbt-labs/dbt-adapters/issues/246)) +* Add optional release_connection parameter to connection_named method ([#247](https://github.com/dbt-labs/dbt-adapters/issues/247)) diff --git a/.changes/1.3.3.md b/.changes/1.3.3.md new file mode 100644 index 00000000..c62a0562 --- /dev/null +++ b/.changes/1.3.3.md @@ -0,0 +1,9 @@ +## dbt-adapters 1.3.3 - July 09, 2024 + +### Fixes + +* Fix scenario where using the `--empty` flag causes metadata queries to contain limit clauses ([#213](https://github.com/dbt-labs/dbt-adapters/issues/213)) + +### Under the Hood + +* --limit flag no longer subshells the query. This resolves the dbt Cloud experience issue where limit prevents ordering elements.. 
([#207](https://github.com/dbt-labs/dbt-adapters/issues/207)) diff --git a/.changes/1.4.0.md b/.changes/1.4.0.md new file mode 100644 index 00000000..fc6279db --- /dev/null +++ b/.changes/1.4.0.md @@ -0,0 +1,13 @@ +## dbt-adapters 1.4.0 - July 30, 2024 + +### Features + +- render 'to' and 'to_columns' fields on foreign key constraints, and bump dbt-common lower bound to 1.6 ([#271](https://github.com/dbt-labs/dbt-adapters/issues/271)) + +### Fixes + +- Incremental table varchar column definition changed ([#276](https://github.com/dbt-labs/dbt-adapters/issues/276)) + +### Under the Hood + +- Rework record/replay to record at the database connection level. ([#244](https://github.com/dbt-labs/dbt-adapters/issues/244)) diff --git a/.changes/1.4.1.md b/.changes/1.4.1.md new file mode 100644 index 00000000..82a731a2 --- /dev/null +++ b/.changes/1.4.1.md @@ -0,0 +1,13 @@ +## dbt-adapters 1.4.1 - August 09, 2024 + +### Fixes + +- Use model alias for the CTE identifier generated during ephemeral materialization ([#5273](https://github.com/dbt-labs/dbt-adapters/issues/5273)) + +### Under the Hood + +- Updating changie.yaml to add contributors and PR links ([#219](https://github.com/dbt-labs/dbt-adapters/issues/219)) + +### Contributors +- [@jeancochrane](https://github.com/jeancochrane) ([#5273](https://github.com/dbt-labs/dbt-adapters/issues/5273)) +- [@leahwicz](https://github.com/leahwicz) ([#219](https://github.com/dbt-labs/dbt-adapters/issues/219)) diff --git a/.changes/1.5.0.md b/.changes/1.5.0.md new file mode 100644 index 00000000..e4c54bdc --- /dev/null +++ b/.changes/1.5.0.md @@ -0,0 +1,11 @@ +## dbt-adapters 1.5.0 - September 10, 2024 + +### Features + +- Compare 'snapshot_get_time' and snapshot 'updated_at' data types ([#242](https://github.com/dbt-labs/dbt-adapters/issues/242)) +- Add Behavior Flag framework ([#281](https://github.com/dbt-labs/dbt-adapters/issues/281)) +- Add EventTimeFilter to BaseRelation, which renders a filtered relation when start or end is set ([#294](https://github.com/dbt-labs/dbt-adapters/issues/294)) + +### Dependencies + +- Update dbt-common pin to >=1.8 ([#299](https://github.com/dbt-labs/dbt-adapters/pull/299)) diff --git a/.changes/1.6.0.md b/.changes/1.6.0.md new file mode 100644 index 00000000..c109454a --- /dev/null +++ b/.changes/1.6.0.md @@ -0,0 +1,5 @@ +## dbt-adapters 1.6.0 - September 12, 2024 + +### Features + +- Default microbatch strategy implementation and base tests ([#302](https://github.com/dbt-labs/dbt-adapters/issues/302)) diff --git a/.changes/1.6.1.md b/.changes/1.6.1.md new file mode 100644 index 00000000..45b41bcd --- /dev/null +++ b/.changes/1.6.1.md @@ -0,0 +1 @@ +## dbt-adapters 1.6.1 - September 16, 2024 diff --git a/.changes/1.7.0.md b/.changes/1.7.0.md new file mode 100644 index 00000000..efbdc601 --- /dev/null +++ b/.changes/1.7.0.md @@ -0,0 +1,5 @@ +## dbt-adapters 1.7.0 - September 19, 2024 + +### Features + +- Allow configuring of snapshot column names ([#289](https://github.com/dbt-labs/dbt-adapters/issues/289)) diff --git a/.changes/1.7.1.md b/.changes/1.7.1.md new file mode 100644 index 00000000..4acda7f8 --- /dev/null +++ b/.changes/1.7.1.md @@ -0,0 +1,5 @@ +## dbt-adapters 1.7.1 - October 15, 2024 + +### Features + +- Enable setting current value of dbt_valid_to ([#320](https://github.com/dbt-labs/dbt-adapters/issues/320)) diff --git a/.changes/1.7.2.md b/.changes/1.7.2.md new file mode 100644 index 00000000..b17bf217 --- /dev/null +++ b/.changes/1.7.2.md @@ -0,0 +1,16 @@ +## dbt-adapters 1.7.2 - October 21, 2024 + +### 
Breaking Changes + +- Drop support for Python 3.8 ([#332](https://github.com/dbt-labs/dbt-adapters/issues/332)) + +### Features + +- Allows unique_key for snapshots to take a list ([#181](https://github.com/dbt-labs/dbt-adapters/issues/181)) + +### Fixes + +- Always validate an incremental model's `incremental_strategy` ([#330](https://github.com/dbt-labs/dbt-adapters/issues/330)) + +### Contributors +- [@agpapa](https://github.com/agpapa) ([#181](https://github.com/dbt-labs/dbt-adapters/issues/181)) diff --git a/.changes/1.8.0.md b/.changes/1.8.0.md index e781a985..f73a0300 100644 --- a/.changes/1.8.0.md +++ b/.changes/1.8.0.md @@ -1,9 +1,9 @@ -## dbt-adapters 1.8.0 - May 09, 2024 +## dbt-adapters 1.8.0 - October 29, 2024 -### Features +### Fixes -* Cross-database `date` macro +- Always make behavior flags available for evaluation ([#338](https://github.com/dbt-labs/dbt-adapters/issues/338)) -### Fixes +### Under the Hood -* Update Clone test to reflect core change removing `deferred` attribute from nodes +- Add adapter telemetry. ([#301](https://github.com/dbt-labs/dbt-adapters/issues/301)) diff --git a/.changes/1.9.0.md b/.changes/1.9.0.md index 2f016563..cde85d7c 100644 --- a/.changes/1.9.0.md +++ b/.changes/1.9.0.md @@ -1 +1,6 @@ -## dbt-adapters 1.9.0 - June 18, 2024 +## dbt-adapters 1.9.0 - November 13, 2024 + +### Fixes + +- Negate the check for microbatch behavior flag in determining builtins ([#349](https://github.com/dbt-labs/dbt-adapters/issues/349)) +- Move require_batched_execution_for_custom_microbatch_strategy flag to global ([#351](https://github.com/dbt-labs/dbt-adapters/issues/351)) diff --git a/.changes/1.9.1.md b/.changes/1.9.1.md deleted file mode 100644 index 900e6b75..00000000 --- a/.changes/1.9.1.md +++ /dev/null @@ -1 +0,0 @@ -## dbt-adapters 1.9.1 - June 20, 2024 diff --git a/.changes/unreleased/Under the Hood-20240621-150837.yaml b/.changes/unreleased/Under the Hood-20240621-150837.yaml deleted file mode 100644 index fab070c7..00000000 --- a/.changes/unreleased/Under the Hood-20240621-150837.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Fix query timer resolution -time: 2024-06-21T15:08:37.43527-04:00 -custom: - Author: peterallenwebb - Issue: "246" diff --git a/.changes/unreleased/Under the Hood-20240624-161108.yaml b/.changes/unreleased/Under the Hood-20240624-161108.yaml deleted file mode 100644 index 9042547f..00000000 --- a/.changes/unreleased/Under the Hood-20240624-161108.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Add optional release_connection parameter to connection_named method -time: 2024-06-24T16:11:08.773419+01:00 -custom: - Author: aranke - Issue: "247" diff --git a/.changie.yaml b/.changie.yaml index afbafb22..8f1d8615 100644 --- a/.changie.yaml +++ b/.changie.yaml @@ -1,20 +1,65 @@ changesDir: .changes unreleasedDir: unreleased headerPath: header.tpl.md +versionHeaderPath: "" changelogPath: CHANGELOG.md versionExt: md -envPrefix: CHANGIE_ +envPrefix: "CHANGIE_" versionFormat: '## dbt-adapters {{.Version}} - {{.Time.Format "January 02, 2006"}}' kindFormat: '### {{.Kind}}' -changeFormat: '* {{.Body}}' +changeFormat: |- + {{- $IssueList := list }} + {{- $changes := splitList " " $.Custom.Issue }} + {{- range $issueNbr := $changes }} + {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-adapters/issues/nbr)" | replace "nbr" $issueNbr }} + {{- $IssueList = append $IssueList $changeLink }} + {{- end -}} + - {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}}) + 
kinds: - - label: Breaking Changes - - label: Features - - label: Fixes - - label: Docs - - label: Under the Hood - - label: Dependencies - - label: Security +- label: Breaking Changes +- label: Features +- label: Fixes +- label: Under the Hood +- label: Dependencies + changeFormat: |- + {{- $PRList := list }} + {{- $changes := splitList " " $.Custom.PR }} + {{- range $pullrequest := $changes }} + {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-adapters/pull/nbr)" | replace "nbr" $pullrequest }} + {{- $PRList = append $PRList $changeLink }} + {{- end -}} + - {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}}) + skipGlobalChoices: true + additionalChoices: + - key: Author + label: GitHub Username(s) (separated by a single space if multiple) + type: string + minLength: 3 + - key: PR + label: GitHub Pull Request Number (separated by a single space if multiple) + type: string + minLength: 1 +- label: Security + changeFormat: |- + {{- $PRList := list }} + {{- $changes := splitList " " $.Custom.PR }} + {{- range $pullrequest := $changes }} + {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-adapters/pull/nbr)" | replace "nbr" $pullrequest }} + {{- $PRList = append $PRList $changeLink }} + {{- end -}} + - {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}}) + skipGlobalChoices: true + additionalChoices: + - key: Author + label: GitHub Username(s) (separated by a single space if multiple) + type: string + minLength: 3 + - key: PR + label: GitHub Pull Request Number (separated by a single space if multiple) + type: string + minLength: 1 + newlines: afterChangelogHeader: 1 afterKind: 1 @@ -31,3 +76,57 @@ custom: label: GitHub Issue Number (separated by a single space if multiple) type: string minLength: 1 + + +footerFormat: | + {{- $contributorDict := dict }} + {{- /* ensure all names in this list are all lowercase for later matching purposes */}} + {{- $core_team := splitList " " .Env.CORE_TEAM }} + {{- /* ensure we always skip snyk and dependabot in addition to the core team */}} + {{- $maintainers := list "dependabot[bot]" "snyk-bot"}} + {{- range $team_member := $core_team }} + {{- $team_member_lower := lower $team_member }} + {{- $maintainers = append $maintainers $team_member_lower }} + {{- end }} + {{- range $change := .Changes }} + {{- $authorList := splitList " " $change.Custom.Author }} + {{- /* loop through all authors for a single changelog */}} + {{- range $author := $authorList }} + {{- $authorLower := lower $author }} + {{- /* we only want to include non-core team contributors */}} + {{- if not (has $authorLower $maintainers)}} + {{- $changeList := splitList " " $change.Custom.Author }} + {{- $IssueList := list }} + {{- $changeLink := $change.Kind }} + {{- if or (eq $change.Kind "Dependencies") (eq $change.Kind "Security") }} + {{- $changes := splitList " " $change.Custom.PR }} + {{- range $issueNbr := $changes }} + {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-adapters/pull/nbr)" | replace "nbr" $issueNbr }} + {{- $IssueList = append $IssueList $changeLink }} + {{- end -}} + {{- else }} + {{- $changes := splitList " " $change.Custom.Issue }} + {{- range $issueNbr := $changes }} + {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-adapters/issues/nbr)" | replace "nbr" $issueNbr }} + {{- $IssueList = append $IssueList $changeLink }} + {{- end -}} + {{- end }} + {{- /* check if this contributor has other changes associated with them already */}} + {{- if hasKey 
$contributorDict $author }} + {{- $contributionList := get $contributorDict $author }} + {{- $contributionList = concat $contributionList $IssueList }} + {{- $contributorDict := set $contributorDict $author $contributionList }} + {{- else }} + {{- $contributionList := $IssueList }} + {{- $contributorDict := set $contributorDict $author $contributionList }} + {{- end }} + {{- end}} + {{- end}} + {{- end }} + {{- /* no indentation here for formatting so the final markdown doesn't have unneeded indentations */}} + {{- if $contributorDict}} + ### Contributors + {{- range $k,$v := $contributorDict }} + - [@{{$k}}](https://github.com/{{$k}}) ({{ range $index, $element := $v }}{{if $index}}, {{end}}{{$element}}{{end}}) + {{- end }} + {{- end }} diff --git a/.github/actions/build-hatch/action.yml b/.github/actions/build-hatch/action.yml index fe9825d4..6d81339a 100644 --- a/.github/actions/build-hatch/action.yml +++ b/.github/actions/build-hatch/action.yml @@ -13,7 +13,7 @@ inputs: default: "./" archive-name: description: Where to upload the artifacts - required: true + default: "" runs: using: composite @@ -30,7 +30,8 @@ runs: working-directory: ${{ inputs.working-dir }} - name: Upload artifacts - uses: actions/upload-artifact@v3 + if: ${{ inputs.archive-name != '' }} + uses: actions/upload-artifact@v4 with: name: ${{ inputs.archive-name }} path: ${{ inputs.working-dir }}dist/ diff --git a/.github/actions/publish-pypi/action.yml b/.github/actions/publish-pypi/action.yml index deffc6e3..22e6773b 100644 --- a/.github/actions/publish-pypi/action.yml +++ b/.github/actions/publish-pypi/action.yml @@ -14,7 +14,7 @@ runs: steps: - name: Download artifacts - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ inputs.archive-name }} path: dist/ @@ -24,6 +24,6 @@ runs: shell: bash - name: Publish artifacts to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 + uses: pypa/gh-action-pypi-publish@release/v1.11 with: repository-url: ${{ inputs.repository-url }} diff --git a/.github/actions/publish-results/action.yml b/.github/actions/publish-results/action.yml index d863d659..7c73a94f 100644 --- a/.github/actions/publish-results/action.yml +++ b/.github/actions/publish-results/action.yml @@ -5,7 +5,7 @@ inputs: description: File type for file name stub (e.g. "unit-tests") required: true python-version: - description: Python version for the file name stub (e.g. "3.8") + description: Python version for the file name stub (e.g. 
"3.9") required: true source-file: description: File to be uploaded @@ -19,7 +19,7 @@ runs: run: echo "ts=$(date +'%Y-%m-%dT%H-%M-%S')" >> $GITHUB_OUTPUT #no colons allowed for artifacts shell: bash - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: ${{ inputs.file-name }}_python-${{ inputs.python-version }}_${{ steps.timestamp.outputs.ts }}.csv path: ${{ inputs.source-file }} diff --git a/.github/actions/setup-hatch/action.yml b/.github/actions/setup-hatch/action.yml index 6b15cdbf..6bf8ea10 100644 --- a/.github/actions/setup-hatch/action.yml +++ b/.github/actions/setup-hatch/action.yml @@ -13,7 +13,7 @@ runs: using: composite steps: - name: Set up Python ${{ inputs.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ inputs.python-version }} diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 02f010c7..907926a3 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -5,13 +5,25 @@ updates: schedule: interval: "daily" rebase-strategy: "disabled" + ignore: + - dependency-name: "*" + update-types: + - version-update:semver-patch - package-ecosystem: "pip" directory: "/dbt-tests-adapter" schedule: interval: "daily" rebase-strategy: "disabled" + ignore: + - dependency-name: "*" + update-types: + - version-update:semver-patch - package-ecosystem: "github-actions" directory: "/" schedule: interval: "weekly" rebase-strategy: "disabled" + ignore: + - dependency-name: "*" + update-types: + - version-update:semver-patch diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 33d94ff4..00afd704 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -20,11 +20,6 @@ on: types: [checks_requested] workflow_dispatch: workflow_call: - inputs: - changelog_path: - description: "Path to changelog file" - required: true - type: string permissions: read-all @@ -51,35 +46,9 @@ jobs: uses: ./.github/actions/setup-hatch - name: Build `dbt-adapters` - if: ${{ inputs.package == 'dbt-adapters' }} uses: ./.github/actions/build-hatch - name: Build `dbt-tests-adapter` - if: ${{ inputs.package == 'dbt-tests-adapter' }} uses: ./.github/actions/build-hatch with: working-dir: "./dbt-tests-adapter/" - - - name: Setup `hatch` - uses: ./.github/actions/setup-hatch - - - name: Build `dbt-adapters` - if: ${{ inputs.package == 'dbt-adapters' }} - uses: ./.github/actions/build-hatch - - - name: Build `dbt-tests-adapter` - if: ${{ inputs.package == 'dbt-tests-adapter' }} - uses: ./.github/actions/build-hatch - with: - working-dir: "./dbt-tests-adapter/" - - # this step is only needed for the release process - - name: "Upload Build Artifact" - if: ${{ github.event_name == 'workflow_call' }} - uses: actions/upload-artifact@v3 - with: - name: ${{ steps.version.outputs.version_number }} - path: | - ${{ inputs.changelog_path }} - ./dist/ - retention-days: 3 diff --git a/.github/workflows/changelog-existence.yml b/.github/workflows/changelog-existence.yml index d778f565..8732177f 100644 --- a/.github/workflows/changelog-existence.yml +++ b/.github/workflows/changelog-existence.yml @@ -19,9 +19,6 @@ name: Check Changelog Entry on: pull_request_target: types: [opened, reopened, labeled, unlabeled, synchronize] - paths-ignore: ['.changes/**', '.github/**', 'tests/**', 'third-party-stubs/**', '**.md', '**.yml'] - - workflow_dispatch: defaults: run: diff --git a/.github/workflows/github-release.yml b/.github/workflows/github-release.yml index 1c2f41b5..ad0cc2d8 100644 --- 
a/.github/workflows/github-release.yml +++ b/.github/workflows/github-release.yml @@ -208,7 +208,7 @@ jobs: ref: ${{ inputs.sha }} - name: "Download Artifact ${{ inputs.archive_name }}" - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ inputs.archive_name }} path: dist/ diff --git a/.github/workflows/precommit-autoupdate.yml b/.github/workflows/precommit-autoupdate.yml new file mode 100644 index 00000000..74976c48 --- /dev/null +++ b/.github/workflows/precommit-autoupdate.yml @@ -0,0 +1,22 @@ +name: "Run pre-commit autoupdate" + +on: + schedule: + - cron: "30 1 * * SAT" + workflow_dispatch: + +permissions: + contents: write + +concurrency: + group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.sha }} + cancel-in-progress: true + +jobs: + precommit-autoupdate: + name: "Run pre-commit autoupdate" + uses: dbt-labs/actions/.github/workflows/pre-commit-autoupdate.yml@main + secrets: + TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }} + SLACK_WEBHOOK_PR_URL: ${{ secrets.SLACK_DEV_ADAPTER_PULL_REQUESTS }} + SLACK_WEBHOOK_ALERTS_URL: ${{ secrets.SLACK_DEV_ADAPTER_ALERTS }} diff --git a/.github/workflows/release_prep_hatch.yml b/.github/workflows/release_prep_hatch.yml index b043e19e..a6105786 100644 --- a/.github/workflows/release_prep_hatch.yml +++ b/.github/workflows/release_prep_hatch.yml @@ -34,7 +34,7 @@ # name: Version Bump and Changelog Generation -run-name: Bump ${{ inputs.package }}==${{ inputs.version_number }} for release to ${{ inputs.deploy_to }} and generate changelog +run-name: Bump to ${{ inputs.version_number }} for release to ${{ inputs.deploy_to }} and generate changelog on: workflow_call: inputs: @@ -131,7 +131,7 @@ jobs: - name: "Audit Version And Parse Into Parts" id: semver - uses: dbt-labs/actions/parse-semver@v1.1.0 + uses: dbt-labs/actions/parse-semver@v1.1.1 with: version: ${{ inputs.version_number }} @@ -288,7 +288,7 @@ jobs: steps: - name: "Checkout ${{ github.repository }} Branch ${{ needs.create-temp-branch.outputs.branch_name }}" - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ needs.create-temp-branch.outputs.branch_name }} - name: Setup `hatch` @@ -392,13 +392,13 @@ jobs: steps: - name: "Checkout ${{ github.repository }} Branch ${{ needs.create-temp-branch.outputs.branch_name }}" - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ needs.create-temp-branch.outputs.branch_name }} - name: "Setup `hatch`" uses: ./.github/actions/setup-hatch - name: "Run Unit Tests" - run: hatch run unit-tests:all + run: hatch run unit-tests run-integration-tests: runs-on: ubuntu-20.04 @@ -407,7 +407,7 @@ jobs: steps: - name: "Checkout ${{ github.repository }} Branch ${{ needs.create-temp-branch.outputs.branch_name }}" - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ needs.create-temp-branch.outputs.branch_name }} @@ -447,7 +447,7 @@ jobs: python-version: ${{ env.PYTHON_TARGET_VERSION }} - name: Run tests - run: hatch run integration-tests:all + run: hatch run integration-tests merge-changes-into-target-branch: runs-on: ubuntu-latest @@ -467,7 +467,7 @@ jobs: echo needs.audit-changelog.outputs.exists: ${{ needs.audit-changelog.outputs.exists }} echo needs.audit-version-in-code.outputs.up_to_date: ${{ needs.audit-version-in-code.outputs.up_to_date }} - name: "Checkout Repo ${{ github.repository }}" - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: "Merge Changes Into ${{ inputs.target_branch }}" uses: everlytic/branch-merge@1.1.5 @@ -524,7 +524,7 @@ jobs: message="The ${{
steps.resolve_branch.outputs.target_branch }} branch will be used for release" echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message" - name: "Checkout Resolved Branch - ${{ steps.resolve_branch.outputs.target_branch }}" - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ steps.resolve_branch.outputs.target_branch }} diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 7d720655..b4ac615d 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -23,7 +23,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12"] steps: - name: Check out repository @@ -37,7 +37,7 @@ jobs: python-version: ${{ matrix.python-version }} - name: Run unit tests - run: hatch run unit-tests:all + run: hatch run unit-tests shell: bash - name: Publish results diff --git a/.gitignore b/.gitignore index a14d6d0d..29c470c5 100644 --- a/.gitignore +++ b/.gitignore @@ -154,3 +154,13 @@ cython_debug/ # PyCharm .idea/ + +# MacOS +.DS_Store + +# VSCode +.vscode/ +.venv/ + +# Vim +*.swp diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index caf34209..b7835274 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -18,19 +18,19 @@ repos: - id: dbt-core-in-adapters-check - repo: https://github.com/psf/black - rev: 24.4.0 + rev: 24.8.0 hooks: - id: black args: - --line-length=99 - - --target-version=py38 - --target-version=py39 - --target-version=py310 - --target-version=py311 + - --target-version=py312 - --force-exclude=dbt/adapters/events/adapter_types_pb2.py - repo: https://github.com/pycqa/flake8 - rev: 7.0.0 + rev: 7.1.1 hooks: - id: flake8 exclude: dbt/adapters/events/adapter_types_pb2.py|tests/functional/ @@ -38,10 +38,10 @@ repos: - --max-line-length=99 - --select=E,F,W - --ignore=E203,E501,E704,E741,W503,W504 - - --per-file-ignores=*/__init__.py:F401 + - --per-file-ignores=*/__init__.py:F401,*/conftest.py:F401 - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.9.0 + rev: v1.11.2 hooks: - id: mypy exclude: dbt/adapters/events/adapter_types_pb2.py|dbt-tests-adapter/dbt/__init__.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 9114bcd1..87cca898 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,19 +5,145 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html), and is generated by [Changie](https://github.com/miniscruff/changie). 
-## dbt-adapters 1.9.1 - June 20, 2024 +## dbt-adapters 1.11.0 - November 11, 2024 -## dbt-adapters 1.9.0 - June 18, 2024 +### Features + +- Use a behavior flag to gate microbatch functionality (instead of an environment variable) ([#327](https://github.com/dbt-labs/dbt-adapters/issues/327)) + +### Under the Hood + +- Add `query_id` to SQLQueryStatus ([#342](https://github.com/dbt-labs/dbt-adapters/issues/342)) + +### Contributors +- [@cmcarthur](https://github.com/cmcarthur) ([#342](https://github.com/dbt-labs/dbt-adapters/issues/342)) + +## dbt-adapters 1.10.4 - November 11, 2024 + +## dbt-adapters 1.10.3 - October 29, 2024 + +## dbt-adapters 1.10.2 - October 01, 2024 + +### Under the Hood + +- dbt-tests-adapters: Add required begin to microbatch model config to BaseMicrobatch test ([#315](https://github.com/dbt-labs/dbt-adapters/issues/315)) + +## dbt-adapters 1.10.1 - September 16, 2024 + +## dbt-adapters 1.10.0 - September 12, 2024 + +## dbt-adapters 1.9.0 - November 13, 2024 + +### Fixes + +- Negate the check for microbatch behavior flag in determining builtins ([#349](https://github.com/dbt-labs/dbt-adapters/issues/349)) +- Move require_batched_execution_for_custom_microbatch_strategy flag to global ([#351](https://github.com/dbt-labs/dbt-adapters/issues/351)) + + + +## dbt-adapters 1.8.0 - October 29, 2024 + +### Fixes + +- Always make behavior flags available for evaluation ([#338](https://github.com/dbt-labs/dbt-adapters/issues/338)) + +### Under the Hood + +- Add adapter telemetry. ([#301](https://github.com/dbt-labs/dbt-adapters/issues/301)) + +## dbt-adapters 1.7.2 - October 21, 2024 + +### Breaking Changes + +- Drop support for Python 3.8 ([#332](https://github.com/dbt-labs/dbt-adapters/issues/332)) + +### Features + +- Allows unique_key for snapshots to take a list ([#181](https://github.com/dbt-labs/dbt-adapters/issues/181)) + +### Fixes + +- Always validate an incremental model's `incremental_strategy` ([#330](https://github.com/dbt-labs/dbt-adapters/issues/330)) + +### Contributors +- [@agpapa](https://github.com/agpapa) ([#181](https://github.com/dbt-labs/dbt-adapters/issues/181)) + +## dbt-adapters 1.7.1 - October 15, 2024 + +### Features + +- Enable setting current value of dbt_valid_to ([#320](https://github.com/dbt-labs/dbt-adapters/issues/320)) + +## dbt-adapters 1.7.0 - September 19, 2024 + +### Features -## dbt-adapters 1.8.0 - May 09, 2024 +- Allow configuring of snapshot column names ([#289](https://github.com/dbt-labs/dbt-adapters/issues/289)) + +## dbt-adapters 1.6.1 - September 16, 2024 + +## dbt-adapters 1.6.0 - September 12, 2024 + +### Features + +- Default microbatch strategy implementation and base tests ([#302](https://github.com/dbt-labs/dbt-adapters/issues/302)) + +## dbt-adapters 1.5.0 - September 10, 2024 + +### Features + +- Compare 'snapshot_get_time' and snapshot 'updated_at' data types ([#242](https://github.com/dbt-labs/dbt-adapters/issues/242)) +- Add Behavior Flag framework ([#281](https://github.com/dbt-labs/dbt-adapters/issues/281)) +- Add EventTimeFilter to BaseRelation, which renders a filtered relation when start or end is set ([#294](https://github.com/dbt-labs/dbt-adapters/issues/294)) + +### Dependencies + +- Update dbt-common pin to >=1.8 ([#299](https://github.com/dbt-labs/dbt-adapters/pull/299)) + +## dbt-adapters 1.4.1 - August 09, 2024 + +### Fixes + +- Use model alias for the CTE identifier generated during ephemeral materialization ([#5273](https://github.com/dbt-labs/dbt-adapters/issues/5273)) + +### Under the Hood + +- 
Updating changie.yaml to add contributors and PR links ([#219](https://github.com/dbt-labs/dbt-adapters/issues/219)) + +### Contributors +- [@jeancochrane](https://github.com/jeancochrane) ([#5273](https://github.com/dbt-labs/dbt-adapters/issues/5273)) +- [@leahwicz](https://github.com/leahwicz) ([#219](https://github.com/dbt-labs/dbt-adapters/issues/219)) + +## dbt-adapters 1.4.0 - July 30, 2024 ### Features -* Cross-database `date` macro +- render 'to' and 'to_columns' fields on foreign key constraints, and bump dbt-common lower bound to 1.6 ([#271](https://github.com/dbt-labs/dbt-adapters/issues/271)) + +### Fixes + +- Incremental table varchar column definition changed ([#276](https://github.com/dbt-labs/dbt-adapters/issues/276)) + +### Under the Hood + +- Rework record/replay to record at the database connection level. ([#244](https://github.com/dbt-labs/dbt-adapters/issues/244)) + +## dbt-adapters 1.3.3 - July 09, 2024 ### Fixes -* Update Clone test to reflect core change removing `deferred` attribute from nodes +* Fix scenario where using the `--empty` flag causes metadata queries to contain limit clauses ([#213](https://github.com/dbt-labs/dbt-adapters/issues/213)) + +### Under the Hood + +* --limit flag no longer subshells the query. This resolves the dbt Cloud experience issue where limit prevents ordering elements.. ([#207](https://github.com/dbt-labs/dbt-adapters/issues/207)) + +## dbt-adapters 1.3.2 - July 02, 2024 + +### Under the Hood + +* Fix query timer resolution ([#246](https://github.com/dbt-labs/dbt-adapters/issues/246)) +* Add optional release_connection parameter to connection_named method ([#247](https://github.com/dbt-labs/dbt-adapters/issues/247)) ## dbt-adapters 1.3.1 - June 20, 2024 @@ -25,67 +151,89 @@ and is generated by [Changie](https://github.com/miniscruff/changie). 
### Features -* Add get_catalog_for_single_relation macro and capability to enable adapters to optimize catalog generation +* Add get_catalog_for_single_relation macro and capability to enable adapters to optimize catalog generation ([#231](https://github.com/dbt-labs/dbt-adapters/issues/231)) ## dbt-adapters 1.2.1 - May 21, 2024 ### Features -* Improvement of the compile error message in the get_fixture-sql.sql when the relation or the model not exist +* Improvement of the compile error message in the get_fixture-sql.sql when the relation or the model not exist ([#203](https://github.com/dbt-labs/dbt-adapters/issues/203)) +* Cross-database `date` macro ([#191](https://github.com/dbt-labs/dbt-adapters/issues/191)) + +### Fixes + +* Update Clone test to reflect core change removing `deferred` attribute from nodes ([#194](https://github.com/dbt-labs/dbt-adapters/issues/194)) ### Under the Hood -* Add query recording for adapters which use SQLConnectionManager -* Improve memory efficiency of process_results() +* Add query recording for adapters which use SQLConnectionManager ([#195](https://github.com/dbt-labs/dbt-adapters/issues/195)) +* Improve memory efficiency of process_results() ([#217](https://github.com/dbt-labs/dbt-adapters/issues/217)) ## dbt-adapters 1.1.1 - May 07, 2024 ### Features -* Enable serialization contexts +* Enable serialization contexts ([#197](https://github.com/dbt-labs/dbt-adapters/issues/197)) ## dbt-adapters 1.1.0 - May 01, 2024 ### Features -* Debug log when `type_code` fails to convert to a `data_type` -* Introduce TableLastModifiedMetadataBatch and implement BaseAdapter.calculate_freshness_from_metadata_batch -* Support for sql fixtures in unit testing -* Cross-database `cast` macro -* Allow adapters to opt out of aliasing the subquery generated by render_limited -* subquery alias generated by render_limited now includes the relation name to mitigate duplicate aliasing +* Debug log when `type_code` fails to convert to a `data_type` ([#135](https://github.com/dbt-labs/dbt-adapters/issues/135)) +* Introduce TableLastModifiedMetadataBatch and implement BaseAdapter.calculate_freshness_from_metadata_batch ([#127](https://github.com/dbt-labs/dbt-adapters/issues/127)) +* Support for sql fixtures in unit testing ([#146](https://github.com/dbt-labs/dbt-adapters/issues/146)) +* Cross-database `cast` macro ([#173](https://github.com/dbt-labs/dbt-adapters/issues/173)) +* Allow adapters to opt out of aliasing the subquery generated by render_limited ([#179](https://github.com/dbt-labs/dbt-adapters/issues/179)) +* subquery alias generated by render_limited now includes the relation name to mitigate duplicate aliasing ([#179](https://github.com/dbt-labs/dbt-adapters/issues/179)) ### Fixes -* Fix adapter-specific cast handling for constraint enforcement +* Fix adapter-specific cast handling for constraint enforcement ([#165](https://github.com/dbt-labs/dbt-adapters/issues/165)) ### Docs -* Use `dbt-adapters` throughout the contributing guide +* Use `dbt-adapters` throughout the contributing guide ([#137](https://github.com/dbt-labs/dbt-adapters/issues/137)) ### Under the Hood -* Add the option to set the log level of the AdapterRegistered event -* Update dependabot config to cover GHA -* Validate that dbt-core and dbt-adapters remain de-coupled -* remove dbt_version from query comment test fixture +* Add the option to set the log level of the AdapterRegistered event ([#141](https://github.com/dbt-labs/dbt-adapters/issues/141)) +* Update dependabot config to cover GHA 
([#161](https://github.com/dbt-labs/dbt-adapters/issues/161)) +* Validate that dbt-core and dbt-adapters remain de-coupled ([#174](https://github.com/dbt-labs/dbt-adapters/issues/174)) +* remove dbt_version from query comment test fixture ([#184](https://github.com/dbt-labs/dbt-adapters/issues/184)) ### Dependencies -* add support for py3.12 +* add support for py3.12 ([#185](https://github.com/dbt-labs/dbt-adapters/issues/185)) ## dbt-adapters 1.0.0 - April 01, 2024 +### Features + +* Update RelationConfig to capture all fields used by adapters ([#30](https://github.com/dbt-labs/dbt-adapters/issues/30)) + ### Fixes -* Add field wrapper to BaseRelation members that were missing it. -* Add "description" and "meta" fields to RelationConfig protocol +* Add field wrapper to BaseRelation members that were missing it. ([#108](https://github.com/dbt-labs/dbt-adapters/issues/108)) +* Add "description" and "meta" fields to RelationConfig protocol ([#119](https://github.com/dbt-labs/dbt-adapters/issues/119)) +* Ignore adapter-level support warnings for 'custom' constraints ([#90](https://github.com/dbt-labs/dbt-adapters/issues/90)) +* Make all adapter zone tests importable by removing "Test" prefix ([#93](https://github.com/dbt-labs/dbt-adapters/issues/93)) + +### Docs + +* Configure `changie` ([#16](https://github.com/dbt-labs/dbt-adapters/issues/16)) +* Setup ADR tracking framework ([#11](https://github.com/dbt-labs/dbt-adapters/issues/11)) +* Create issue templates ([#12](https://github.com/dbt-labs/dbt-adapters/issues/12)) +* Create PR template ([#13](https://github.com/dbt-labs/dbt-adapters/issues/13)) ### Under the Hood -* Lazy load agate to improve dbt-core performance -* add BaseAdapater.MAX_SCHEMA_METADATA_RELATIONS +* Lazy load agate to improve dbt-core performance ([#125](https://github.com/dbt-labs/dbt-adapters/issues/125)) +* add BaseAdapater.MAX_SCHEMA_METADATA_RELATIONS ([#131](https://github.com/dbt-labs/dbt-adapters/issues/131)) +* Configure `dependabot` ([#14](https://github.com/dbt-labs/dbt-adapters/issues/14)) +* Implement unit testing in CI ([#22](https://github.com/dbt-labs/dbt-adapters/issues/22)) +* Allow version to be specified in either __version__.py or __about__.py ([#44](https://github.com/dbt-labs/dbt-adapters/issues/44)) +* Remove __init__.py file from dbt.tests ([#96](https://github.com/dbt-labs/dbt-adapters/issues/96)) ### Security -* Pin `black>=24.3` in `pyproject.toml` +* Pin `black>=24.3` in `pyproject.toml` ([#140](https://github.com/dbt-labs/dbt-adapters/issues/140)) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 28f13f6c..1a6e92a2 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -17,13 +17,10 @@ This guide assumes users are developing on a Linux or MacOS system. The following utilities are needed or will be installed in this guide: - `pip` -- `virturalenv` +- `hatch` - `git` - `changie` -If local functional testing is required, then a database instance -and appropriate credentials are also required. - In addition to this guide, users are highly encouraged to read the `dbt-core` [CONTRIBUTING.md](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md). Almost all information there is applicable here. @@ -66,25 +63,39 @@ Rather than forking `dbt-labs/dbt-adapters`, use `dbt-labs/dbt-adapters` directl ### Installation -1. Ensure the latest version of `pip` is installed: +1. Ensure the latest versions of `pip` and `hatch` are installed: ```shell - pip install --upgrade pip + pip install --user --upgrade pip hatch + ``` +2. 
This step is optional but recommended. Configure `hatch` to create its virtual environments in the project. Add this block to your `hatch` `config.toml` file: + ```toml + # MacOS: ~/Library/Application Support/hatch/config.toml + [dirs.env] + virtual = ".hatch" ``` -2. Configure and activate a virtual environment using `virtualenv` as described in -[Setting up an environment](https://github.com/dbt-labs/dbt-core/blob/HEAD/CONTRIBUTING.md#setting-up-an-environment) -3. Install `dbt-adapters` and development dependencies in the virtual environment + This makes `hatch` create all virtual environments in the project root inside the directory `/.hatch`, similar to `/.tox` for `tox`. + It also makes it easier to add this environment as a runner in common IDEs like VSCode and PyCharm. +3. Create a `hatch` environment with all of the development dependencies and activate it: ```shell - pip install -e .[dev] + hatch run setup + hatch shell + ``` +4. Run any commands within the virtual environment by prefixing the command with `hatch run`: + ```shell + hatch run <command> ``` -When `dbt-adapters` is installed this way, any changes made to the `dbt-adapters` source code -will be reflected in the virtual environment immediately. +## Testing +`dbt-adapters` contains [code quality checks](https://github.com/dbt-labs/dbt-adapters/tree/main/.pre-commit-config.yaml) and [unit tests](https://github.com/dbt-labs/dbt-adapters/tree/main/tests/unit). +While `dbt-tests-adapter` is also hosted in this repo, it requires a concrete adapter to run. -## Testing ### Code quality -`dbt-adapters` contains [unit](https://github.com/dbt-labs/dbt-adapters/tree/main/tests/unit) -and [functional](https://github.com/dbt-labs/dbt-adapters/tree/main/tests/functional) tests. +Code quality checks can be run with a single command: +```shell +hatch run code-quality +``` ### Unit tests Unit tests can be run locally without setting up a database connection: ```shell # Note: replace $strings with valid names +# run all unit tests +hatch run unit-tests + # run all unit tests in a module -python -m pytest tests/unit/$test_file_name.py +hatch run unit-tests tests/unit/$test_file_name.py + # run a specific unit test -python -m pytest tests/unit/$test_file_name.py::$test_class_name::$test_method_name +hatch run unit-tests tests/unit/$test_file_name.py::$test_class_name::$test_method_name ``` -### Functional tests - -Functional tests require a database to test against. There are two primary ways to run functional tests: +### Testing against a development branch -- Tests will run automatically against a dbt Labs owned database during PR checks -- Tests can be run locally by configuring a `test.env` file with appropriate `ENV` variables: - ```shell - cp test.env.example test.env - $EDITOR test.env - ``` +Some changes require a change in `dbt-common` and `dbt-adapters`. +In that case, the dependency on `dbt-common` must be updated to point to the development branch. For example: -> **_WARNING:_** The parameters in `test.env` must link to a valid database. -> `test.env` is git-ignored, but be _extra_ careful to never check in credentials -> or other sensitive information when developing. +```toml +[tool.hatch.envs.default] +dependencies = [ + "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git@my-dev-branch", + ..., +] +``` -Functional tests can be run locally with a valid database connection configured in `test.env`: +This will install `dbt-common` as a snapshot.
In other words, if `my-dev-branch` is updated on GitHub, those updates will not be reflected locally. +In order to pick up those updates, the `hatch` environment(s) will need to be rebuilt: ```shell -# Note: replace $strings with valid names - -# run all functional tests in a directory -python -m pytest tests/functional/$test_directory -# run all functional tests in a module -python -m pytest tests/functional/$test_dir_and_filename.py -# run all functional tests in a class -python -m pytest tests/functional/$test_dir_and_filename.py::$test_class_name -# run a specific functional test -python -m pytest tests/functional/$test_dir_and_filename.py::$test_class_name::$test__method_name +exit +hatch env prune +hatch shell ``` - ## Documentation ### User documentation diff --git a/dbt-tests-adapter/dbt/tests/__about__.py b/dbt-tests-adapter/dbt/tests/__about__.py index 70227976..08e0d06b 100644 --- a/dbt-tests-adapter/dbt/tests/__about__.py +++ b/dbt-tests-adapter/dbt/tests/__about__.py @@ -1 +1 @@ -version = "1.9.1" +version = "1.10.4" diff --git a/dbt-tests-adapter/dbt/tests/adapter/basic/files.py b/dbt-tests-adapter/dbt/tests/adapter/basic/files.py index 751b01a0..d0253a53 100644 --- a/dbt-tests-adapter/dbt/tests/adapter/basic/files.py +++ b/dbt-tests-adapter/dbt/tests/adapter/basic/files.py @@ -186,6 +186,10 @@ {{ config(materialized="incremental") }} """ +config_materialized_incremental_invalid_strategy = """ + {{ config(materialized="incremental", incremental_strategy="bad_strategy") }} +""" + config_materialized_var = """ {{ config(materialized=var("materialized_var", "table"))}} """ @@ -217,3 +221,6 @@ ephemeral_view_sql = config_materialized_view + model_ephemeral ephemeral_table_sql = config_materialized_table + model_ephemeral incremental_sql = config_materialized_incremental + model_incremental +incremental_invalid_strategy_sql = ( + config_materialized_incremental_invalid_strategy + model_incremental +) diff --git a/dbt-tests-adapter/dbt/tests/adapter/basic/test_incremental.py b/dbt-tests-adapter/dbt/tests/adapter/basic/test_incremental.py index fe04a5a1..57cc4db9 100644 --- a/dbt-tests-adapter/dbt/tests/adapter/basic/test_incremental.py +++ b/dbt-tests-adapter/dbt/tests/adapter/basic/test_incremental.py @@ -86,6 +86,45 @@ def test_incremental_not_schema_change(self, project): assert run_result == RunStatus.Success +class BaseIncrementalBadStrategy: + @pytest.fixture(scope="class") + def project_config_update(self): + return {"name": "incremental"} + + @pytest.fixture(scope="class") + def models(self): + return { + "incremental.sql": files.incremental_invalid_strategy_sql, + "schema.yml": files.schema_base_yml, + } + + @pytest.fixture(scope="class") + def seeds(self): + return {"base.csv": files.seeds_base_csv, "added.csv": files.seeds_added_csv} + + @pytest.fixture(autouse=True) + def clean_up(self, project): + yield + with project.adapter.connection_named("__test"): + relation = project.adapter.Relation.create( + database=project.database, schema=project.test_schema + ) + project.adapter.drop_schema(relation) + + def test_incremental_invalid_strategy(self, project): + # seed command + results = run_dbt(["seed"]) + assert len(results) == 2 + + # try to run the incremental model, it should fail on the first attempt + results = run_dbt(["run"], expect_pass=False) + assert len(results.results) == 1 + assert ( + 'dbt could not find an incremental strategy macro with the name "get_incremental_bad_strategy_sql"' + in results.results[0].message + ) + + class 
Testincremental(BaseIncremental): pass diff --git a/dbt-tests-adapter/dbt/tests/adapter/constraints/fixtures.py b/dbt-tests-adapter/dbt/tests/adapter/constraints/fixtures.py index cfbd5379..2a4f089b 100644 --- a/dbt-tests-adapter/dbt/tests/adapter/constraints/fixtures.py +++ b/dbt-tests-adapter/dbt/tests/adapter/constraints/fixtures.py @@ -363,7 +363,8 @@ - type: check expression: id >= 1 - type: foreign_key - expression: {schema}.foreign_key_model (id) + to: ref('foreign_key_model') + to_columns: ["id"] - type: unique data_tests: - unique diff --git a/dbt-tests-adapter/dbt/tests/adapter/dbt_show/test_dbt_show.py b/dbt-tests-adapter/dbt/tests/adapter/dbt_show/test_dbt_show.py index c00491aa..3f3d36c5 100644 --- a/dbt-tests-adapter/dbt/tests/adapter/dbt_show/test_dbt_show.py +++ b/dbt-tests-adapter/dbt/tests/adapter/dbt_show/test_dbt_show.py @@ -1,5 +1,6 @@ import pytest +from dbt_common.exceptions import DbtRuntimeError from dbt.tests.adapter.dbt_show import fixtures from dbt.tests.util import run_dbt @@ -47,9 +48,25 @@ def test_sql_header(self, project): run_dbt(["show", "--select", "sql_header", "--vars", "timezone: Asia/Kolkata"]) +class BaseShowDoesNotHandleDoubleLimit: + """see issue: https://github.com/dbt-labs/dbt-adapters/issues/207""" + + DATABASE_ERROR_MESSAGE = 'syntax error at or near "limit"' + + def test_double_limit_throws_syntax_error(self, project): + with pytest.raises(DbtRuntimeError) as e: + run_dbt(["show", "--limit", "1", "--inline", "select 1 limit 1"]) + + assert self.DATABASE_ERROR_MESSAGE in str(e) + + class TestPostgresShowSqlHeader(BaseShowSqlHeader): pass class TestPostgresShowLimit(BaseShowLimit): pass + + +class TestShowDoesNotHandleDoubleLimit(BaseShowDoesNotHandleDoubleLimit): + pass diff --git a/dbt-tests-adapter/dbt/tests/adapter/empty/_models.py b/dbt-tests-adapter/dbt/tests/adapter/empty/_models.py new file mode 100644 index 00000000..f5e684f7 --- /dev/null +++ b/dbt-tests-adapter/dbt/tests/adapter/empty/_models.py @@ -0,0 +1,111 @@ +model_input_sql = """ +select 1 as id +""" + +ephemeral_model_input_sql = """ +{{ config(materialized='ephemeral') }} +select 2 as id +""" + +raw_source_csv = """id +3 +""" + + +model_sql = """ +select * +from {{ ref('model_input') }} +union all +select * +from {{ ref('ephemeral_model_input') }} +union all +select * +from {{ source('seed_sources', 'raw_source') }} +""" + + +model_inline_sql = """ +select * from {{ source('seed_sources', 'raw_source') }} as raw_source +""" + +schema_sources_yml = """ +sources: + - name: seed_sources + schema: "{{ target.schema }}" + tables: + - name: raw_source +""" + + +SEED = """ +my_id,my_value +1,a +2,b +3,c +""".strip() + + +SCHEMA = """ +version: 2 + +seeds: + - name: my_seed + description: "This is my_seed" + columns: + - name: id + description: "This is my_seed.my_id" +""" + +CONTROL = """ +select * from {{ ref("my_seed") }} +""" + + +GET_COLUMNS_IN_RELATION = """ +{{ config(materialized="table") }} +{% set columns = adapter.get_columns_in_relation(ref("my_seed")) %} +select * from {{ ref("my_seed") }} +""" + + +ALTER_COLUMN_TYPE = """ +{{ config(materialized="table") }} +{{ alter_column_type(ref("my_seed"), "MY_VALUE", "varchar") }} +select * from {{ ref("my_seed") }} +""" + + +ALTER_RELATION_COMMENT = """ +{{ config( + materialized="table", + persist_docs={"relations": True}, +) }} +select * from {{ ref("my_seed") }} +""" + + +ALTER_COLUMN_COMMENT = """ +{{ config( + materialized="table", + persist_docs={"columns": True}, +) }} +select * from {{ ref("my_seed") }} +""" + + 
+ALTER_RELATION_ADD_REMOVE_COLUMNS = """ +{{ config(materialized="table") }} +{% set my_seed = adapter.Relation.create(this.database, this.schema, "my_seed", "table") %} +{% set my_column = api.Column("my_column", "varchar") %} +{% do alter_relation_add_remove_columns(my_seed, [my_column], none) %} +{% do alter_relation_add_remove_columns(my_seed, none, [my_column]) %} +select * from {{ ref("my_seed") }} +""" + + +TRUNCATE_RELATION = """ +{{ config(materialized="table") }} +{% set my_seed = adapter.Relation.create(this.database, this.schema, "my_seed", "table") %} +{{ truncate_relation(my_seed) }} +select * from {{ ref("my_seed") }} +""" diff --git a/dbt-tests-adapter/dbt/tests/adapter/empty/test_empty.py b/dbt-tests-adapter/dbt/tests/adapter/empty/test_empty.py index 2249d98d..de15bd5b 100644 --- a/dbt-tests-adapter/dbt/tests/adapter/empty/test_empty.py +++ b/dbt-tests-adapter/dbt/tests/adapter/empty/test_empty.py @@ -1,57 +1,23 @@ -import pytest - from dbt.tests.util import relation_from_name, run_dbt +import pytest - -model_input_sql = """ -select 1 as id -""" - -ephemeral_model_input_sql = """ -{{ config(materialized='ephemeral') }} -select 2 as id -""" - -raw_source_csv = """id -3 -""" - - -model_sql = """ -select * -from {{ ref('model_input') }} -union all -select * -from {{ ref('ephemeral_model_input') }} -union all -select * -from {{ source('seed_sources', 'raw_source') }} -""" - - -schema_sources_yml = """ -sources: - - name: seed_sources - schema: "{{ target.schema }}" - tables: - - name: raw_source -""" +from dbt.tests.adapter.empty import _models class BaseTestEmpty: @pytest.fixture(scope="class") def seeds(self): return { - "raw_source.csv": raw_source_csv, + "raw_source.csv": _models.raw_source_csv, } @pytest.fixture(scope="class") def models(self): return { - "model_input.sql": model_input_sql, - "ephemeral_model_input.sql": ephemeral_model_input_sql, - "model.sql": model_sql, - "sources.yml": schema_sources_yml, + "model_input.sql": _models.model_input_sql, + "ephemeral_model_input.sql": _models.ephemeral_model_input_sql, + "model.sql": _models.model_sql, + "sources.yml": _models.schema_sources_yml, } def assert_row_count(self, project, relation_name: str, expected_row_count: int): @@ -75,13 +41,9 @@ def test_run_with_empty(self, project): class BaseTestEmptyInlineSourceRef(BaseTestEmpty): @pytest.fixture(scope="class") def models(self): - model_sql = """ - select * from {{ source('seed_sources', 'raw_source') }} as raw_source - """ - return { - "model.sql": model_sql, - "sources.yml": schema_sources_yml, + "model.sql": _models.model_inline_sql, + "sources.yml": _models.schema_sources_yml, } def test_run_with_empty(self, project): @@ -92,4 +54,47 @@ def test_run_with_empty(self, project): class TestEmpty(BaseTestEmpty): + """ + Though we don't create these classes anymore, we need to keep this one in case an adapter wanted to import the test as-is to automatically run it. + We should consider adding a deprecation warning that suggests moving this into the concrete adapter and importing `BaseTestEmpty` instead. 
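For adapters maintained outside this repo, the intended pattern is exactly that: a thin subclass of the base case. A minimal sketch, assuming a hypothetical `dbt-myadapter` functional test suite (the class and package names are illustrative, not part of this change):

```python
from dbt.tests.adapter.empty.test_empty import BaseTestEmpty


class TestEmptyMyAdapter(BaseTestEmpty):
    # Inherits the seeds/models fixtures and test_run_with_empty as-is;
    # pytest collects it because the class name starts with "Test".
    pass
```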
+ """ + pass + + +class MetadataWithEmptyFlag: + @pytest.fixture(scope="class") + def seeds(self): + return {"my_seed.csv": _models.SEED} + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": _models.SCHEMA, + "control.sql": _models.CONTROL, + "get_columns_in_relation.sql": _models.GET_COLUMNS_IN_RELATION, + "alter_column_type.sql": _models.ALTER_COLUMN_TYPE, + "alter_relation_comment.sql": _models.ALTER_RELATION_COMMENT, + "alter_column_comment.sql": _models.ALTER_COLUMN_COMMENT, + "alter_relation_add_remove_columns.sql": _models.ALTER_RELATION_ADD_REMOVE_COLUMNS, + "truncate_relation.sql": _models.TRUNCATE_RELATION, + } + + @pytest.fixture(scope="class", autouse=True) + def setup(self, project): + run_dbt(["seed"]) + + @pytest.mark.parametrize( + "model", + [ + "control", + "get_columns_in_relation", + "alter_column_type", + "alter_relation_comment", + "alter_column_comment", + "alter_relation_add_remove_columns", + "truncate_relation", + ], + ) + def test_run(self, project, model): + run_dbt(["run", "--empty", "--select", model]) diff --git a/dbt-tests-adapter/dbt/tests/adapter/hooks/test_run_hooks.py b/dbt-tests-adapter/dbt/tests/adapter/hooks/test_run_hooks.py index 89565c70..0136e3f6 100644 --- a/dbt-tests-adapter/dbt/tests/adapter/hooks/test_run_hooks.py +++ b/dbt-tests-adapter/dbt/tests/adapter/hooks/test_run_hooks.py @@ -3,6 +3,7 @@ import pytest +from dbt_common.exceptions import DbtDatabaseError from dbt.tests.adapter.hooks import fixtures from dbt.tests.util import check_table_does_not_exist, run_dbt @@ -11,8 +12,8 @@ class BasePrePostRunHooks: @pytest.fixture(scope="function") def setUp(self, project): project.run_sql_file(project.test_data_dir / Path("seed_run.sql")) - project.run_sql(f"drop table if exists { project.test_schema }.schemas") - project.run_sql(f"drop table if exists { project.test_schema }.db_schemas") + project.run_sql(f"drop table if exists {project.test_schema}.schemas") + project.run_sql(f"drop table if exists {project.test_schema}.db_schemas") os.environ["TERM_TEST"] = "TESTING" @pytest.fixture(scope="class") @@ -158,7 +159,8 @@ def project_config_update(self): } def test_missing_column_pre_hook(self, project): - run_dbt(["run"], expect_pass=False) + with pytest.raises(DbtDatabaseError): + run_dbt(["run"], expect_pass=False) class TestAfterRunHooks(BaseAfterRunHooks): diff --git a/dbt-tests-adapter/dbt/tests/adapter/incremental/test_incremental_microbatch.py b/dbt-tests-adapter/dbt/tests/adapter/incremental/test_incremental_microbatch.py new file mode 100644 index 00000000..34078ac3 --- /dev/null +++ b/dbt-tests-adapter/dbt/tests/adapter/incremental/test_incremental_microbatch.py @@ -0,0 +1,93 @@ +from pprint import pformat + +import pytest + +from dbt.tests.util import relation_from_name, run_dbt + +try: + # patch_microbatch_end_time introduced in dbt 1.9.0 + from dbt.tests.util import patch_microbatch_end_time +except ImportError: + from freezegun import freeze_time as patch_microbatch_end_time + +_input_model_sql = """ +{{ config(materialized='table', event_time='event_time') }} +select 1 as id, TIMESTAMP '2020-01-01 00:00:00-0' as event_time +union all +select 2 as id, TIMESTAMP '2020-01-02 00:00:00-0' as event_time +union all +select 3 as id, TIMESTAMP '2020-01-03 00:00:00-0' as event_time +""" + +_microbatch_model_sql = """ +{{ config(materialized='incremental', incremental_strategy='microbatch', unique_key='id', event_time='event_time', batch_size='day', begin=modules.datetime.datetime(2020, 1, 1, 0, 0, 0)) }} +select * 
from {{ ref('input_model') }} +""" + + +class BaseMicrobatch: + @pytest.fixture(scope="class") + def microbatch_model_sql(self) -> str: + """ + This is the SQL that defines the microbatch model, including any {{ config(..) }} + """ + return _microbatch_model_sql + + @pytest.fixture(scope="class") + def input_model_sql(self) -> str: + """ + This is the SQL that defines the input model to the microbatch model, including any {{ config(..) }}. + event_time is a required configuration of this input + """ + return _input_model_sql + + @pytest.fixture(scope="class") + def insert_two_rows_sql(self, project) -> str: + test_schema_relation = project.adapter.Relation.create( + database=project.database, schema=project.test_schema + ) + return f"insert into {test_schema_relation}.input_model (id, event_time) values (4, TIMESTAMP '2020-01-04 00:00:00-0'), (5, TIMESTAMP '2020-01-05 00:00:00-0')" + + @pytest.fixture(scope="class") + def models(self, microbatch_model_sql, input_model_sql): + return { + "input_model.sql": input_model_sql, + "microbatch_model.sql": microbatch_model_sql, + } + + def assert_row_count(self, project, relation_name: str, expected_row_count: int): + relation = relation_from_name(project.adapter, relation_name) + result = project.run_sql(f"select * from {relation}", fetch="all") + + assert len(result) == expected_row_count, f"{relation_name}:{pformat(result)}" + + def test_run_with_event_time(self, project, insert_two_rows_sql): + # initial run -- backfills all data + with patch_microbatch_end_time("2020-01-03 13:57:00"): + run_dbt(["run"]) + self.assert_row_count(project, "microbatch_model", 3) + + # our partition grain is "day" so running the same day without new data should produce the same results + with patch_microbatch_end_time("2020-01-03 14:57:00"): + run_dbt(["run"]) + self.assert_row_count(project, "microbatch_model", 3) + + # add next two days of data + project.run_sql(insert_two_rows_sql) + + self.assert_row_count(project, "input_model", 5) + + # re-run without changing current time => no insert + with patch_microbatch_end_time("2020-01-03 14:57:00"): + run_dbt(["run", "--select", "microbatch_model"]) + self.assert_row_count(project, "microbatch_model", 3) + + # re-run by advancing time by one day changing current time => insert 1 row + with patch_microbatch_end_time("2020-01-04 14:57:00"): + run_dbt(["run", "--select", "microbatch_model"]) + self.assert_row_count(project, "microbatch_model", 4) + + # re-run by advancing time by one more day changing current time => insert 1 more row + with patch_microbatch_end_time("2020-01-05 14:57:00"): + run_dbt(["run", "--select", "microbatch_model"]) + self.assert_row_count(project, "microbatch_model", 5) diff --git a/dbt-tests-adapter/pyproject.toml b/dbt-tests-adapter/pyproject.toml index 20342078..d2f732b7 100644 --- a/dbt-tests-adapter/pyproject.toml +++ b/dbt-tests-adapter/pyproject.toml @@ -4,7 +4,7 @@ name = "dbt-tests-adapter" description = "The set of reusable tests and test fixtures used to test common functionality" readme = "README.md" keywords = ["dbt", "adapter", "adapters", "database", "elt", "dbt-core", "dbt Core", "dbt Cloud", "dbt Labs"] -requires-python = ">=3.8.0" +requires-python = ">=3.9.0" authors = [ { name = "dbt Labs", email = "info@dbtlabs.com" }, ] @@ -17,7 +17,6 @@ classifiers = [ "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "Operating System :: POSIX :: Linux", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming 
Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", @@ -31,15 +30,8 @@ dependencies = [ # `dbt-core` takes the packages below as dependencies, so they are unpinned to avoid conflicts "dbt-adapters", "pyyaml", + "freezegun", ] - -[project.optional-dependencies] -build = [ - "wheel", - "twine", - "check-wheel-contents", -] - [project.urls] Homepage = "https://github.com/dbt-labs/dbt-adapters" Documentation = "https://docs.getdbt.com" @@ -62,7 +54,11 @@ include = ["dbt/tests", "dbt/__init__.py"] [tool.hatch.envs.build] detached = true -features = ["build"] +dependencies = [ + "wheel", + "twine", + "check-wheel-contents", +] [tool.hatch.envs.build.scripts] check-all = [ "- check-wheel", diff --git a/dbt/adapters/__about__.py b/dbt/adapters/__about__.py index 35ec9a37..bd1378f4 100644 --- a/dbt/adapters/__about__.py +++ b/dbt/adapters/__about__.py @@ -1 +1 @@ -version = "1.3.1" +version = "1.10.0" diff --git a/dbt/adapters/base/__init__.py b/dbt/adapters/base/__init__.py index ade1af3d..c30dd01f 100644 --- a/dbt/adapters/base/__init__.py +++ b/dbt/adapters/base/__init__.py @@ -12,4 +12,5 @@ BaseRelation, RelationType, SchemaSearchMap, + AdapterTrackingRelationInfo, ) diff --git a/dbt/adapters/base/column.py b/dbt/adapters/base/column.py index e2e6e1e0..195684a4 100644 --- a/dbt/adapters/base/column.py +++ b/dbt/adapters/base/column.py @@ -123,9 +123,6 @@ def numeric_type(cls, dtype: str, precision: Any, scale: Any) -> str: else: return "{}({},{})".format(dtype, precision, scale) - def __repr__(self) -> str: - return "<Column {} ({})>".format(self.name, self.data_type) - @classmethod def from_description(cls, name: str, raw_data_type: str) -> "Column": match = re.match(r"([^(]+)(\([^)]+\))?", raw_data_type) diff --git a/dbt/adapters/base/impl.py b/dbt/adapters/base/impl.py index ec590137..44817a18 100644 --- a/dbt/adapters/base/impl.py +++ b/dbt/adapters/base/impl.py @@ -4,6 +4,7 @@ from contextlib import contextmanager from datetime import datetime from enum import Enum +from importlib import import_module from multiprocessing.context import SpawnContext from typing import ( Any, @@ -22,8 +23,8 @@ Union, TYPE_CHECKING, ) - import pytz +from dbt_common.behavior_flags import Behavior, BehaviorFlag from dbt_common.clients.jinja import CallableMacroGenerator from dbt_common.contracts.constraints import ( ColumnLevelConstraint, @@ -54,18 +55,20 @@ BaseConnectionManager, Connection, ) -from dbt.adapters.base.meta import AdapterMeta, available +from dbt.adapters.base.meta import AdapterMeta, available, available_property from dbt.adapters.base.relation import ( BaseRelation, ComponentName, InformationSchema, SchemaSearchMap, + AdapterTrackingRelationInfo, ) from dbt.adapters.cache import RelationsCache, _make_ref_key_dict from dbt.adapters.capability import Capability, CapabilityDict from dbt.adapters.contracts.connection import Credentials from dbt.adapters.contracts.macros import MacroResolverProtocol from dbt.adapters.contracts.relation import RelationConfig + from dbt.adapters.events.types import ( CacheMiss, CatalogGenerationError, @@ -82,7 +85,6 @@ QuoteConfigTypeError, RelationReturnedMultipleResultsError, RenameToNoneAttemptedError, - SnapshotTargetIncompleteError, SnapshotTargetNotSnapshotTableError, UnexpectedNonTimestampError, ) @@ -96,6 +98,13 @@ GET_CATALOG_RELATIONS_MACRO_NAME = "get_catalog_relations" FRESHNESS_MACRO_NAME = "collect_freshness" GET_RELATION_LAST_MODIFIED_MACRO_NAME = "get_relation_last_modified" +DEFAULT_BASE_BEHAVIOR_FLAGS = [ + { + "name": 
"require_batched_execution_for_custom_microbatch_strategy", + "default": False, + "docs_url": "https://docs.getdbt.com/docs/build/incremental-microbatch", + } +] class ConstraintSupport(str, Enum): @@ -261,7 +270,7 @@ class BaseAdapter(metaclass=AdapterMeta): MAX_SCHEMA_METADATA_RELATIONS = 100 - # This static member variable can be overriden in concrete adapter + # This static member variable can be overridden in concrete adapter # implementations to indicate adapter support for optional capabilities. _capabilities = CapabilityDict({}) @@ -271,6 +280,7 @@ def __init__(self, config, mp_context: SpawnContext) -> None: self.connections = self.ConnectionManager(config, mp_context) self._macro_resolver: Optional[MacroResolverProtocol] = None self._macro_context_generator: Optional[MacroContextGeneratorCallable] = None + self.behavior = DEFAULT_BASE_BEHAVIOR_FLAGS # type: ignore ### # Methods to set / access a macro resolver @@ -291,6 +301,30 @@ def set_macro_context_generator( ) -> None: self._macro_context_generator = macro_context_generator + @available_property + def behavior(self) -> Behavior: + return self._behavior + + @behavior.setter # type: ignore + def behavior(self, flags: List[BehaviorFlag]) -> None: + flags.extend(self._behavior_flags) + + # we don't always get project flags, for example, the project file is not loaded during `dbt debug` + # in that case, load the default values for behavior flags to avoid compilation errors + # this mimics not loading a project file, or not specifying flags in a project file + user_overrides = getattr(self.config, "flags", {}) + + self._behavior = Behavior(flags, user_overrides) + + @property + def _behavior_flags(self) -> List[BehaviorFlag]: + """ + This method should be overwritten by adapter maintainers to provide platform-specific flags + + The BaseAdapter should NOT include any global flags here as those should be defined via DEFAULT_BASE_BEHAVIOR_FLAGS + """ + return [] + ### # Methods that pass through to the connection manager ### @@ -740,7 +774,9 @@ def get_missing_columns( return [col for (col_name, col) in from_columns.items() if col_name in missing_columns] @available.parse_none - def valid_snapshot_target(self, relation: BaseRelation) -> None: + def valid_snapshot_target( + self, relation: BaseRelation, column_names: Optional[Dict[str, str]] = None + ) -> None: """Ensure that the target relation is valid, by making sure it has the expected columns. @@ -758,21 +794,16 @@ def valid_snapshot_target(self, relation: BaseRelation) -> None: columns = self.get_columns_in_relation(relation) names = set(c.name.lower() for c in columns) - expanded_keys = ("scd_id", "valid_from", "valid_to") - extra = [] missing = [] - for legacy in expanded_keys: - desired = "dbt_" + legacy + # Note: we're not checking dbt_updated_at here because it's not + # always present. 
+ for column in ("dbt_scd_id", "dbt_valid_from", "dbt_valid_to"): + desired = column_names[column] if column_names else column if desired not in names: missing.append(desired) - if legacy in names: - extra.append(legacy) if missing: - if extra: - raise SnapshotTargetIncompleteError(extra, missing) - else: - raise SnapshotTargetNotSnapshotTableError(missing) + raise SnapshotTargetNotSnapshotTableError(missing) @available.parse_none def expand_target_column_types( @@ -1549,10 +1580,31 @@ def valid_incremental_strategies(self): return ["append"] def builtin_incremental_strategies(self): - return ["append", "delete+insert", "merge", "insert_overwrite"] + """ + List of possible builtin strategies for adapters + + Microbatch is added by _default_. It is only not added when the behavior flag + `require_batched_execution_for_custom_microbatch_strategy` is True. + """ + builtin_strategies = ["append", "delete+insert", "merge", "insert_overwrite"] + if not self.behavior.require_batched_execution_for_custom_microbatch_strategy.no_warn: + builtin_strategies.append("microbatch") + + return builtin_strategies @available.parse_none def get_incremental_strategy_macro(self, model_context, strategy: str): + """Gets the macro for the given incremental strategy. + + Additionally some validations are done: + 1. Assert that if the given strategy is a "builtin" strategy, then it must + also be defined as a "valid" strategy for the associated adapter + 2. Assert that the incremental strategy exists in the model context + + Notably, something be defined by the adapter as "valid" without it being + a "builtin", and nothing will break (and that is desirable). + """ + # Construct macro_name from strategy name if strategy is None: strategy = "default" @@ -1602,8 +1654,13 @@ def render_column_constraint(cls, constraint: ColumnLevelConstraint) -> Optional rendered_column_constraint = f"unique {constraint_expression}" elif constraint.type == ConstraintType.primary_key: rendered_column_constraint = f"primary key {constraint_expression}" - elif constraint.type == ConstraintType.foreign_key and constraint_expression: - rendered_column_constraint = f"references {constraint_expression}" + elif constraint.type == ConstraintType.foreign_key: + if constraint.to and constraint.to_columns: + rendered_column_constraint = ( + f"references {constraint.to} ({', '.join(constraint.to_columns)})" + ) + elif constraint_expression: + rendered_column_constraint = f"references {constraint_expression}" elif constraint.type == ConstraintType.custom and constraint_expression: rendered_column_constraint = constraint_expression @@ -1682,20 +1739,29 @@ def render_model_constraint(cls, constraint: ModelLevelConstraint) -> Optional[s rendering.""" constraint_prefix = f"constraint {constraint.name} " if constraint.name else "" column_list = ", ".join(constraint.columns) + rendered_model_constraint = None + if constraint.type == ConstraintType.check and constraint.expression: - return f"{constraint_prefix}check ({constraint.expression})" + rendered_model_constraint = f"{constraint_prefix}check ({constraint.expression})" elif constraint.type == ConstraintType.unique: constraint_expression = f" {constraint.expression}" if constraint.expression else "" - return f"{constraint_prefix}unique{constraint_expression} ({column_list})" + rendered_model_constraint = ( + f"{constraint_prefix}unique{constraint_expression} ({column_list})" + ) elif constraint.type == ConstraintType.primary_key: constraint_expression = f" {constraint.expression}" if 
constraint.expression else "" - return f"{constraint_prefix}primary key{constraint_expression} ({column_list})" - elif constraint.type == ConstraintType.foreign_key and constraint.expression: - return f"{constraint_prefix}foreign key ({column_list}) references {constraint.expression}" + rendered_model_constraint = ( + f"{constraint_prefix}primary key{constraint_expression} ({column_list})" + ) + elif constraint.type == ConstraintType.foreign_key: + if constraint.to and constraint.to_columns: + rendered_model_constraint = f"{constraint_prefix}foreign key ({column_list}) references {constraint.to} ({', '.join(constraint.to_columns)})" + elif constraint.expression: + rendered_model_constraint = f"{constraint_prefix}foreign key ({column_list}) references {constraint.expression}" elif constraint.type == ConstraintType.custom and constraint.expression: - return f"{constraint_prefix}{constraint.expression}" - else: - return None + rendered_model_constraint = f"{constraint_prefix}{constraint.expression}" + + return rendered_model_constraint @classmethod def capabilities(cls) -> CapabilityDict: @@ -1705,6 +1771,30 @@ def capabilities(cls) -> CapabilityDict: def supports(cls, capability: Capability) -> bool: return bool(cls.capabilities()[capability]) + @classmethod + def get_adapter_run_info(cls, config: RelationConfig) -> AdapterTrackingRelationInfo: + adapter_class_name, *_ = cls.__name__.split("Adapter") + adapter_name = adapter_class_name.lower() + + if adapter_name == "base": + adapter_version = "" + else: + adapter_version = import_module(f"dbt.adapters.{adapter_name}.__version__").version + + return AdapterTrackingRelationInfo( + adapter_name=adapter_name, + base_adapter_version=import_module("dbt.adapters.__about__").version, + adapter_version=adapter_version, + model_adapter_details=cls._get_adapter_specific_run_info(config), + ) + + @classmethod + def _get_adapter_specific_run_info(cls, config) -> Dict[str, Any]: + """ + Adapter maintainers should overwrite this method to return any run metadata that should be captured during a run. + """ + return {} + COLUMNS_EQUAL_SQL = """ with diff_count as ( diff --git a/dbt/adapters/base/meta.py b/dbt/adapters/base/meta.py index ca7aef9f..e522a056 100644 --- a/dbt/adapters/base/meta.py +++ b/dbt/adapters/base/meta.py @@ -92,6 +92,25 @@ def parse_list(self, func: Callable) -> Callable: available = _Available() +class available_property(property): + """ + This supports making dynamic properties (`@property`) available in the jinja context. + + We use `@available` to make methods available in the jinja context, but this mechanism relies on the method being callable. + Intuitively, we should be able to use both `@available` and `@property` to create a dynamic property that's available in the jinja context. + + Using the `@property` decorator as the inner decorator supplies `@available` with something that is not callable. + Instead of returning the method, `@property` returns the value itself, not the method that is called to create the value. + + Using the `@available` decorator as the inner decorator adds `_is_available_ = True` to the function. + However, when the `@property` decorator executes, it returns a `property` object which does not have the `_is_available_` attribute. + + This decorator solves this problem by simply adding `_is_available_ = True` as an attribute on the `property` built-in. 
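As an illustration of how an adapter might use the decorator (the property name and return value below are hypothetical, not part of this change):

```python
from dbt.adapters.base.impl import BaseAdapter
from dbt.adapters.base.meta import available_property


class MyAdapter(BaseAdapter):
    @available_property
    def default_file_format(self) -> str:
        # Evaluated lazily like a normal @property, but also exposed to the
        # jinja context as {{ adapter.default_file_format }} because
        # available_property carries _is_available_ = True.
        return "parquet"  # hypothetical platform default
```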
+ """ + + _is_available_ = True + + class AdapterMeta(abc.ABCMeta): _available_: FrozenSet[str] _parse_replacements_: Dict[str, Callable] diff --git a/dbt/adapters/base/relation.py b/dbt/adapters/base/relation.py index 210a2dcd..7d4888e4 100644 --- a/dbt/adapters/base/relation.py +++ b/dbt/adapters/base/relation.py @@ -1,10 +1,12 @@ from collections.abc import Hashable from dataclasses import dataclass, field +from datetime import datetime from typing import ( Any, Dict, FrozenSet, Iterator, + List, Optional, Set, Tuple, @@ -36,6 +38,13 @@ SerializableIterable = Union[Tuple, FrozenSet] +@dataclass +class EventTimeFilter(FakeAPIObject): + field_name: str + start: Optional[datetime] = None + end: Optional[datetime] = None + + @dataclass(frozen=True, eq=False, repr=False) class BaseRelation(FakeAPIObject, Hashable): path: Path @@ -47,6 +56,7 @@ class BaseRelation(FakeAPIObject, Hashable): quote_policy: Policy = field(default_factory=lambda: Policy()) dbt_created: bool = False limit: Optional[int] = None + event_time_filter: Optional[EventTimeFilter] = None require_alias: bool = ( True # used to govern whether to add an alias when render_limited is called ) @@ -208,14 +218,19 @@ def render(self) -> str: # if there is nothing set, this will return the empty string. return ".".join(part for _, part in self._render_iterator() if part is not None) - def _render_limited_alias(self) -> str: + def _render_subquery_alias(self, namespace: str) -> str: """Some databases require an alias for subqueries (postgres, mysql) for all others we want to avoid adding an alias as it has the potential to introduce issues with the query if the user also defines an alias. """ if self.require_alias: - return f" _dbt_limit_subq_{self.table}" + return f" _dbt_{namespace}_subq_{self.table}" return "" + def _render_limited_alias( + self, + ) -> str: + return self._render_subquery_alias(namespace="limit") + def render_limited(self) -> str: rendered = self.render() if self.limit is None: @@ -225,6 +240,31 @@ def render_limited(self) -> str: else: return f"(select * from {rendered} limit {self.limit}){self._render_limited_alias()}" + def render_event_time_filtered(self, rendered: Optional[str] = None) -> str: + rendered = rendered or self.render() + if self.event_time_filter is None: + return rendered + + filter = self._render_event_time_filtered(self.event_time_filter) + if not filter: + return rendered + + return f"(select * from {rendered} where {filter}){self._render_subquery_alias(namespace='et_filter')}" + + def _render_event_time_filtered(self, event_time_filter: EventTimeFilter) -> str: + """ + Returns "" if start and end are both None + """ + filter = "" + if event_time_filter.start and event_time_filter.end: + filter = f"{event_time_filter.field_name} >= '{event_time_filter.start}' and {event_time_filter.field_name} < '{event_time_filter.end}'" + elif event_time_filter.start: + filter = f"{event_time_filter.field_name} >= '{event_time_filter.start}'" + elif event_time_filter.end: + filter = f"{event_time_filter.field_name} < '{event_time_filter.end}'" + + return filter + def quoted(self, identifier): return "{quote_char}{identifier}{quote_char}".format( quote_char=self.quote_character, @@ -240,13 +280,18 @@ def create_ephemeral_from( cls: Type[Self], relation_config: RelationConfig, limit: Optional[int] = None, + event_time_filter: Optional[EventTimeFilter] = None, ) -> Self: - # Note that ephemeral models are based on the name. 
- identifier = cls.add_ephemeral_prefix(relation_config.name) + # Note that ephemeral models are based on the identifier, which will + # point to the model's alias if one exists and otherwise fall back to + # the filename. This is intended to give the user more control over + # the way that the CTE name is constructed + identifier = cls.add_ephemeral_prefix(relation_config.identifier) return cls.create( type=cls.CTE, identifier=identifier, limit=limit, + event_time_filter=event_time_filter, ).quote(identifier=False) @classmethod @@ -297,6 +342,16 @@ def create( ) return cls.from_dict(kwargs) + @classmethod + def scd_args(cls: Type[Self], primary_key: Union[str, List[str]], updated_at) -> List[str]: + scd_args = [] + if isinstance(primary_key, list): + scd_args.extend(primary_key) + else: + scd_args.append(primary_key) + scd_args.append(updated_at) + return scd_args + @property def can_be_renamed(self) -> bool: return self.type in self.renameable_relations @@ -312,7 +367,14 @@ def __hash__(self) -> int: return hash(self.render()) def __str__(self) -> str: - return self.render() if self.limit is None else self.render_limited() + rendered = self.render() if self.limit is None else self.render_limited() + + # Limited subquery is wrapped by the event time filter subquery, and not the other way around. + # This is because in the context of resolving limited refs, we care more about performance than reliably producing a sample of a certain size. + if self.event_time_filter: + rendered = self.render_event_time_filtered(rendered) + + return rendered @property def database(self) -> Optional[str]: @@ -480,3 +542,11 @@ def flatten(self, allow_multiple_databases: bool = False) -> "SchemaSearchMap": ) return new + + +@dataclass(frozen=True, eq=False, repr=False) +class AdapterTrackingRelationInfo(FakeAPIObject, Hashable): + adapter_name: str + base_adapter_version: str + adapter_version: str + model_adapter_details: Any diff --git a/dbt/adapters/contracts/connection.py b/dbt/adapters/contracts/connection.py index e3baf284..2d10c9a3 100644 --- a/dbt/adapters/contracts/connection.py +++ b/dbt/adapters/contracts/connection.py @@ -41,6 +41,7 @@ class AdapterResponse(dbtClassMixin): _message: str code: Optional[str] = None rows_affected: Optional[int] = None + query_id: Optional[str] = None def __str__(self): return self._message diff --git a/dbt/adapters/events/adapter_types.proto b/dbt/adapters/events/adapter_types.proto index 69d64325..70b4e1e3 100644 --- a/dbt/adapters/events/adapter_types.proto +++ b/dbt/adapters/events/adapter_types.proto @@ -266,6 +266,7 @@ message SQLQueryStatus { AdapterNodeInfo node_info = 1; string status = 2; float elapsed = 3; + string query_id = 4; } message SQLQueryStatusMsg { diff --git a/dbt/adapters/events/adapter_types_pb2.py b/dbt/adapters/events/adapter_types_pb2.py index bfd44080..6a411842 100644 --- a/dbt/adapters/events/adapter_types_pb2.py +++ b/dbt/adapters/events/adapter_types_pb2.py @@ -1,11 +1,22 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE # source: adapter_types.proto +# Protobuf Python Version: 5.28.3 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 28, + 3, + '', + 'adapter_types.proto' +) # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -15,16 +26,16 @@ from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x61\x64\x61pter_types.proto\x12\x0bproto_types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xab\x02\n\x16\x41\x64\x61pterCommonEventInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\x0b\n\x03msg\x18\x03 \x01(\t\x12\r\n\x05level\x18\x04 \x01(\t\x12\x15\n\rinvocation_id\x18\x05 \x01(\t\x12\x0b\n\x03pid\x18\x06 \x01(\x05\x12\x0e\n\x06thread\x18\x07 \x01(\t\x12&\n\x02ts\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12=\n\x05\x65xtra\x18\t \x03(\x0b\x32..proto_types.AdapterCommonEventInfo.ExtraEntry\x12\x10\n\x08\x63\x61tegory\x18\n \x01(\t\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"]\n\x13\x41\x64\x61pterNodeRelation\x12\x10\n\x08\x64\x61tabase\x18\n \x01(\t\x12\x0e\n\x06schema\x18\x0b \x01(\t\x12\r\n\x05\x61lias\x18\x0c \x01(\t\x12\x15\n\rrelation_name\x18\r \x01(\t\"\x9f\x02\n\x0f\x41\x64\x61pterNodeInfo\x12\x11\n\tnode_path\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x11\n\tunique_id\x18\x03 \x01(\t\x12\x15\n\rresource_type\x18\x04 \x01(\t\x12\x14\n\x0cmaterialized\x18\x05 \x01(\t\x12\x13\n\x0bnode_status\x18\x06 \x01(\t\x12\x17\n\x0fnode_started_at\x18\x07 \x01(\t\x12\x18\n\x10node_finished_at\x18\x08 \x01(\t\x12%\n\x04meta\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x37\n\rnode_relation\x18\n \x01(\x0b\x32 .proto_types.AdapterNodeRelation\"G\n\x0fReferenceKeyMsg\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12\x12\n\nidentifier\x18\x03 \x01(\t\"?\n\x19\x41\x64\x61pterDeprecationWarning\x12\x10\n\x08old_name\x18\x01 \x01(\t\x12\x10\n\x08new_name\x18\x02 \x01(\t\"\x87\x01\n\x1c\x41\x64\x61pterDeprecationWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.AdapterDeprecationWarning\"!\n\x1f\x43ollectFreshnessReturnSignature\"\x93\x01\n\"CollectFreshnessReturnSignatureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.CollectFreshnessReturnSignature\"\x8e\x01\n\x11\x41\x64\x61pterEventDebug\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"w\n\x14\x41\x64\x61pterEventDebugMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventDebug\"\x8d\x01\n\x10\x41\x64\x61pterEventInfo\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 
\x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"u\n\x13\x41\x64\x61pterEventInfoMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.AdapterEventInfo\"\x90\x01\n\x13\x41\x64\x61pterEventWarning\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"{\n\x16\x41\x64\x61pterEventWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.AdapterEventWarning\"\xa0\x01\n\x11\x41\x64\x61pterEventError\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x10\n\x08\x65xc_info\x18\x05 \x01(\t\"w\n\x14\x41\x64\x61pterEventErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventError\"f\n\rNewConnection\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"o\n\x10NewConnectionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.NewConnection\"=\n\x10\x43onnectionReused\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x16\n\x0eorig_conn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionReusedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionReused\"0\n\x1b\x43onnectionLeftOpenInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x8b\x01\n\x1e\x43onnectionLeftOpenInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.ConnectionLeftOpenInCleanup\".\n\x19\x43onnectionClosedInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x87\x01\n\x1c\x43onnectionClosedInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.ConnectionClosedInCleanup\"f\n\x0eRollbackFailed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"q\n\x11RollbackFailedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.RollbackFailed\"V\n\x10\x43onnectionClosed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionClosedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionClosed\"X\n\x12\x43onnectionLeftOpen\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"y\n\x15\x43onnectionLeftOpenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.ConnectionLeftOpen\"N\n\x08Rollback\x12/\n\tnode_info\x18\x01 
\x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"e\n\x0bRollbackMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.Rollback\"@\n\tCacheMiss\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\"g\n\x0c\x43\x61\x63heMissMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.CacheMiss\"b\n\rListRelations\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12/\n\trelations\x18\x03 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10ListRelationsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ListRelations\"g\n\x0e\x43onnectionUsed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"q\n\x11\x43onnectionUsedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.ConnectionUsed\"[\n\x08SQLQuery\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x0b\n\x03sql\x18\x03 \x01(\t\"e\n\x0bSQLQueryMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.SQLQuery\"b\n\x0eSQLQueryStatus\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0e\n\x06status\x18\x02 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x03 \x01(\x02\"q\n\x11SQLQueryStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SQLQueryStatus\"O\n\tSQLCommit\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"g\n\x0cSQLCommitMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.SQLCommit\"a\n\rColTypeChange\x12\x11\n\torig_type\x18\x01 \x01(\t\x12\x10\n\x08new_type\x18\x02 \x01(\t\x12+\n\x05table\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10\x43olTypeChangeMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ColTypeChange\"@\n\x0eSchemaCreation\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"q\n\x11SchemaCreationMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SchemaCreation\"<\n\nSchemaDrop\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"i\n\rSchemaDropMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.SchemaDrop\"\xde\x01\n\x0b\x43\x61\x63heAction\x12\x0e\n\x06\x61\x63tion\x18\x01 \x01(\t\x12-\n\x07ref_key\x18\x02 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_2\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_3\x18\x04 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12.\n\x08ref_list\x18\x05 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"k\n\x0e\x43\x61\x63heActionMsg\x12\x31\n\x04info\x18\x01 
\x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.CacheAction\"\x98\x01\n\x0e\x43\x61\x63heDumpGraph\x12\x33\n\x04\x64ump\x18\x01 \x03(\x0b\x32%.proto_types.CacheDumpGraph.DumpEntry\x12\x14\n\x0c\x62\x65\x66ore_after\x18\x02 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\x1a+\n\tDumpEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"q\n\x11\x43\x61\x63heDumpGraphMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CacheDumpGraph\"B\n\x11\x41\x64\x61pterRegistered\x12\x14\n\x0c\x61\x64\x61pter_name\x18\x01 \x01(\t\x12\x17\n\x0f\x61\x64\x61pter_version\x18\x02 \x01(\t\"w\n\x14\x41\x64\x61pterRegisteredMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterRegistered\"!\n\x12\x41\x64\x61pterImportError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"y\n\x15\x41\x64\x61pterImportErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.AdapterImportError\"#\n\x0fPluginLoadError\x12\x10\n\x08\x65xc_info\x18\x01 \x01(\t\"s\n\x12PluginLoadErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.PluginLoadError\"a\n\x14NewConnectionOpening\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x18\n\x10\x63onnection_state\x18\x02 \x01(\t\"}\n\x17NewConnectionOpeningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.NewConnectionOpening\"8\n\rCodeExecution\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x14\n\x0c\x63ode_content\x18\x02 \x01(\t\"o\n\x10\x43odeExecutionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.CodeExecution\"6\n\x13\x43odeExecutionStatus\x12\x0e\n\x06status\x18\x01 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x02 \x01(\x02\"{\n\x16\x43odeExecutionStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.CodeExecutionStatus\"%\n\x16\x43\x61talogGenerationError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"\x81\x01\n\x19\x43\x61talogGenerationErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.CatalogGenerationError\"-\n\x13WriteCatalogFailure\x12\x16\n\x0enum_exceptions\x18\x01 \x01(\x05\"{\n\x16WriteCatalogFailureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.WriteCatalogFailure\"\x1e\n\x0e\x43\x61talogWritten\x12\x0c\n\x04path\x18\x01 \x01(\t\"q\n\x11\x43\x61talogWrittenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CatalogWritten\"\x14\n\x12\x43\x61nnotGenerateDocs\"y\n\x15\x43\x61nnotGenerateDocsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.CannotGenerateDocs\"\x11\n\x0f\x42uildingCatalog\"s\n\x12\x42uildingCatalogMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\x1c.proto_types.BuildingCatalog\"-\n\x18\x44\x61tabaseErrorRunningHook\x12\x11\n\thook_type\x18\x01 \x01(\t\"\x85\x01\n\x1b\x44\x61tabaseErrorRunningHookMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.DatabaseErrorRunningHook\"4\n\x0cHooksRunning\x12\x11\n\tnum_hooks\x18\x01 \x01(\x05\x12\x11\n\thook_type\x18\x02 \x01(\t\"m\n\x0fHooksRunningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.HooksRunning\"T\n\x14\x46inishedRunningStats\x12\x11\n\tstat_line\x18\x01 \x01(\t\x12\x11\n\texecution\x18\x02 \x01(\t\x12\x16\n\x0e\x65xecution_time\x18\x03 \x01(\x02\"}\n\x17\x46inishedRunningStatsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.FinishedRunningStats\"<\n\x15\x43onstraintNotEnforced\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x7f\n\x18\x43onstraintNotEnforcedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.ConstraintNotEnforced\"=\n\x16\x43onstraintNotSupported\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x81\x01\n\x19\x43onstraintNotSupportedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.ConstraintNotSupported\"%\n\x10TypeCodeNotFound\x12\x11\n\ttype_code\x18\x01 \x01(\x05\"u\n\x13TypeCodeNotFoundMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.TypeCodeNotFoundb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x61\x64\x61pter_types.proto\x12\x0bproto_types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xab\x02\n\x16\x41\x64\x61pterCommonEventInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\x0b\n\x03msg\x18\x03 \x01(\t\x12\r\n\x05level\x18\x04 \x01(\t\x12\x15\n\rinvocation_id\x18\x05 \x01(\t\x12\x0b\n\x03pid\x18\x06 \x01(\x05\x12\x0e\n\x06thread\x18\x07 \x01(\t\x12&\n\x02ts\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12=\n\x05\x65xtra\x18\t \x03(\x0b\x32..proto_types.AdapterCommonEventInfo.ExtraEntry\x12\x10\n\x08\x63\x61tegory\x18\n \x01(\t\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"]\n\x13\x41\x64\x61pterNodeRelation\x12\x10\n\x08\x64\x61tabase\x18\n \x01(\t\x12\x0e\n\x06schema\x18\x0b \x01(\t\x12\r\n\x05\x61lias\x18\x0c \x01(\t\x12\x15\n\rrelation_name\x18\r \x01(\t\"\x9f\x02\n\x0f\x41\x64\x61pterNodeInfo\x12\x11\n\tnode_path\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x11\n\tunique_id\x18\x03 \x01(\t\x12\x15\n\rresource_type\x18\x04 \x01(\t\x12\x14\n\x0cmaterialized\x18\x05 \x01(\t\x12\x13\n\x0bnode_status\x18\x06 \x01(\t\x12\x17\n\x0fnode_started_at\x18\x07 \x01(\t\x12\x18\n\x10node_finished_at\x18\x08 \x01(\t\x12%\n\x04meta\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x37\n\rnode_relation\x18\n \x01(\x0b\x32 .proto_types.AdapterNodeRelation\"G\n\x0fReferenceKeyMsg\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12\x12\n\nidentifier\x18\x03 \x01(\t\"?\n\x19\x41\x64\x61pterDeprecationWarning\x12\x10\n\x08old_name\x18\x01 
\x01(\t\x12\x10\n\x08new_name\x18\x02 \x01(\t\"\x87\x01\n\x1c\x41\x64\x61pterDeprecationWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.AdapterDeprecationWarning\"!\n\x1f\x43ollectFreshnessReturnSignature\"\x93\x01\n\"CollectFreshnessReturnSignatureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.CollectFreshnessReturnSignature\"\x8e\x01\n\x11\x41\x64\x61pterEventDebug\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"w\n\x14\x41\x64\x61pterEventDebugMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventDebug\"\x8d\x01\n\x10\x41\x64\x61pterEventInfo\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"u\n\x13\x41\x64\x61pterEventInfoMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.AdapterEventInfo\"\x90\x01\n\x13\x41\x64\x61pterEventWarning\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"{\n\x16\x41\x64\x61pterEventWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.AdapterEventWarning\"\xa0\x01\n\x11\x41\x64\x61pterEventError\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x10\n\x08\x65xc_info\x18\x05 \x01(\t\"w\n\x14\x41\x64\x61pterEventErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventError\"f\n\rNewConnection\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"o\n\x10NewConnectionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.NewConnection\"=\n\x10\x43onnectionReused\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x16\n\x0eorig_conn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionReusedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionReused\"0\n\x1b\x43onnectionLeftOpenInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x8b\x01\n\x1e\x43onnectionLeftOpenInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.ConnectionLeftOpenInCleanup\".\n\x19\x43onnectionClosedInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x87\x01\n\x1c\x43onnectionClosedInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32&.proto_types.ConnectionClosedInCleanup\"f\n\x0eRollbackFailed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"q\n\x11RollbackFailedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.RollbackFailed\"V\n\x10\x43onnectionClosed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionClosedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionClosed\"X\n\x12\x43onnectionLeftOpen\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"y\n\x15\x43onnectionLeftOpenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.ConnectionLeftOpen\"N\n\x08Rollback\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"e\n\x0bRollbackMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.Rollback\"@\n\tCacheMiss\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\"g\n\x0c\x43\x61\x63heMissMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.CacheMiss\"b\n\rListRelations\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12/\n\trelations\x18\x03 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10ListRelationsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ListRelations\"g\n\x0e\x43onnectionUsed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"q\n\x11\x43onnectionUsedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.ConnectionUsed\"[\n\x08SQLQuery\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x0b\n\x03sql\x18\x03 \x01(\t\"e\n\x0bSQLQueryMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.SQLQuery\"t\n\x0eSQLQueryStatus\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0e\n\x06status\x18\x02 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x03 \x01(\x02\x12\x10\n\x08query_id\x18\x04 \x01(\t\"q\n\x11SQLQueryStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SQLQueryStatus\"O\n\tSQLCommit\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"g\n\x0cSQLCommitMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.SQLCommit\"a\n\rColTypeChange\x12\x11\n\torig_type\x18\x01 \x01(\t\x12\x10\n\x08new_type\x18\x02 \x01(\t\x12+\n\x05table\x18\x03 
\x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10\x43olTypeChangeMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ColTypeChange\"@\n\x0eSchemaCreation\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"q\n\x11SchemaCreationMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SchemaCreation\"<\n\nSchemaDrop\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"i\n\rSchemaDropMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.SchemaDrop\"\xde\x01\n\x0b\x43\x61\x63heAction\x12\x0e\n\x06\x61\x63tion\x18\x01 \x01(\t\x12-\n\x07ref_key\x18\x02 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_2\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_3\x18\x04 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12.\n\x08ref_list\x18\x05 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"k\n\x0e\x43\x61\x63heActionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.CacheAction\"\x98\x01\n\x0e\x43\x61\x63heDumpGraph\x12\x33\n\x04\x64ump\x18\x01 \x03(\x0b\x32%.proto_types.CacheDumpGraph.DumpEntry\x12\x14\n\x0c\x62\x65\x66ore_after\x18\x02 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\x1a+\n\tDumpEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"q\n\x11\x43\x61\x63heDumpGraphMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CacheDumpGraph\"B\n\x11\x41\x64\x61pterRegistered\x12\x14\n\x0c\x61\x64\x61pter_name\x18\x01 \x01(\t\x12\x17\n\x0f\x61\x64\x61pter_version\x18\x02 \x01(\t\"w\n\x14\x41\x64\x61pterRegisteredMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterRegistered\"!\n\x12\x41\x64\x61pterImportError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"y\n\x15\x41\x64\x61pterImportErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.AdapterImportError\"#\n\x0fPluginLoadError\x12\x10\n\x08\x65xc_info\x18\x01 \x01(\t\"s\n\x12PluginLoadErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.PluginLoadError\"a\n\x14NewConnectionOpening\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x18\n\x10\x63onnection_state\x18\x02 \x01(\t\"}\n\x17NewConnectionOpeningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.NewConnectionOpening\"8\n\rCodeExecution\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x14\n\x0c\x63ode_content\x18\x02 \x01(\t\"o\n\x10\x43odeExecutionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.CodeExecution\"6\n\x13\x43odeExecutionStatus\x12\x0e\n\x06status\x18\x01 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x02 \x01(\x02\"{\n\x16\x43odeExecutionStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 
.proto_types.CodeExecutionStatus\"%\n\x16\x43\x61talogGenerationError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"\x81\x01\n\x19\x43\x61talogGenerationErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.CatalogGenerationError\"-\n\x13WriteCatalogFailure\x12\x16\n\x0enum_exceptions\x18\x01 \x01(\x05\"{\n\x16WriteCatalogFailureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.WriteCatalogFailure\"\x1e\n\x0e\x43\x61talogWritten\x12\x0c\n\x04path\x18\x01 \x01(\t\"q\n\x11\x43\x61talogWrittenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CatalogWritten\"\x14\n\x12\x43\x61nnotGenerateDocs\"y\n\x15\x43\x61nnotGenerateDocsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.CannotGenerateDocs\"\x11\n\x0f\x42uildingCatalog\"s\n\x12\x42uildingCatalogMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.BuildingCatalog\"-\n\x18\x44\x61tabaseErrorRunningHook\x12\x11\n\thook_type\x18\x01 \x01(\t\"\x85\x01\n\x1b\x44\x61tabaseErrorRunningHookMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.DatabaseErrorRunningHook\"4\n\x0cHooksRunning\x12\x11\n\tnum_hooks\x18\x01 \x01(\x05\x12\x11\n\thook_type\x18\x02 \x01(\t\"m\n\x0fHooksRunningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.HooksRunning\"T\n\x14\x46inishedRunningStats\x12\x11\n\tstat_line\x18\x01 \x01(\t\x12\x11\n\texecution\x18\x02 \x01(\t\x12\x16\n\x0e\x65xecution_time\x18\x03 \x01(\x02\"}\n\x17\x46inishedRunningStatsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.FinishedRunningStats\"<\n\x15\x43onstraintNotEnforced\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x7f\n\x18\x43onstraintNotEnforcedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.ConstraintNotEnforced\"=\n\x16\x43onstraintNotSupported\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x81\x01\n\x19\x43onstraintNotSupportedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.ConstraintNotSupported\"%\n\x10TypeCodeNotFound\x12\x11\n\ttype_code\x18\x01 \x01(\x05\"u\n\x13TypeCodeNotFoundMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.TypeCodeNotFoundb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'adapter_types_pb2', _globals) -if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - _globals['_ADAPTERCOMMONEVENTINFO_EXTRAENTRY']._options = None +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals['_ADAPTERCOMMONEVENTINFO_EXTRAENTRY']._loaded_options = None 
_globals['_ADAPTERCOMMONEVENTINFO_EXTRAENTRY']._serialized_options = b'8\001' - _globals['_CACHEDUMPGRAPH_DUMPENTRY']._options = None + _globals['_CACHEDUMPGRAPH_DUMPENTRY']._loaded_options = None _globals['_CACHEDUMPGRAPH_DUMPENTRY']._serialized_options = b'8\001' _globals['_ADAPTERCOMMONEVENTINFO']._serialized_start=100 _globals['_ADAPTERCOMMONEVENTINFO']._serialized_end=399 @@ -109,101 +120,101 @@ _globals['_SQLQUERYMSG']._serialized_start=4628 _globals['_SQLQUERYMSG']._serialized_end=4729 _globals['_SQLQUERYSTATUS']._serialized_start=4731 - _globals['_SQLQUERYSTATUS']._serialized_end=4829 - _globals['_SQLQUERYSTATUSMSG']._serialized_start=4831 - _globals['_SQLQUERYSTATUSMSG']._serialized_end=4944 - _globals['_SQLCOMMIT']._serialized_start=4946 - _globals['_SQLCOMMIT']._serialized_end=5025 - _globals['_SQLCOMMITMSG']._serialized_start=5027 - _globals['_SQLCOMMITMSG']._serialized_end=5130 - _globals['_COLTYPECHANGE']._serialized_start=5132 - _globals['_COLTYPECHANGE']._serialized_end=5229 - _globals['_COLTYPECHANGEMSG']._serialized_start=5231 - _globals['_COLTYPECHANGEMSG']._serialized_end=5342 - _globals['_SCHEMACREATION']._serialized_start=5344 - _globals['_SCHEMACREATION']._serialized_end=5408 - _globals['_SCHEMACREATIONMSG']._serialized_start=5410 - _globals['_SCHEMACREATIONMSG']._serialized_end=5523 - _globals['_SCHEMADROP']._serialized_start=5525 - _globals['_SCHEMADROP']._serialized_end=5585 - _globals['_SCHEMADROPMSG']._serialized_start=5587 - _globals['_SCHEMADROPMSG']._serialized_end=5692 - _globals['_CACHEACTION']._serialized_start=5695 - _globals['_CACHEACTION']._serialized_end=5917 - _globals['_CACHEACTIONMSG']._serialized_start=5919 - _globals['_CACHEACTIONMSG']._serialized_end=6026 - _globals['_CACHEDUMPGRAPH']._serialized_start=6029 - _globals['_CACHEDUMPGRAPH']._serialized_end=6181 - _globals['_CACHEDUMPGRAPH_DUMPENTRY']._serialized_start=6138 - _globals['_CACHEDUMPGRAPH_DUMPENTRY']._serialized_end=6181 - _globals['_CACHEDUMPGRAPHMSG']._serialized_start=6183 - _globals['_CACHEDUMPGRAPHMSG']._serialized_end=6296 - _globals['_ADAPTERREGISTERED']._serialized_start=6298 - _globals['_ADAPTERREGISTERED']._serialized_end=6364 - _globals['_ADAPTERREGISTEREDMSG']._serialized_start=6366 - _globals['_ADAPTERREGISTEREDMSG']._serialized_end=6485 - _globals['_ADAPTERIMPORTERROR']._serialized_start=6487 - _globals['_ADAPTERIMPORTERROR']._serialized_end=6520 - _globals['_ADAPTERIMPORTERRORMSG']._serialized_start=6522 - _globals['_ADAPTERIMPORTERRORMSG']._serialized_end=6643 - _globals['_PLUGINLOADERROR']._serialized_start=6645 - _globals['_PLUGINLOADERROR']._serialized_end=6680 - _globals['_PLUGINLOADERRORMSG']._serialized_start=6682 - _globals['_PLUGINLOADERRORMSG']._serialized_end=6797 - _globals['_NEWCONNECTIONOPENING']._serialized_start=6799 - _globals['_NEWCONNECTIONOPENING']._serialized_end=6896 - _globals['_NEWCONNECTIONOPENINGMSG']._serialized_start=6898 - _globals['_NEWCONNECTIONOPENINGMSG']._serialized_end=7023 - _globals['_CODEEXECUTION']._serialized_start=7025 - _globals['_CODEEXECUTION']._serialized_end=7081 - _globals['_CODEEXECUTIONMSG']._serialized_start=7083 - _globals['_CODEEXECUTIONMSG']._serialized_end=7194 - _globals['_CODEEXECUTIONSTATUS']._serialized_start=7196 - _globals['_CODEEXECUTIONSTATUS']._serialized_end=7250 - _globals['_CODEEXECUTIONSTATUSMSG']._serialized_start=7252 - _globals['_CODEEXECUTIONSTATUSMSG']._serialized_end=7375 - _globals['_CATALOGGENERATIONERROR']._serialized_start=7377 - _globals['_CATALOGGENERATIONERROR']._serialized_end=7414 - 
_globals['_CATALOGGENERATIONERRORMSG']._serialized_start=7417 - _globals['_CATALOGGENERATIONERRORMSG']._serialized_end=7546 - _globals['_WRITECATALOGFAILURE']._serialized_start=7548 - _globals['_WRITECATALOGFAILURE']._serialized_end=7593 - _globals['_WRITECATALOGFAILUREMSG']._serialized_start=7595 - _globals['_WRITECATALOGFAILUREMSG']._serialized_end=7718 - _globals['_CATALOGWRITTEN']._serialized_start=7720 - _globals['_CATALOGWRITTEN']._serialized_end=7750 - _globals['_CATALOGWRITTENMSG']._serialized_start=7752 - _globals['_CATALOGWRITTENMSG']._serialized_end=7865 - _globals['_CANNOTGENERATEDOCS']._serialized_start=7867 - _globals['_CANNOTGENERATEDOCS']._serialized_end=7887 - _globals['_CANNOTGENERATEDOCSMSG']._serialized_start=7889 - _globals['_CANNOTGENERATEDOCSMSG']._serialized_end=8010 - _globals['_BUILDINGCATALOG']._serialized_start=8012 - _globals['_BUILDINGCATALOG']._serialized_end=8029 - _globals['_BUILDINGCATALOGMSG']._serialized_start=8031 - _globals['_BUILDINGCATALOGMSG']._serialized_end=8146 - _globals['_DATABASEERRORRUNNINGHOOK']._serialized_start=8148 - _globals['_DATABASEERRORRUNNINGHOOK']._serialized_end=8193 - _globals['_DATABASEERRORRUNNINGHOOKMSG']._serialized_start=8196 - _globals['_DATABASEERRORRUNNINGHOOKMSG']._serialized_end=8329 - _globals['_HOOKSRUNNING']._serialized_start=8331 - _globals['_HOOKSRUNNING']._serialized_end=8383 - _globals['_HOOKSRUNNINGMSG']._serialized_start=8385 - _globals['_HOOKSRUNNINGMSG']._serialized_end=8494 - _globals['_FINISHEDRUNNINGSTATS']._serialized_start=8496 - _globals['_FINISHEDRUNNINGSTATS']._serialized_end=8580 - _globals['_FINISHEDRUNNINGSTATSMSG']._serialized_start=8582 - _globals['_FINISHEDRUNNINGSTATSMSG']._serialized_end=8707 - _globals['_CONSTRAINTNOTENFORCED']._serialized_start=8709 - _globals['_CONSTRAINTNOTENFORCED']._serialized_end=8769 - _globals['_CONSTRAINTNOTENFORCEDMSG']._serialized_start=8771 - _globals['_CONSTRAINTNOTENFORCEDMSG']._serialized_end=8898 - _globals['_CONSTRAINTNOTSUPPORTED']._serialized_start=8900 - _globals['_CONSTRAINTNOTSUPPORTED']._serialized_end=8961 - _globals['_CONSTRAINTNOTSUPPORTEDMSG']._serialized_start=8964 - _globals['_CONSTRAINTNOTSUPPORTEDMSG']._serialized_end=9093 - _globals['_TYPECODENOTFOUND']._serialized_start=9095 - _globals['_TYPECODENOTFOUND']._serialized_end=9132 - _globals['_TYPECODENOTFOUNDMSG']._serialized_start=9134 - _globals['_TYPECODENOTFOUNDMSG']._serialized_end=9251 + _globals['_SQLQUERYSTATUS']._serialized_end=4847 + _globals['_SQLQUERYSTATUSMSG']._serialized_start=4849 + _globals['_SQLQUERYSTATUSMSG']._serialized_end=4962 + _globals['_SQLCOMMIT']._serialized_start=4964 + _globals['_SQLCOMMIT']._serialized_end=5043 + _globals['_SQLCOMMITMSG']._serialized_start=5045 + _globals['_SQLCOMMITMSG']._serialized_end=5148 + _globals['_COLTYPECHANGE']._serialized_start=5150 + _globals['_COLTYPECHANGE']._serialized_end=5247 + _globals['_COLTYPECHANGEMSG']._serialized_start=5249 + _globals['_COLTYPECHANGEMSG']._serialized_end=5360 + _globals['_SCHEMACREATION']._serialized_start=5362 + _globals['_SCHEMACREATION']._serialized_end=5426 + _globals['_SCHEMACREATIONMSG']._serialized_start=5428 + _globals['_SCHEMACREATIONMSG']._serialized_end=5541 + _globals['_SCHEMADROP']._serialized_start=5543 + _globals['_SCHEMADROP']._serialized_end=5603 + _globals['_SCHEMADROPMSG']._serialized_start=5605 + _globals['_SCHEMADROPMSG']._serialized_end=5710 + _globals['_CACHEACTION']._serialized_start=5713 + _globals['_CACHEACTION']._serialized_end=5935 + 
_globals['_CACHEACTIONMSG']._serialized_start=5937 + _globals['_CACHEACTIONMSG']._serialized_end=6044 + _globals['_CACHEDUMPGRAPH']._serialized_start=6047 + _globals['_CACHEDUMPGRAPH']._serialized_end=6199 + _globals['_CACHEDUMPGRAPH_DUMPENTRY']._serialized_start=6156 + _globals['_CACHEDUMPGRAPH_DUMPENTRY']._serialized_end=6199 + _globals['_CACHEDUMPGRAPHMSG']._serialized_start=6201 + _globals['_CACHEDUMPGRAPHMSG']._serialized_end=6314 + _globals['_ADAPTERREGISTERED']._serialized_start=6316 + _globals['_ADAPTERREGISTERED']._serialized_end=6382 + _globals['_ADAPTERREGISTEREDMSG']._serialized_start=6384 + _globals['_ADAPTERREGISTEREDMSG']._serialized_end=6503 + _globals['_ADAPTERIMPORTERROR']._serialized_start=6505 + _globals['_ADAPTERIMPORTERROR']._serialized_end=6538 + _globals['_ADAPTERIMPORTERRORMSG']._serialized_start=6540 + _globals['_ADAPTERIMPORTERRORMSG']._serialized_end=6661 + _globals['_PLUGINLOADERROR']._serialized_start=6663 + _globals['_PLUGINLOADERROR']._serialized_end=6698 + _globals['_PLUGINLOADERRORMSG']._serialized_start=6700 + _globals['_PLUGINLOADERRORMSG']._serialized_end=6815 + _globals['_NEWCONNECTIONOPENING']._serialized_start=6817 + _globals['_NEWCONNECTIONOPENING']._serialized_end=6914 + _globals['_NEWCONNECTIONOPENINGMSG']._serialized_start=6916 + _globals['_NEWCONNECTIONOPENINGMSG']._serialized_end=7041 + _globals['_CODEEXECUTION']._serialized_start=7043 + _globals['_CODEEXECUTION']._serialized_end=7099 + _globals['_CODEEXECUTIONMSG']._serialized_start=7101 + _globals['_CODEEXECUTIONMSG']._serialized_end=7212 + _globals['_CODEEXECUTIONSTATUS']._serialized_start=7214 + _globals['_CODEEXECUTIONSTATUS']._serialized_end=7268 + _globals['_CODEEXECUTIONSTATUSMSG']._serialized_start=7270 + _globals['_CODEEXECUTIONSTATUSMSG']._serialized_end=7393 + _globals['_CATALOGGENERATIONERROR']._serialized_start=7395 + _globals['_CATALOGGENERATIONERROR']._serialized_end=7432 + _globals['_CATALOGGENERATIONERRORMSG']._serialized_start=7435 + _globals['_CATALOGGENERATIONERRORMSG']._serialized_end=7564 + _globals['_WRITECATALOGFAILURE']._serialized_start=7566 + _globals['_WRITECATALOGFAILURE']._serialized_end=7611 + _globals['_WRITECATALOGFAILUREMSG']._serialized_start=7613 + _globals['_WRITECATALOGFAILUREMSG']._serialized_end=7736 + _globals['_CATALOGWRITTEN']._serialized_start=7738 + _globals['_CATALOGWRITTEN']._serialized_end=7768 + _globals['_CATALOGWRITTENMSG']._serialized_start=7770 + _globals['_CATALOGWRITTENMSG']._serialized_end=7883 + _globals['_CANNOTGENERATEDOCS']._serialized_start=7885 + _globals['_CANNOTGENERATEDOCS']._serialized_end=7905 + _globals['_CANNOTGENERATEDOCSMSG']._serialized_start=7907 + _globals['_CANNOTGENERATEDOCSMSG']._serialized_end=8028 + _globals['_BUILDINGCATALOG']._serialized_start=8030 + _globals['_BUILDINGCATALOG']._serialized_end=8047 + _globals['_BUILDINGCATALOGMSG']._serialized_start=8049 + _globals['_BUILDINGCATALOGMSG']._serialized_end=8164 + _globals['_DATABASEERRORRUNNINGHOOK']._serialized_start=8166 + _globals['_DATABASEERRORRUNNINGHOOK']._serialized_end=8211 + _globals['_DATABASEERRORRUNNINGHOOKMSG']._serialized_start=8214 + _globals['_DATABASEERRORRUNNINGHOOKMSG']._serialized_end=8347 + _globals['_HOOKSRUNNING']._serialized_start=8349 + _globals['_HOOKSRUNNING']._serialized_end=8401 + _globals['_HOOKSRUNNINGMSG']._serialized_start=8403 + _globals['_HOOKSRUNNINGMSG']._serialized_end=8512 + _globals['_FINISHEDRUNNINGSTATS']._serialized_start=8514 + _globals['_FINISHEDRUNNINGSTATS']._serialized_end=8598 + 
_globals['_FINISHEDRUNNINGSTATSMSG']._serialized_start=8600 + _globals['_FINISHEDRUNNINGSTATSMSG']._serialized_end=8725 + _globals['_CONSTRAINTNOTENFORCED']._serialized_start=8727 + _globals['_CONSTRAINTNOTENFORCED']._serialized_end=8787 + _globals['_CONSTRAINTNOTENFORCEDMSG']._serialized_start=8789 + _globals['_CONSTRAINTNOTENFORCEDMSG']._serialized_end=8916 + _globals['_CONSTRAINTNOTSUPPORTED']._serialized_start=8918 + _globals['_CONSTRAINTNOTSUPPORTED']._serialized_end=8979 + _globals['_CONSTRAINTNOTSUPPORTEDMSG']._serialized_start=8982 + _globals['_CONSTRAINTNOTSUPPORTEDMSG']._serialized_end=9111 + _globals['_TYPECODENOTFOUND']._serialized_start=9113 + _globals['_TYPECODENOTFOUND']._serialized_end=9150 + _globals['_TYPECODENOTFOUNDMSG']._serialized_start=9152 + _globals['_TYPECODENOTFOUNDMSG']._serialized_end=9269 # @@protoc_insertion_point(module_scope) diff --git a/dbt/adapters/exceptions/compilation.py b/dbt/adapters/exceptions/compilation.py index 46ca5219..d82924e3 100644 --- a/dbt/adapters/exceptions/compilation.py +++ b/dbt/adapters/exceptions/compilation.py @@ -150,8 +150,10 @@ def __init__(self, missing: List): super().__init__(msg=self.get_message()) def get_message(self) -> str: - msg = 'Snapshot target is not a snapshot table (missing "{}")'.format( - '", "'.join(self.missing) + missing = '", "'.join(self.missing) + msg = ( + f'Snapshot target is missing configured columns (missing "{missing}"). ' + "See https://docs.getdbt.com/docs/build/snapshots#snapshot-meta-fields for more information." ) return msg diff --git a/dbt/adapters/factory.py b/dbt/adapters/factory.py index b1854f67..8a018619 100644 --- a/dbt/adapters/factory.py +++ b/dbt/adapters/factory.py @@ -188,7 +188,7 @@ def get_include_paths(self, name: Optional[str]) -> List[Path]: def get_adapter_type_names(self, name: Optional[str]) -> List[str]: return [p.adapter.type() for p in self.get_adapter_plugins(name)] - def get_adapter_constraint_support(self, name: Optional[str]) -> List[str]: + def get_adapter_constraint_support(self, name: Optional[str]) -> Dict[str, str]: return self.lookup_adapter(name).CONSTRAINT_SUPPORT # type: ignore @@ -251,7 +251,7 @@ def get_adapter_type_names(name: Optional[str]) -> List[str]: return FACTORY.get_adapter_type_names(name) -def get_adapter_constraint_support(name: Optional[str]) -> List[str]: +def get_adapter_constraint_support(name: Optional[str]) -> Dict[str, str]: return FACTORY.get_adapter_constraint_support(name) diff --git a/dbt/adapters/record.py b/dbt/adapters/record.py deleted file mode 100644 index 5204f59c..00000000 --- a/dbt/adapters/record.py +++ /dev/null @@ -1,67 +0,0 @@ -import dataclasses -from io import StringIO -import json -import re -from typing import Any, Optional, Mapping - -from agate import Table - -from dbt_common.events.contextvars import get_node_info -from dbt_common.record import Record, Recorder - -from dbt.adapters.contracts.connection import AdapterResponse - - -@dataclasses.dataclass -class QueryRecordParams: - sql: str - auto_begin: bool = False - fetch: bool = False - limit: Optional[int] = None - node_unique_id: Optional[str] = None - - def __post_init__(self) -> None: - if self.node_unique_id is None: - node_info = get_node_info() - self.node_unique_id = node_info["unique_id"] if node_info else "" - - @staticmethod - def _clean_up_sql(sql: str) -> str: - sql = re.sub(r"--.*?\n", "", sql) # Remove single-line comments (--) - sql = re.sub(r"/\*.*?\*/", "", sql, flags=re.DOTALL) # Remove multi-line comments (/* */) - return sql.replace(" ", 
"").replace("\n", "") - - def _matches(self, other: "QueryRecordParams") -> bool: - return self.node_unique_id == other.node_unique_id and self._clean_up_sql( - self.sql - ) == self._clean_up_sql(other.sql) - - -@dataclasses.dataclass -class QueryRecordResult: - adapter_response: Optional["AdapterResponse"] - table: Optional[Table] - - def _to_dict(self) -> Any: - buf = StringIO() - self.table.to_json(buf) # type: ignore - - return { - "adapter_response": self.adapter_response.to_dict(), # type: ignore - "table": buf.getvalue(), - } - - @classmethod - def _from_dict(cls, dct: Mapping) -> "QueryRecordResult": - return QueryRecordResult( - adapter_response=AdapterResponse.from_dict(dct["adapter_response"]), - table=Table.from_object(json.loads(dct["table"])), - ) - - -class QueryRecord(Record): - params_cls = QueryRecordParams - result_cls = QueryRecordResult - - -Recorder.register_record_type(QueryRecord) diff --git a/dbt/adapters/record/__init__.py b/dbt/adapters/record/__init__.py new file mode 100644 index 00000000..afde4a01 --- /dev/null +++ b/dbt/adapters/record/__init__.py @@ -0,0 +1,2 @@ +from dbt.adapters.record.handle import RecordReplayHandle +from dbt.adapters.record.cursor.cursor import RecordReplayCursor diff --git a/dbt/adapters/record/cursor/cursor.py b/dbt/adapters/record/cursor/cursor.py new file mode 100644 index 00000000..577178db --- /dev/null +++ b/dbt/adapters/record/cursor/cursor.py @@ -0,0 +1,54 @@ +from typing import Any, Optional + +from dbt_common.record import record_function + +from dbt.adapters.contracts.connection import Connection +from dbt.adapters.record.cursor.description import CursorGetDescriptionRecord +from dbt.adapters.record.cursor.execute import CursorExecuteRecord +from dbt.adapters.record.cursor.fetchone import CursorFetchOneRecord +from dbt.adapters.record.cursor.fetchmany import CursorFetchManyRecord +from dbt.adapters.record.cursor.fetchall import CursorFetchAllRecord +from dbt.adapters.record.cursor.rowcount import CursorGetRowCountRecord + + +class RecordReplayCursor: + """A proxy object used to wrap native database cursors under record/replay + modes. In record mode, this proxy notes the parameters and return values + of the methods and properties it implements, which closely match the Python + DB API 2.0 cursor methods used by many dbt adapters to interact with the + database or DWH. 
In replay mode, it mocks out those calls using previously + recorded calls, so that no interaction with a database actually occurs.""" + + def __init__(self, native_cursor: Any, connection: Connection) -> None: + self.native_cursor = native_cursor + self.connection = connection + + @record_function(CursorExecuteRecord, method=True, id_field_name="connection_name") + def execute(self, operation, parameters=None) -> None: + self.native_cursor.execute(operation, parameters) + + @record_function(CursorFetchOneRecord, method=True, id_field_name="connection_name") + def fetchone(self) -> Any: + return self.native_cursor.fetchone() + + @record_function(CursorFetchManyRecord, method=True, id_field_name="connection_name") + def fetchmany(self, size: int) -> Any: + return self.native_cursor.fetchmany(size) + + @record_function(CursorFetchAllRecord, method=True, id_field_name="connection_name") + def fetchall(self) -> Any: + return self.native_cursor.fetchall() + + @property + def connection_name(self) -> Optional[str]: + return self.connection.name + + @property + @record_function(CursorGetRowCountRecord, method=True, id_field_name="connection_name") + def rowcount(self) -> int: + return self.native_cursor.rowcount + + @property + @record_function(CursorGetDescriptionRecord, method=True, id_field_name="connection_name") + def description(self) -> str: + return self.native_cursor.description diff --git a/dbt/adapters/record/cursor/description.py b/dbt/adapters/record/cursor/description.py new file mode 100644 index 00000000..d6ba15d9 --- /dev/null +++ b/dbt/adapters/record/cursor/description.py @@ -0,0 +1,37 @@ +import dataclasses +from typing import Any, Iterable, Mapping + +from dbt_common.record import Record, Recorder + + +@dataclasses.dataclass +class CursorGetDescriptionParams: + connection_name: str + + +@dataclasses.dataclass +class CursorGetDescriptionResult: + columns: Iterable[Any] + + def _to_dict(self) -> Any: + column_dicts = [] + for c in self.columns: + # This captures the mandatory column information, but we might need + # more for some adapters. 
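+            # Only the first two members of each seven-item description tuple (the column name and type_code) are kept here.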
+ # See https://peps.python.org/pep-0249/#description + column_dicts.append((c[0], c[1])) + + return {"columns": column_dicts} + + @classmethod + def _from_dict(cls, dct: Mapping) -> "CursorGetDescriptionResult": + return CursorGetDescriptionResult(columns=dct["columns"]) + + +@Recorder.register_record_type +class CursorGetDescriptionRecord(Record): + """Implements record/replay support for the cursor.description property.""" + + params_cls = CursorGetDescriptionParams + result_cls = CursorGetDescriptionResult + group = "Database" diff --git a/dbt/adapters/record/cursor/execute.py b/dbt/adapters/record/cursor/execute.py new file mode 100644 index 00000000..e7e69859 --- /dev/null +++ b/dbt/adapters/record/cursor/execute.py @@ -0,0 +1,20 @@ +import dataclasses +from typing import Any, Iterable, Union, Mapping + +from dbt_common.record import Record, Recorder + + +@dataclasses.dataclass +class CursorExecuteParams: + connection_name: str + operation: str + parameters: Union[Iterable[Any], Mapping[str, Any]] + + +@Recorder.register_record_type +class CursorExecuteRecord(Record): + """Implements record/replay support for the cursor.execute() method.""" + + params_cls = CursorExecuteParams + result_cls = None + group = "Database" diff --git a/dbt/adapters/record/cursor/fetchall.py b/dbt/adapters/record/cursor/fetchall.py new file mode 100644 index 00000000..090cc160 --- /dev/null +++ b/dbt/adapters/record/cursor/fetchall.py @@ -0,0 +1,66 @@ +import dataclasses +import datetime +from typing import Any, Dict, List, Mapping + +from dbt_common.record import Record, Recorder + + +@dataclasses.dataclass +class CursorFetchAllParams: + connection_name: str + + +@dataclasses.dataclass +class CursorFetchAllResult: + results: List[Any] + + def _to_dict(self) -> Dict[str, Any]: + processed_results = [] + for result in self.results: + result = tuple(map(self._process_value, result)) + processed_results.append(result) + + return {"results": processed_results} + + @classmethod + def _from_dict(cls, dct: Mapping) -> "CursorFetchAllResult": + unprocessed_results = [] + for result in dct["results"]: + result = tuple(map(cls._unprocess_value, result)) + unprocessed_results.append(result) + + return CursorFetchAllResult(unprocessed_results) + + @classmethod + def _process_value(cls, value: Any) -> Any: + if type(value) is datetime.date: + return {"type": "date", "value": value.isoformat()} + elif type(value) is datetime.datetime: + return {"type": "datetime", "value": value.isoformat()} + else: + return value + + @classmethod + def _unprocess_value(cls, value: Any) -> Any: + if type(value) is dict: + value_type = value.get("type") + if value_type == "date": + date_string = value.get("value") + assert isinstance(date_string, str) + return datetime.date.fromisoformat(date_string) + elif value_type == "datetime": + date_string = value.get("value") + assert isinstance(date_string, str) + return datetime.datetime.fromisoformat(date_string) + return value + else: + return value + + +@Recorder.register_record_type +class CursorFetchAllRecord(Record): + """Implements record/replay support for the cursor.fetchall() method.""" + + params_cls = CursorFetchAllParams + result_cls = CursorFetchAllResult + group = "Database" diff --git a/dbt/adapters/record/cursor/fetchmany.py b/dbt/adapters/record/cursor/fetchmany.py new file mode 100644 index 00000000..86f15440 --- /dev/null +++ b/dbt/adapters/record/cursor/fetchmany.py @@ -0,0 +1,23 @@ +import dataclasses +from typing import Any, List + +from dbt_common.record import Record, 
Recorder + + +@dataclasses.dataclass +class CursorFetchManyParams: + connection_name: str + + +@dataclasses.dataclass +class CursorFetchManyResult: + results: List[Any] + + +@Recorder.register_record_type +class CursorFetchManyRecord(Record): + """Implements record/replay support for the cursor.fetchmany() method.""" + + params_cls = CursorFetchManyParams + result_cls = CursorFetchManyResult + group = "Database" diff --git a/dbt/adapters/record/cursor/fetchone.py b/dbt/adapters/record/cursor/fetchone.py new file mode 100644 index 00000000..42ffe210 --- /dev/null +++ b/dbt/adapters/record/cursor/fetchone.py @@ -0,0 +1,23 @@ +import dataclasses +from typing import Any + +from dbt_common.record import Record, Recorder + + +@dataclasses.dataclass +class CursorFetchOneParams: + connection_name: str + + +@dataclasses.dataclass +class CursorFetchOneResult: + result: Any + + +@Recorder.register_record_type +class CursorFetchOneRecord(Record): + """Implements record/replay support for the cursor.fetchone() method.""" + + params_cls = CursorFetchOneParams + result_cls = CursorFetchOneResult + group = "Database" diff --git a/dbt/adapters/record/cursor/rowcount.py b/dbt/adapters/record/cursor/rowcount.py new file mode 100644 index 00000000..c024817e --- /dev/null +++ b/dbt/adapters/record/cursor/rowcount.py @@ -0,0 +1,23 @@ +import dataclasses +from typing import Optional + +from dbt_common.record import Record, Recorder + + +@dataclasses.dataclass +class CursorGetRowCountParams: + connection_name: str + + +@dataclasses.dataclass +class CursorGetRowCountResult: + rowcount: Optional[int] + + +@Recorder.register_record_type +class CursorGetRowCountRecord(Record): + """Implements record/replay support for the cursor.rowcount property.""" + + params_cls = CursorGetRowCountParams + result_cls = CursorGetRowCountResult + group = "Database" diff --git a/dbt/adapters/record/handle.py b/dbt/adapters/record/handle.py new file mode 100644 index 00000000..31817c37 --- /dev/null +++ b/dbt/adapters/record/handle.py @@ -0,0 +1,24 @@ +from typing import Any + +from dbt.adapters.contracts.connection import Connection + +from dbt.adapters.record.cursor.cursor import RecordReplayCursor + + +class RecordReplayHandle: + """A proxy object used for record/replay modes. What adapters call a + 'handle' is typically a native database connection, but should not be + confused with the Connection protocol, which is a dbt-adapters concept. + + Currently, the only function of the handle proxy is to provide a record/replay + aware cursor object when cursor() is called.""" + + def __init__(self, native_handle: Any, connection: Connection) -> None: + self.native_handle = native_handle + self.connection = connection + + def cursor(self) -> Any: + # The native handle could be None if we are in replay mode, because no + # actual database access should be performed in that mode. 
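+        # The RecordReplayCursor tolerates a None native cursor: in replay mode its recorded methods return previously captured results instead of delegating to the database.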
+ cursor = None if self.native_handle is None else self.native_handle.cursor() + return RecordReplayCursor(cursor, self.connection) diff --git a/dbt/adapters/sql/connections.py b/dbt/adapters/sql/connections.py index 13111a6e..baccddc9 100644 --- a/dbt/adapters/sql/connections.py +++ b/dbt/adapters/sql/connections.py @@ -5,7 +5,6 @@ from dbt_common.events.contextvars import get_node_info from dbt_common.events.functions import fire_event from dbt_common.exceptions import DbtInternalError, NotImplementedError -from dbt_common.record import record_function from dbt_common.utils import cast_to_str from dbt.adapters.base import BaseConnectionManager @@ -20,7 +19,6 @@ SQLQuery, SQLQueryStatus, ) -from dbt.adapters.record import QueryRecord if TYPE_CHECKING: import agate @@ -94,11 +92,14 @@ def add_query( cursor = connection.handle.cursor() cursor.execute(sql, bindings) + result = self.get_response(cursor) + fire_event( SQLQueryStatus( - status=str(self.get_response(cursor)), + status=str(result), elapsed=time.perf_counter() - pre, node_info=get_node_info(), + query_id=result.query_id, ) ) @@ -143,7 +144,6 @@ def get_result_from_cursor(cls, cursor: Any, limit: Optional[int]) -> "agate.Tab return table_from_data_flat(data, column_names) - @record_function(QueryRecord, method=True, tuple_result=True) def execute( self, sql: str, diff --git a/dbt/include/global_project/macros/adapters/apply_grants.sql b/dbt/include/global_project/macros/adapters/apply_grants.sql index 10906e7f..c75eef89 100644 --- a/dbt/include/global_project/macros/adapters/apply_grants.sql +++ b/dbt/include/global_project/macros/adapters/apply_grants.sql @@ -61,7 +61,7 @@ {% endmacro %} {% macro default__get_show_grant_sql(relation) %} - show grants on {{ relation }} + show grants on {{ relation.render() }} {% endmacro %} @@ -70,7 +70,7 @@ {% endmacro %} {%- macro default__get_grant_sql(relation, privilege, grantees) -%} - grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }} + grant {{ privilege }} on {{ relation.render() }} to {{ grantees | join(', ') }} {%- endmacro -%} @@ -79,7 +79,7 @@ {% endmacro %} {%- macro default__get_revoke_sql(relation, privilege, grantees) -%} - revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }} + revoke {{ privilege }} on {{ relation.render() }} from {{ grantees | join(', ') }} {%- endmacro -%} @@ -147,7 +147,7 @@ {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %} {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %} {% if not (needs_granting or needs_revoking) %} - {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}} + {{ log('On ' ~ relation.render() ~': All grants are in place, no revocation or granting needed.')}} {% endif %} {% else %} {#-- We don't think there's any chance of previous grants having carried over. 
--#} diff --git a/dbt/include/global_project/macros/adapters/columns.sql b/dbt/include/global_project/macros/adapters/columns.sql index 663a827b..96e6f3f2 100644 --- a/dbt/include/global_project/macros/adapters/columns.sql +++ b/dbt/include/global_project/macros/adapters/columns.sql @@ -96,10 +96,10 @@ {%- set tmp_column = column_name + "__dbt_alter" -%} {% call statement('alter_column_type') %} - alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }}; - update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }}; - alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade; - alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }} + alter table {{ relation.render() }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }}; + update {{ relation.render() }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }}; + alter table {{ relation.render() }} drop column {{ adapter.quote(column_name) }} cascade; + alter table {{ relation.render() }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }} {% endcall %} {% endmacro %} @@ -120,7 +120,7 @@ {% set sql -%} - alter {{ relation.type }} {{ relation }} + alter {{ relation.type }} {{ relation.render() }} {% for column in add_columns %} add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }} diff --git a/dbt/include/global_project/macros/adapters/relation.sql b/dbt/include/global_project/macros/adapters/relation.sql index 1c2bd880..b9af4969 100644 --- a/dbt/include/global_project/macros/adapters/relation.sql +++ b/dbt/include/global_project/macros/adapters/relation.sql @@ -38,7 +38,7 @@ {% macro default__truncate_relation(relation) -%} {% call statement('truncate_relation') -%} - truncate table {{ relation }} + truncate table {{ relation.render() }} {%- endcall %} {% endmacro %} diff --git a/dbt/include/global_project/macros/adapters/show.sql b/dbt/include/global_project/macros/adapters/show.sql index 33a93f3d..3a5faa98 100644 --- a/dbt/include/global_project/macros/adapters/show.sql +++ b/dbt/include/global_project/macros/adapters/show.sql @@ -1,22 +1,26 @@ +{# + We expect a syntax error if dbt show is invoked both with a --limit flag to show + and with a limit predicate embedded in its inline query. No special handling is + provided out-of-box. +#} {% macro get_show_sql(compiled_code, sql_header, limit) -%} - {%- if sql_header -%} + {%- if sql_header is not none -%} {{ sql_header }} - {%- endif -%} - {%- if limit is not none -%} + {%- endif %} {{ get_limit_subquery_sql(compiled_code, limit) }} - {%- else -%} - {{ compiled_code }} - {%- endif -%} {% endmacro %} -{% macro get_limit_subquery_sql(sql, limit) %} - {{ adapter.dispatch('get_limit_subquery_sql', 'dbt')(sql, limit) }} -{% endmacro %} +{# + Not necessarily a true subquery anymore. Now, merely a query subordinate + to the calling macro. 
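+  For example, with limit=5 the default implementation now appends a bare "limit 5" to the compiled query instead of wrapping it in a subselect.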
+#} +{%- macro get_limit_subquery_sql(sql, limit) -%} + {{ adapter.dispatch('get_limit_sql', 'dbt')(sql, limit) }} +{%- endmacro -%} -{% macro default__get_limit_subquery_sql(sql, limit) %} - select * - from ( - {{ sql }} - ) as model_limit_subq - limit {{ limit }} +{% macro default__get_limit_sql(sql, limit) %} + {{ sql }} + {% if limit is not none %} + limit {{ limit }} + {%- endif -%} {% endmacro %} diff --git a/dbt/include/global_project/macros/adapters/timestamps.sql b/dbt/include/global_project/macros/adapters/timestamps.sql index 64b5fd3d..c936c844 100644 --- a/dbt/include/global_project/macros/adapters/timestamps.sql +++ b/dbt/include/global_project/macros/adapters/timestamps.sql @@ -15,6 +15,14 @@ {{ current_timestamp() }} {% endmacro %} +{% macro get_snapshot_get_time_data_type() %} + {% set snapshot_time = adapter.dispatch('snapshot_get_time', 'dbt')() %} + {% set time_data_type_sql = 'select ' ~ snapshot_time ~ ' as dbt_snapshot_time' %} + {% set snapshot_time_column_schema = get_column_schema_from_query(time_data_type_sql) %} + {% set time_data_type = snapshot_time_column_schema[0].dtype %} + {{ return(time_data_type or none) }} +{% endmacro %} + --------------------------------------------- /* {# diff --git a/dbt/include/global_project/macros/materializations/models/clone/clone.sql b/dbt/include/global_project/macros/materializations/models/clone/clone.sql index 01c8c393..56d80082 100644 --- a/dbt/include/global_project/macros/materializations/models/clone/clone.sql +++ b/dbt/include/global_project/macros/materializations/models/clone/clone.sql @@ -27,14 +27,14 @@ {%- set target_relation = this.incorporate(type='table') -%} {% if existing_relation is not none and not existing_relation.is_table %} - {{ log("Dropping relation " ~ existing_relation ~ " because it is of type " ~ existing_relation.type) }} + {{ log("Dropping relation " ~ existing_relation.render() ~ " because it is of type " ~ existing_relation.type) }} {{ drop_relation_if_exists(existing_relation) }} {% endif %} -- as a general rule, data platforms that can clone tables can also do atomic 'create or replace' {% call statement('main') %} {% if target_relation and defer_relation and target_relation == defer_relation %} - {{ log("Target relation and defer relation are the same, skipping clone for relation: " ~ target_relation) }} + {{ log("Target relation and defer relation are the same, skipping clone for relation: " ~ target_relation.render()) }} {% else %} {{ create_or_replace_clone(target_relation, defer_relation) }} {% endif %} diff --git a/dbt/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql b/dbt/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql index 204e9e87..cdb2559c 100644 --- a/dbt/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql +++ b/dbt/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql @@ -3,5 +3,5 @@ {% endmacro %} {% macro default__create_or_replace_clone(this_relation, defer_relation) %} - create or replace table {{ this_relation }} clone {{ defer_relation }} + create or replace table {{ this_relation.render() }} clone {{ defer_relation.render() }} {% endmacro %} diff --git a/dbt/include/global_project/macros/materializations/models/incremental/incremental.sql b/dbt/include/global_project/macros/materializations/models/incremental/incremental.sql index e8ff5c1e..41d2de26 100644 ---
a/dbt/include/global_project/macros/materializations/models/incremental/incremental.sql +++ b/dbt/include/global_project/macros/materializations/models/incremental/incremental.sql @@ -32,6 +32,9 @@ {% set to_drop = [] %} + {% set incremental_strategy = config.get('incremental_strategy') or 'default' %} + {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %} + {% if existing_relation is none %} {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %} {% elif full_refresh_mode %} @@ -39,9 +42,12 @@ {% set need_swap = true %} {% else %} {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %} - {% do adapter.expand_target_column_types( - from_relation=temp_relation, - to_relation=target_relation) %} + {% set contract_config = config.get('contract') %} + {% if not contract_config or not contract_config.enforced %} + {% do adapter.expand_target_column_types( + from_relation=temp_relation, + to_relation=target_relation) %} + {% endif %} {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#} {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %} {% if not dest_columns %} @@ -49,9 +55,7 @@ {% endif %} {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#} - {% set incremental_strategy = config.get('incremental_strategy') or 'default' %} {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %} - {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %} {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %} {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %} diff --git a/dbt/include/global_project/macros/materializations/models/incremental/strategies.sql b/dbt/include/global_project/macros/materializations/models/incremental/strategies.sql index 72082cca..111d3887 100644 --- a/dbt/include/global_project/macros/materializations/models/incremental/strategies.sql +++ b/dbt/include/global_project/macros/materializations/models/incremental/strategies.sql @@ -66,6 +66,19 @@ {% endmacro %} +{% macro get_incremental_microbatch_sql(arg_dict) %} + + {{ return(adapter.dispatch('get_incremental_microbatch_sql', 'dbt')(arg_dict)) }} + +{% endmacro %} + +{% macro default__get_incremental_microbatch_sql(arg_dict) %} + + {{ exceptions.raise_not_implemented('microbatch materialization strategy not implemented for adapter ' + adapter.type()) }} + +{% endmacro %} + + {% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %} {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute="name")) -%} diff --git a/dbt/include/global_project/macros/materializations/models/materialized_view.sql b/dbt/include/global_project/macros/materializations/models/materialized_view.sql index 6dc30bf9..a39f8aa2 100644 --- a/dbt/include/global_project/macros/materializations/models/materialized_view.sql +++ b/dbt/include/global_project/macros/materializations/models/materialized_view.sql @@ -71,9 +71,9 @@ {% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %} {% elif on_configuration_change == 'continue' %} {% set 
build_sql = '' %} - {{ exceptions.warn("Configuration changes were identified and `on_configuration_change` was set to `continue` for `" ~ target_relation ~ "`") }} + {{ exceptions.warn("Configuration changes were identified and `on_configuration_change` was set to `continue` for `" ~ target_relation.render() ~ "`") }} {% elif on_configuration_change == 'fail' %} - {{ exceptions.raise_fail_fast_error("Configuration changes were identified and `on_configuration_change` was set to `fail` for `" ~ target_relation ~ "`") }} + {{ exceptions.raise_fail_fast_error("Configuration changes were identified and `on_configuration_change` was set to `fail` for `" ~ target_relation.render() ~ "`") }} {% else %} -- this only happens if the user provides a value other than `apply`, 'skip', 'fail' diff --git a/dbt/include/global_project/macros/materializations/seeds/helpers.sql b/dbt/include/global_project/macros/materializations/seeds/helpers.sql index 44dbf370..d87c258b 100644 --- a/dbt/include/global_project/macros/materializations/seeds/helpers.sql +++ b/dbt/include/global_project/macros/materializations/seeds/helpers.sql @@ -37,7 +37,7 @@ {% set sql = create_csv_table(model, agate_table) %} {% else %} {{ adapter.truncate_relation(old_relation) }} - {% set sql = "truncate table " ~ old_relation %} + {% set sql = "truncate table " ~ old_relation.render() %} {% endif %} {{ return(sql) }} diff --git a/dbt/include/global_project/macros/materializations/seeds/seed.sql b/dbt/include/global_project/macros/materializations/seeds/seed.sql index 3b66252d..4ee4fb80 100644 --- a/dbt/include/global_project/macros/materializations/seeds/seed.sql +++ b/dbt/include/global_project/macros/materializations/seeds/seed.sql @@ -22,7 +22,7 @@ -- build model {% set create_table_sql = "" %} {% if exists_as_view %} - {{ exceptions.raise_compiler_error("Cannot seed to '{}', it is a view".format(old_relation)) }} + {{ exceptions.raise_compiler_error("Cannot seed to '{}', it is a view".format(old_relation.render())) }} {% elif exists_as_table %} {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %} {% else %} diff --git a/dbt/include/global_project/macros/materializations/snapshots/helpers.sql b/dbt/include/global_project/macros/materializations/snapshots/helpers.sql index 7fd4bfd5..b4cd7c14 100644 --- a/dbt/include/global_project/macros/materializations/snapshots/helpers.sql +++ b/dbt/include/global_project/macros/materializations/snapshots/helpers.sql @@ -8,7 +8,7 @@ {% macro default__create_columns(relation, columns) %} {% for column in columns %} {% call statement() %} - alter table {{ relation }} add column "{{ column.name }}" {{ column.data_type }}; + alter table {{ relation.render() }} add column "{{ column.name }}" {{ column.data_type }}; {% endcall %} {% endfor %} {% endmacro %} @@ -34,7 +34,12 @@ {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }} {% endmacro %} +{% macro get_snapshot_table_column_names() %} + {{ return({'dbt_valid_to': 'dbt_valid_to', 'dbt_valid_from': 'dbt_valid_from', 'dbt_scd_id': 'dbt_scd_id', 'dbt_updated_at': 'dbt_updated_at'}) }} +{% endmacro %} + {% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%} + {% set columns = config.get('snapshot_table_column_names') or get_snapshot_table_column_names() %} with snapshot_query as ( @@ -44,35 +49,35 @@ snapshotted_data as ( - select *, - {{ strategy.unique_key }} as dbt_unique_key - + select *, {{ unique_key_fields(strategy.unique_key) }} 
from {{ target_relation }} - where dbt_valid_to is null + where + {% if config.get('dbt_valid_to_current') %} + {# Check for either dbt_valid_to_current OR null, in order to correctly update records with nulls #} + ( {{ columns.dbt_valid_to }} = {{ config.get('dbt_valid_to_current') }} or {{ columns.dbt_valid_to }} is null) + {% else %} + {{ columns.dbt_valid_to }} is null + {% endif %} ), insertions_source_data as ( - select - *, - {{ strategy.unique_key }} as dbt_unique_key, - {{ strategy.updated_at }} as dbt_updated_at, - {{ strategy.updated_at }} as dbt_valid_from, - nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to, - {{ strategy.scd_id }} as dbt_scd_id + select *, {{ unique_key_fields(strategy.unique_key) }}, + {{ strategy.updated_at }} as {{ columns.dbt_updated_at }}, + {{ strategy.updated_at }} as {{ columns.dbt_valid_from }}, + {{ get_dbt_valid_to_current(strategy, columns) }}, + {{ strategy.scd_id }} as {{ columns.dbt_scd_id }} from snapshot_query ), updates_source_data as ( - select - *, - {{ strategy.unique_key }} as dbt_unique_key, - {{ strategy.updated_at }} as dbt_updated_at, - {{ strategy.updated_at }} as dbt_valid_from, - {{ strategy.updated_at }} as dbt_valid_to + select *, {{ unique_key_fields(strategy.unique_key) }}, + {{ strategy.updated_at }} as {{ columns.dbt_updated_at }}, + {{ strategy.updated_at }} as {{ columns.dbt_valid_from }}, + {{ strategy.updated_at }} as {{ columns.dbt_valid_to }} from snapshot_query ), @@ -81,9 +86,7 @@ deletes_source_data as ( - select - *, - {{ strategy.unique_key }} as dbt_unique_key + select *, {{ unique_key_fields(strategy.unique_key) }} from snapshot_query ), {% endif %} @@ -95,13 +98,11 @@ source_data.* from insertions_source_data as source_data - left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key - where snapshotted_data.dbt_unique_key is null - or ( - snapshotted_data.dbt_unique_key is not null - and ( - {{ strategy.row_changed }} - ) + left outer join snapshotted_data + on {{ unique_key_join_on(strategy.unique_key, "snapshotted_data", "source_data") }} + where {{ unique_key_is_null(strategy.unique_key, "snapshotted_data") }} + or ({{ unique_key_is_not_null(strategy.unique_key, "snapshotted_data") }} and ({{ strategy.row_changed }}) + ) ), @@ -111,10 +112,11 @@ select 'update' as dbt_change_type, source_data.*, - snapshotted_data.dbt_scd_id + snapshotted_data.{{ columns.dbt_scd_id }} from updates_source_data as source_data - join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key + join snapshotted_data + on {{ unique_key_join_on(strategy.unique_key, "snapshotted_data", "source_data") }} where ( {{ strategy.row_changed }} ) @@ -128,14 +130,15 @@ select 'delete' as dbt_change_type, source_data.*, - {{ snapshot_get_time() }} as dbt_valid_from, - {{ snapshot_get_time() }} as dbt_updated_at, - {{ snapshot_get_time() }} as dbt_valid_to, - snapshotted_data.dbt_scd_id + {{ snapshot_get_time() }} as {{ columns.dbt_valid_from }}, + {{ snapshot_get_time() }} as {{ columns.dbt_updated_at }}, + {{ snapshot_get_time() }} as {{ columns.dbt_valid_to }}, + snapshotted_data.{{ columns.dbt_scd_id }} from snapshotted_data - left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key - where source_data.dbt_unique_key is null + left join deletes_source_data as source_data + on {{ unique_key_join_on(strategy.unique_key, "snapshotted_data", "source_data") }} + where {{ 
unique_key_is_null(strategy.unique_key, "source_data") }} ) {%- endif %} @@ -155,12 +158,13 @@ {% endmacro %} {% macro default__build_snapshot_table(strategy, sql) %} + {% set columns = config.get('snapshot_table_column_names') or get_snapshot_table_column_names() %} select *, - {{ strategy.scd_id }} as dbt_scd_id, - {{ strategy.updated_at }} as dbt_updated_at, - {{ strategy.updated_at }} as dbt_valid_from, - nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to + {{ strategy.scd_id }} as {{ columns.dbt_scd_id }}, + {{ strategy.updated_at }} as {{ columns.dbt_updated_at }}, + {{ strategy.updated_at }} as {{ columns.dbt_valid_from }}, + {{ get_dbt_valid_to_current(strategy, columns) }} from ( {{ sql }} ) sbq @@ -179,3 +183,77 @@ {% do return(temp_relation) %} {% endmacro %} + + +{% macro get_updated_at_column_data_type(snapshot_sql) %} + {% set snapshot_sql_column_schema = get_column_schema_from_query(snapshot_sql) %} + {% set dbt_updated_at_data_type = null %} + {% set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#} + {% set ns.dbt_updated_at_data_type = null -%} + {% for column in snapshot_sql_column_schema %} + {% if ((column.column == 'dbt_updated_at') or (column.column == 'DBT_UPDATED_AT')) %} + {% set ns.dbt_updated_at_data_type = column.dtype %} + {% endif %} + {% endfor %} + {{ return(ns.dbt_updated_at_data_type or none) }} +{% endmacro %} + + +{% macro check_time_data_types(sql) %} + {% set dbt_updated_at_data_type = get_updated_at_column_data_type(sql) %} + {% set snapshot_get_time_data_type = get_snapshot_get_time_data_type() %} + {% if snapshot_get_time_data_type is not none and dbt_updated_at_data_type is not none and snapshot_get_time_data_type != dbt_updated_at_data_type %} + {% if exceptions.warn_snapshot_timestamp_data_types %} + {{ exceptions.warn_snapshot_timestamp_data_types(snapshot_get_time_data_type, dbt_updated_at_data_type) }} + {% endif %} + {% endif %} +{% endmacro %} + + +{% macro get_dbt_valid_to_current(strategy, columns) %} + {% set dbt_valid_to_current = config.get('dbt_valid_to_current') or "null" %} + coalesce(nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}), {{dbt_valid_to_current}}) + as {{ columns.dbt_valid_to }} +{% endmacro %} + + +{% macro unique_key_fields(unique_key) %} + {% if unique_key | is_list %} + {% for key in unique_key %} + {{ key }} as dbt_unique_key_{{ loop.index }} + {%- if not loop.last %} , {%- endif %} + {% endfor %} + {% else %} + {{ unique_key }} as dbt_unique_key + {% endif %} +{% endmacro %} + + +{% macro unique_key_join_on(unique_key, identifier, from_identifier) %} + {% if unique_key | is_list %} + {% for key in unique_key %} + {{ identifier }}.dbt_unique_key_{{ loop.index }} = {{ from_identifier }}.dbt_unique_key_{{ loop.index }} + {%- if not loop.last %} and {%- endif %} + {% endfor %} + {% else %} + {{ identifier }}.dbt_unique_key = {{ from_identifier }}.dbt_unique_key + {% endif %} +{% endmacro %} + + +{% macro unique_key_is_null(unique_key, identifier) %} + {% if unique_key | is_list %} + {{ identifier }}.dbt_unique_key_1 is null + {% else %} + {{ identifier }}.dbt_unique_key is null + {% endif %} +{% endmacro %} + + +{% macro unique_key_is_not_null(unique_key, identifier) %} + {% if unique_key | is_list %} + {{ identifier }}.dbt_unique_key_1 is not null + {% else %} + {{ identifier }}.dbt_unique_key is not null + {% endif %} +{% endmacro %} diff --git a/dbt/include/global_project/macros/materializations/snapshots/snapshot.sql 
b/dbt/include/global_project/macros/materializations/snapshots/snapshot.sql index b0fe9222..0c9590b6 100644 --- a/dbt/include/global_project/macros/materializations/snapshots/snapshot.sql +++ b/dbt/include/global_project/macros/materializations/snapshots/snapshot.sql @@ -1,5 +1,4 @@ {% materialization snapshot, default %} - {%- set config = model['config'] -%} {%- set target_table = model.get('alias', model.get('name')) -%} @@ -24,37 +23,45 @@ {{ run_hooks(pre_hooks, inside_transaction=True) }} {% set strategy_macro = strategy_dispatch(strategy_name) %} - {% set strategy = strategy_macro(model, "snapshotted_data", "source_data", config, target_relation_exists) %} + {# The model['config'] parameter below is no longer used, but passing anyway for compatibility #} + {# It was a dictionary of config, instead of the config object from the context #} + {% set strategy = strategy_macro(model, "snapshotted_data", "source_data", model['config'], target_relation_exists) %} {% if not target_relation_exists %} {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %} + {% set build_or_select_sql = build_sql %} {% set final_sql = create_table_as(False, target_relation, build_sql) %} {% else %} - {{ adapter.valid_snapshot_target(target_relation) }} + {% set columns = config.get("snapshot_table_column_names") or get_snapshot_table_column_names() %} + {{ adapter.valid_snapshot_target(target_relation, columns) }} + + {% set build_or_select_sql = snapshot_staging_table(strategy, sql, target_relation) %} {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %} -- this may no-op if the database does not require column expansion {% do adapter.expand_target_column_types(from_relation=staging_table, to_relation=target_relation) %} + {% set remove_columns = ['dbt_change_type', 'DBT_CHANGE_TYPE', 'dbt_unique_key', 'DBT_UNIQUE_KEY'] %} + {% if unique_key | is_list %} + {% for key in strategy.unique_key %} + {{ remove_columns.append('dbt_unique_key_' + loop.index|string) }} + {{ remove_columns.append('DBT_UNIQUE_KEY_' + loop.index|string) }} + {% endfor %} + {% endif %} + {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation) - | rejectattr('name', 'equalto', 'dbt_change_type') - | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE') - | rejectattr('name', 'equalto', 'dbt_unique_key') - | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY') + | rejectattr('name', 'in', remove_columns) | list %} {% do create_columns(target_relation, missing_columns) %} {% set source_columns = adapter.get_columns_in_relation(staging_table) - | rejectattr('name', 'equalto', 'dbt_change_type') - | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE') - | rejectattr('name', 'equalto', 'dbt_unique_key') - | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY') + | rejectattr('name', 'in', remove_columns) | list %} {% set quoted_source_columns = [] %} @@ -71,6 +78,9 @@ {% endif %} + + {{ check_time_data_types(build_or_select_sql) }} + {% call statement('main') %} {{ final_sql }} {% endcall %} diff --git a/dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql b/dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql index 6bc50fd3..cf787e4f 100644 --- a/dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql +++ b/dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql @@ -7,15 +7,22 @@ {% macro default__snapshot_merge_sql(target, source, insert_cols) -%} {%- set insert_cols_csv = 
insert_cols | join(', ') -%} - merge into {{ target }} as DBT_INTERNAL_DEST + {%- set columns = config.get("snapshot_table_column_names") or get_snapshot_table_column_names() -%} + + merge into {{ target.render() }} as DBT_INTERNAL_DEST using {{ source }} as DBT_INTERNAL_SOURCE - on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id + on DBT_INTERNAL_SOURCE.{{ columns.dbt_scd_id }} = DBT_INTERNAL_DEST.{{ columns.dbt_scd_id }} when matched - and DBT_INTERNAL_DEST.dbt_valid_to is null + {% if config.get("dbt_valid_to_current") %} + and (DBT_INTERNAL_DEST.{{ columns.dbt_valid_to }} = {{ config.get('dbt_valid_to_current') }} or + DBT_INTERNAL_DEST.{{ columns.dbt_valid_to }} is null) + {% else %} + and DBT_INTERNAL_DEST.{{ columns.dbt_valid_to }} is null + {% endif %} and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete') then update - set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to + set {{ columns.dbt_valid_to }} = DBT_INTERNAL_SOURCE.{{ columns.dbt_valid_to }} when not matched and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert' diff --git a/dbt/include/global_project/macros/materializations/snapshots/strategies.sql b/dbt/include/global_project/macros/materializations/snapshots/strategies.sql index d22cc336..f9f5afbd 100644 --- a/dbt/include/global_project/macros/materializations/snapshots/strategies.sql +++ b/dbt/include/global_project/macros/materializations/snapshots/strategies.sql @@ -49,10 +49,13 @@ {# Core strategy definitions #} -{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %} - {% set primary_key = config['unique_key'] %} - {% set updated_at = config['updated_at'] %} - {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %} + +{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, model_config, target_exists) %} + {# The model_config parameter is no longer used, but is passed in anyway for compatibility. #} + {% set primary_key = config.get('unique_key') %} + {% set updated_at = config.get('updated_at') %} + {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes') or false %} + {% set columns = config.get("snapshot_table_column_names") or get_snapshot_table_column_names() %} {#/* The snapshot relation might not have an {{ updated_at }} value if the @@ -64,10 +67,11 @@ See https://github.com/dbt-labs/dbt-core/issues/2350 */ #} {% set row_changed_expr -%} - ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }}) + ({{ snapshotted_rel }}.{{ columns.dbt_valid_from }} < {{ current_rel }}.{{ updated_at }}) {%- endset %} - {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %} + {% set scd_args = api.Relation.scd_args(primary_key, updated_at) %} + {% set scd_id_expr = snapshot_hash_arguments(scd_args) %} {% do return({ "unique_key": primary_key, @@ -133,11 +137,12 @@ {%- endmacro %} -{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %} - {% set check_cols_config = config['check_cols'] %} - {% set primary_key = config['unique_key'] %} - {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %} - {% set updated_at = config.get('updated_at', snapshot_get_time()) %} +{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, model_config, target_exists) %} + {# The model_config parameter is no longer used, but is passed in anyway for compatibility. 
#} + {% set check_cols_config = config.get('check_cols') %} + {% set primary_key = config.get('unique_key') %} + {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes') or false %} + {% set updated_at = config.get('updated_at') or snapshot_get_time() %} {% set column_added = false %} @@ -162,7 +167,8 @@ ) {%- endset %} - {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %} + {% set scd_args = api.Relation.scd_args(primary_key, updated_at) %} + {% set scd_id_expr = snapshot_hash_arguments(scd_args) %} {% do return({ "unique_key": primary_key, diff --git a/dbt/include/global_project/macros/relations/drop.sql b/dbt/include/global_project/macros/relations/drop.sql index 58abd14d..e66511da 100644 --- a/dbt/include/global_project/macros/relations/drop.sql +++ b/dbt/include/global_project/macros/relations/drop.sql @@ -16,7 +16,7 @@ {{ drop_materialized_view(relation) }} {%- else -%} - drop {{ relation.type }} if exists {{ relation }} cascade + drop {{ relation.type }} if exists {{ relation.render() }} cascade {%- endif -%} diff --git a/dbt/include/global_project/macros/relations/materialized_view/drop.sql b/dbt/include/global_project/macros/relations/materialized_view/drop.sql index b218d0f3..8235b1c6 100644 --- a/dbt/include/global_project/macros/relations/materialized_view/drop.sql +++ b/dbt/include/global_project/macros/relations/materialized_view/drop.sql @@ -10,5 +10,5 @@ actually executes the drop, and `get_drop_sql`, which returns the template. {% macro default__drop_materialized_view(relation) -%} - drop materialized view if exists {{ relation }} cascade + drop materialized view if exists {{ relation.render() }} cascade {%- endmacro %} diff --git a/dbt/include/global_project/macros/relations/rename.sql b/dbt/include/global_project/macros/relations/rename.sql index d7f3a72e..4b913df3 100644 --- a/dbt/include/global_project/macros/relations/rename.sql +++ b/dbt/include/global_project/macros/relations/rename.sql @@ -30,6 +30,6 @@ {% macro default__rename_relation(from_relation, to_relation) -%} {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %} {% call statement('rename_relation') -%} - alter table {{ from_relation }} rename to {{ target_name }} + alter table {{ from_relation.render() }} rename to {{ target_name }} {%- endcall %} {% endmacro %} diff --git a/dbt/include/global_project/macros/relations/table/drop.sql b/dbt/include/global_project/macros/relations/table/drop.sql index d7d5941c..038ded9e 100644 --- a/dbt/include/global_project/macros/relations/table/drop.sql +++ b/dbt/include/global_project/macros/relations/table/drop.sql @@ -10,5 +10,5 @@ actually executes the drop, and `get_drop_sql`, which returns the template. 
{% macro default__drop_table(relation) -%} - drop table if exists {{ relation }} cascade + drop table if exists {{ relation.render() }} cascade {%- endmacro %} diff --git a/dbt/include/global_project/macros/relations/view/create.sql b/dbt/include/global_project/macros/relations/view/create.sql index 41cd196c..ee83befa 100644 --- a/dbt/include/global_project/macros/relations/view/create.sql +++ b/dbt/include/global_project/macros/relations/view/create.sql @@ -16,7 +16,7 @@ {%- set sql_header = config.get('sql_header', none) -%} {{ sql_header if sql_header is not none }} - create view {{ relation }} + create view {{ relation.render() }} {% set contract_config = config.get('contract') %} {% if contract_config.enforced %} {{ get_assert_columns_equivalent(sql) }} diff --git a/dbt/include/global_project/macros/relations/view/drop.sql b/dbt/include/global_project/macros/relations/view/drop.sql index 7e1924fa..84c91a36 100644 --- a/dbt/include/global_project/macros/relations/view/drop.sql +++ b/dbt/include/global_project/macros/relations/view/drop.sql @@ -10,5 +10,5 @@ actually executes the drop, and `get_drop_sql`, which returns the template. {% macro default__drop_view(relation) -%} - drop view if exists {{ relation }} cascade + drop view if exists {{ relation.render() }} cascade {%- endmacro %} diff --git a/dbt/include/global_project/macros/relations/view/replace.sql b/dbt/include/global_project/macros/relations/view/replace.sql index 1da06134..a0f0dc76 100644 --- a/dbt/include/global_project/macros/relations/view/replace.sql +++ b/dbt/include/global_project/macros/relations/view/replace.sql @@ -61,6 +61,6 @@ {% endmacro %} {% macro default__handle_existing_table(full_refresh, old_relation) %} - {{ log("Dropping relation " ~ old_relation ~ " because it is of type " ~ old_relation.type) }} + {{ log("Dropping relation " ~ old_relation.render() ~ " because it is of type " ~ old_relation.type) }} {{ adapter.drop_relation(old_relation) }} {% endmacro %} diff --git a/dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql b/dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql index c25a87f7..a3a8173b 100644 --- a/dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql +++ b/dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql @@ -22,6 +22,7 @@ {%- do default_row.update({column_name: (safe_cast("null", column_type) | trim )}) -%} {%- endfor -%} +{{ validate_fixture_rows(rows, row_number) }} {%- for row in rows -%} {%- set formatted_row = format_row(row, column_name_to_data_types) -%} @@ -93,3 +94,11 @@ union all {%- endfor -%} {{ return(formatted_row) }} {%- endmacro -%} + +{%- macro validate_fixture_rows(rows, row_number) -%} + {{ return(adapter.dispatch('validate_fixture_rows', 'dbt')(rows, row_number)) }} +{%- endmacro -%} + +{%- macro default__validate_fixture_rows(rows, row_number) -%} + {# This is an abstract method for adapter overrides as needed #} +{%- endmacro -%} diff --git a/docs/guides/record_replay.md b/docs/guides/record_replay.md index 670bb843..5bcbec06 100644 --- a/docs/guides/record_replay.md +++ b/docs/guides/record_replay.md @@ -4,24 +4,12 @@ This document describes how to implement support for dbt's Record/Replay Subsyst ## Recording and Replaying Warehouse Interaction -The goal of the Record/Replay Subsystem is to record all interactions between dbt and external systems, of which the data warehouse is the most obvious. 
Since, warehouse interaction is mediated by adapters, full Record/Replay support requires that adapters record all interactions they have with the warehouse. (It also requires that they record access to the local filesystem or external service, if that is access is not mediated by dbt itself. This includes authentication steps, opening and closing connections, beginning and ending transactions, and so forth.) +The goal of the Record/Replay Subsystem is to record all interactions between dbt and external systems, of which the data warehouse is the most important. Since warehouse interaction is mediated by adapters, full Record/Replay support requires that adapters record all interactions they have with the warehouse. It also requires that they record access to the local filesystem or external services, if that access is not mediated by dbt itself. This includes authentication steps, opening and closing connections, beginning and ending transactions, etc. -In practice, this means that any request sent to the warehouse must be recorded, along with the corresponding response. If this is done correctly, as described in the document linked in the intro, the Record portion of the Record/Replay subsystem should work as expected. - -At the time of this writing, there is only an incomplete implementation of this goal, which can be found in `dbt-adapters/dbt/adapters/record.py`. - -There are some important things to notice about this implementation. First, the QueryRecordResult class provides custom serialization methods `to_dict()` and `from_dict()`. This is necessary because the `AdapterResponse` and `Agate.Table` types cannot be automatically converted to and from JSON by the dataclass library, and JSON is the format used to persist recordings to disk and reload them for replay. - -Another important feature is that `QueryRecordParams` implements the `_matches()` method. This method allows `dbt-adapters` to customize the way that the Record/Replay determines whether a query issued by dbt matches a previously recorded query. In this case, the method performs a comparison which attempts to ignore comments and whitespace which would not affect query behavior. +A basic implementation of Record/Replay functionality, suitable for most adapters which extend the `SQLAdapter` class, can be found in `dbt-adapters/dbt/adapters/record`. The `RecordReplayHandle` and `RecordReplayCursor` classes defined there are used to intercept and record or replay all DWH interactions. They are an excellent starting point for adapters that use a database library which substantially conforms to Python's DB API v2.0 (PEP 249). Examples of how library-specific deviations from that API can be handled can be found in the dbt-postgres and dbt-snowflake repositories.
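To make the interception idea concrete, here is a minimal record-side sketch. It is hypothetical and much simpler than the actual classes in `dbt-adapters/dbt/adapters/record`: the `RecordingCursor` name, the `log` structure, and the choice to capture only `execute()` and `fetchall()` are all assumptions made for illustration.

```python
# Hypothetical sketch, not the dbt-adapters implementation: wrap a DB API v2.0
# cursor so that every request and response is captured for later replay.
from typing import Any, List, Optional, Sequence


class RecordingCursor:
    """Delegates to a real DB API v2.0 cursor while logging queries and rows."""

    def __init__(self, native_cursor: Any, log: List[dict]) -> None:
        self._cursor = native_cursor  # the warehouse library's cursor
        self._log = log  # shared list that becomes the recording

    def execute(self, sql: str, parameters: Optional[Sequence[Any]] = None) -> None:
        # Record the request before delegating it to the warehouse.
        self._log.append({"sql": sql, "parameters": parameters, "rows": None})
        if parameters is None:
            self._cursor.execute(sql)
        else:
            self._cursor.execute(sql, parameters)

    def fetchall(self) -> List[Any]:
        # Attach the response to the request that produced it.
        rows = self._cursor.fetchall()
        self._log[-1]["rows"] = rows
        return rows

    def __getattr__(self, name: str) -> Any:
        # Anything not explicitly intercepted falls through to the native cursor.
        return getattr(self._cursor, name)
```

A replay-mode counterpart would skip the delegation and serve rows straight from the log, which is why the side-effect caveats discussed below matter.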
## Misc. Notes and Suggestions -Currently, support for recording data warehouse interaction is very rudimentary, however, even rudimentary support is valuable and we should be concentrating on extending it in a way that adds the most value with the least work. Usefulness, rather than perfection, is the initial goal. - -Picking the right functions to record, at the right level of abstraction, will probably be the most important part of carrying this work forward. - Not every interaction with an external system has to be recorded in full detail, and authentication might prove to be a place where we exclude sensitive secrets from the recording. For example, since replay will not actually be communicating with the warehouse, it may be possible to exclude passwords and auth keys from the parameters recorded, and to exclude auth tokens from the results. In addition to adding an appropriate decorator to functions which communicate with external systems, you should check those functions for side-effects. Since the function's calls will be mocked out in replay mode, those side-effects will not be carried out during replay. At present, we are focusing on support for recording and comparing recordings, but this is worth keeping in mind. - -The current implementation records which dbt node issues a query, and uses that information to ensure a match during replay. The same node should issue the same query. A better model might be to monitor which connection issued which query, and associate the same connection with open/close operations, transaction starts/stops and so forth. diff --git a/pyproject.toml b/pyproject.toml index e50aa63a..3c62898d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ name = "dbt-adapters" description = "The set of adapter protocols and base functionality that supports integration with dbt-core" readme = "README.md" keywords = ["dbt", "adapter", "adapters", "database", "elt", "dbt-core", "dbt Core", "dbt Cloud", "dbt Labs"] -requires-python = ">=3.8.0" +requires-python = ">=3.9.0" authors = [ { name = "dbt Labs", email = "info@dbtlabs.com" }, ] @@ -12,24 +12,23 @@ maintainers = [ { name = "dbt Labs", email = "info@dbtlabs.com" }, ] classifiers = [ - "Development Status :: 2 - Pre-Alpha", + "Development Status :: 5 - Production/Stable", "License :: OSI Approved :: Apache Software License", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "Operating System :: POSIX :: Linux", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", ] dependencies = [ - "dbt-common>=1.3,<2.0", + "dbt-common>=1.13,<2.0", "pytz>=2015.7", # installed via dbt-common but used directly "agate>=1.0,<2.0", "mashumaro[msgpack]>=3.0,<4.0", - "protobuf>=3.0,<5.0", + "protobuf>=5.0,<6.0", "typing-extensions>=4.0,<5.0", ] [project.urls] @@ -43,9 +42,6 @@ Changelog = "https://github.com/dbt-labs/dbt-adapters/blob/main/CHANGELOG.md" requires = ["hatchling"] build-backend = "hatchling.build" -[tool.hatch.metadata] -allow-direct-references = true - [tool.hatch.version] path = "dbt/adapters/__about__.py" @@ -60,20 +56,14 @@ dependencies = [ "dbt_common @ git+https://github.com/dbt-labs/dbt-common.git", 'pre-commit==3.7.0;python_version>="3.9"', 'pre-commit==3.5.0;python_version=="3.8"', -] -[tool.hatch.envs.default.scripts] -dev = "pre-commit install" -code-quality = "pre-commit run --all-files" - -[tool.hatch.envs.unit-tests] -dependencies = [ - "dbt_common @ git+https://github.com/dbt-labs/dbt-common.git", "pytest", "pytest-dotenv", "pytest-xdist", ] -[tool.hatch.envs.unit-tests.scripts] -all = "python -m pytest {args:tests/unit}" +[tool.hatch.envs.default.scripts] +setup = "pre-commit install" +code-quality = "pre-commit run --all-files" +unit-tests = "python -m pytest {args:tests/unit}" [tool.hatch.envs.build] detached = true diff --git a/tests/unit/behavior_flag_tests/__init__.py b/tests/unit/behavior_flag_tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/behavior_flag_tests/test_behavior_flags.py
b/tests/unit/behavior_flag_tests/test_behavior_flags.py new file mode 100644 index 00000000..378d07bb --- /dev/null +++ b/tests/unit/behavior_flag_tests/test_behavior_flags.py @@ -0,0 +1,76 @@ +from typing import Any, Dict, List + +from dbt.adapters.base.impl import DEFAULT_BASE_BEHAVIOR_FLAGS +from dbt_common.behavior_flags import BehaviorFlag +from dbt_common.exceptions import DbtBaseException +import pytest + + +@pytest.fixture +def flags() -> Dict[str, Any]: + return { + "unregistered_flag": True, + "default_false_user_false_flag": False, + "default_false_user_true_flag": True, + "default_true_user_false_flag": False, + "default_true_user_true_flag": True, + } + + +@pytest.fixture +def behavior_flags() -> List[BehaviorFlag]: + return [ + { + "name": "default_false_user_false_flag", + "default": False, + "docs_url": "https://docs.com", + }, + { + "name": "default_false_user_true_flag", + "default": False, + "description": "This is a false flag.", + }, + { + "name": "default_false_user_skip_flag", + "default": False, + "description": "This is a true flag.", + }, + { + "name": "default_true_user_false_flag", + "default": True, + "description": "This is fake news.", + }, + { + "name": "default_true_user_true_flag", + "default": True, + "docs_url": "https://moar.docs.com", + }, + { + "name": "default_true_user_skip_flag", + "default": True, + "description": "This is a true flag.", + }, + ] + + +def test_register_behavior_flags(adapter): + # make sure that users cannot add arbitrary flags to this collection + with pytest.raises(DbtBaseException): + assert adapter.behavior.unregistered_flag + + # check the values of the valid behavior flags + assert not adapter.behavior.default_false_user_false_flag + assert adapter.behavior.default_false_user_true_flag + assert not adapter.behavior.default_false_user_skip_flag + assert not adapter.behavior.default_true_user_false_flag + assert adapter.behavior.default_true_user_true_flag + assert adapter.behavior.default_true_user_skip_flag + + +def test_behaviour_flags_property_empty(adapter_default_behaviour_flags): + assert adapter_default_behaviour_flags._behavior_flags == [] + + +def test_behavior_property_has_defaults(adapter_default_behaviour_flags): + for flag in DEFAULT_BASE_BEHAVIOR_FLAGS: + assert hasattr(adapter_default_behaviour_flags.behavior, flag["name"]) diff --git a/tests/unit/behavior_flag_tests/test_empty_project.py b/tests/unit/behavior_flag_tests/test_empty_project.py new file mode 100644 index 00000000..f9fd7a76 --- /dev/null +++ b/tests/unit/behavior_flag_tests/test_empty_project.py @@ -0,0 +1,87 @@ +from types import SimpleNamespace +from typing import Any, Dict, List + +from dbt_common.behavior_flags import BehaviorFlag +from dbt_common.exceptions import DbtBaseException +import pytest + +from dbt.adapters.contracts.connection import AdapterRequiredConfig, QueryComment + +from tests.unit.fixtures.credentials import CredentialsStub + + +@pytest.fixture +def flags() -> Dict[str, Any]: + return { + "unregistered_flag": True, + "default_false_user_false_flag": False, + "default_false_user_true_flag": True, + "default_true_user_false_flag": False, + "default_true_user_true_flag": True, + } + + +@pytest.fixture +def config(flags) -> AdapterRequiredConfig: + raw_config = { + "credentials": CredentialsStub("test_database", "test_schema"), + "profile_name": "test_profile", + "target_name": "test_target", + "threads": 4, + "project_name": "test_project", + "query_comment": QueryComment(), + "cli_vars": {}, + "target_path": "path/to/nowhere", 
+ "log_cache_events": False, + } + return SimpleNamespace(**raw_config) + + +@pytest.fixture +def behavior_flags() -> List[BehaviorFlag]: + return [ + { + "name": "default_false_user_false_flag", + "default": False, + "docs_url": "https://docs.com", + }, + { + "name": "default_false_user_true_flag", + "default": False, + "description": "This is a false flag.", + }, + { + "name": "default_false_user_skip_flag", + "default": False, + "description": "This is a true flag.", + }, + { + "name": "default_true_user_false_flag", + "default": True, + "description": "This is fake news.", + }, + { + "name": "default_true_user_true_flag", + "default": True, + "docs_url": "https://moar.docs.com", + }, + { + "name": "default_true_user_skip_flag", + "default": True, + "description": "This is a true flag.", + }, + ] + + +def test_register_behavior_flags(adapter): + # make sure that users cannot add arbitrary flags to this collection + with pytest.raises(DbtBaseException): + assert adapter.behavior.unregistered_flag + + # check the values of the valid behavior flags + assert not adapter.behavior.default_false_user_false_flag + assert not adapter.behavior.default_false_user_true_flag + assert not adapter.behavior.default_false_user_skip_flag + assert adapter.behavior.default_true_user_false_flag + assert adapter.behavior.default_true_user_true_flag + assert adapter.behavior.default_true_user_skip_flag diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py new file mode 100644 index 00000000..225bdf57 --- /dev/null +++ b/tests/unit/conftest.py @@ -0,0 +1,7 @@ +from tests.unit.fixtures import ( + adapter, + adapter_default_behaviour_flags, + behavior_flags, + config, + flags, +) diff --git a/tests/unit/fixtures/__init__.py b/tests/unit/fixtures/__init__.py new file mode 100644 index 00000000..caa1448f --- /dev/null +++ b/tests/unit/fixtures/__init__.py @@ -0,0 +1,7 @@ +from tests.unit.fixtures.adapter import ( + adapter, + adapter_default_behaviour_flags, + behavior_flags, + config, + flags, +) diff --git a/tests/unit/fixtures/adapter.py b/tests/unit/fixtures/adapter.py new file mode 100644 index 00000000..3730a083 --- /dev/null +++ b/tests/unit/fixtures/adapter.py @@ -0,0 +1,151 @@ +from multiprocessing import get_context +from types import SimpleNamespace +from typing import Any, Dict, List + +import agate +from dbt_common.behavior_flags import BehaviorFlag +import pytest + +from dbt.adapters.base.column import Column +from dbt.adapters.base.impl import BaseAdapter +from dbt.adapters.base.relation import BaseRelation +from dbt.adapters.contracts.connection import AdapterRequiredConfig, QueryComment + +from tests.unit.fixtures.connection_manager import ConnectionManagerStub +from tests.unit.fixtures.credentials import CredentialsStub + + +class BaseAdapterStub(BaseAdapter): + """ + A stub for an adapter that uses the cache as the database + """ + + ConnectionManager = ConnectionManagerStub + + ### + # Abstract methods for database-specific values, attributes, and types + ### + @classmethod + def date_function(cls) -> str: + return "date_function" + + @classmethod + def is_cancelable(cls) -> bool: + return False + + def list_schemas(self, database: str) -> List[str]: + return list(self.cache.schemas) + + ### + # Abstract methods about relations + ### + def drop_relation(self, relation: BaseRelation) -> None: + self.cache_dropped(relation) + + def truncate_relation(self, relation: BaseRelation) -> None: + self.cache_dropped(relation) + + def rename_relation(self, from_relation: BaseRelation, to_relation: 
BaseRelation) -> None: + self.cache_renamed(from_relation, to_relation) + + def get_columns_in_relation(self, relation: BaseRelation) -> List[Column]: + # there's no database, so these need to be added as kwargs in the existing_relations fixture + return relation.columns + + def expand_column_types(self, goal: BaseRelation, current: BaseRelation) -> None: + # there's no database, so these need to be added as kwargs in the existing_relations fixture + object.__setattr__(current, "columns", goal.columns) + + def list_relations_without_caching(self, schema_relation: BaseRelation) -> List[BaseRelation]: + # there's no database, so use the cache as the database + return self.cache.get_relations(schema_relation.database, schema_relation.schema) + + ### + # ODBC FUNCTIONS -- these should not need to change for every adapter, + # although some adapters may override them + ### + def create_schema(self, relation: BaseRelation): + # there's no database, this happens implicitly by adding a relation to the cache + pass + + def drop_schema(self, relation: BaseRelation): + for each_relation in self.cache.get_relations(relation.database, relation.schema): + self.cache_dropped(each_relation) + + @classmethod + def quote(cls, identifier: str) -> str: + quote_char = "" + return f"{quote_char}{identifier}{quote_char}" + + ### + # Conversions: These must be implemented by concrete implementations, for + # converting agate types into their sql equivalents. + ### + @classmethod + def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str: + return "str" + + @classmethod + def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str: + return "float" + + @classmethod + def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str: + return "bool" + + @classmethod + def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str: + return "datetime" + + @classmethod + def convert_date_type(cls, *args, **kwargs): + return "date" + + @classmethod + def convert_time_type(cls, *args, **kwargs): + return "time" + + +@pytest.fixture +def adapter(config, behavior_flags) -> BaseAdapter: + + class BaseAdapterBehaviourFlagStub(BaseAdapterStub): + @property + def _behavior_flags(self) -> List[BehaviorFlag]: + return behavior_flags + + return BaseAdapterBehaviourFlagStub(config, get_context("spawn")) + + +@pytest.fixture +def adapter_default_behaviour_flags(config) -> BaseAdapter: + return BaseAdapterStub(config, get_context("spawn")) + + +@pytest.fixture +def config(flags) -> AdapterRequiredConfig: + raw_config = { + "credentials": CredentialsStub("test_database", "test_schema"), + "profile_name": "test_profile", + "target_name": "test_target", + "threads": 4, + "project_name": "test_project", + "query_comment": QueryComment(), + "cli_vars": {}, + "target_path": "path/to/nowhere", + "log_cache_events": False, + "flags": flags, + } + return SimpleNamespace(**raw_config) + + +@pytest.fixture +def flags() -> Dict[str, Any]: + # this is the flags collection in dbt_project.yaml + return {} + + +@pytest.fixture +def behavior_flags() -> List[BehaviorFlag]: + # this is the collection of behavior flags for a specific adapter + return [] diff --git a/tests/unit/fixtures/connection_manager.py b/tests/unit/fixtures/connection_manager.py new file mode 100644 index 00000000..8b353fbe --- /dev/null +++ b/tests/unit/fixtures/connection_manager.py @@ -0,0 +1,58 @@ +from contextlib import contextmanager +from typing import ContextManager, List, Optional, Tuple + +import agate + +from 
dbt.adapters.base.connections import BaseConnectionManager +from dbt.adapters.contracts.connection import AdapterResponse, Connection, ConnectionState + + +class ConnectionManagerStub(BaseConnectionManager): + """ + A stub for a connection manager that does not connect to a database + """ + + raised_exceptions: List[Exception] + + @contextmanager + def exception_handler(self, sql: str) -> ContextManager: # type: ignore + # catch all exceptions and put them on this class for inspection in tests + try: + yield + except Exception as exc: + self.raised_exceptions.append(exc) + finally: + pass + + def cancel_open(self) -> Optional[List[str]]: + names = [] + for connection in self.thread_connections.values(): + if connection.state == ConnectionState.OPEN: + connection.state = ConnectionState.CLOSED + if name := connection.name: + names.append(name) + return names + + @classmethod + def open(cls, connection: Connection) -> Connection: + # there's no database, so just change the state + connection.state = ConnectionState.OPEN + return connection + + def begin(self) -> None: + # there's no database, so there are no transactions + pass + + def commit(self) -> None: + # there's no database, so there are no transactions + pass + + def execute( + self, + sql: str, + auto_begin: bool = False, + fetch: bool = False, + limit: Optional[int] = None, + ) -> Tuple[AdapterResponse, agate.Table]: + # there's no database, so just return the sql + return AdapterResponse(_message="", code=sql), agate.Table([]) diff --git a/tests/unit/fixtures/credentials.py b/tests/unit/fixtures/credentials.py new file mode 100644 index 00000000..88817f6b --- /dev/null +++ b/tests/unit/fixtures/credentials.py @@ -0,0 +1,13 @@ +from dbt.adapters.contracts.connection import Credentials + + +class CredentialsStub(Credentials): + """ + A stub for a database credentials that does not connect to a database + """ + + def type(self) -> str: + return "test" + + def _connection_keys(self): + return {"database": self.database, "schema": self.schema} diff --git a/tests/unit/test_adapter_telemetry.py b/tests/unit/test_adapter_telemetry.py new file mode 100644 index 00000000..1d5c4911 --- /dev/null +++ b/tests/unit/test_adapter_telemetry.py @@ -0,0 +1,15 @@ +import dbt.adapters.__about__ + +from dbt.adapters.base.impl import BaseAdapter +from dbt.adapters.base.relation import AdapterTrackingRelationInfo + + +def test_telemetry_returns(): + res = BaseAdapter.get_adapter_run_info({}) + + assert res.adapter_name == "base" + assert res.base_adapter_version == dbt.adapters.__about__.version + assert res.adapter_version == "" + assert res.model_adapter_details == {} + + assert type(res) is AdapterTrackingRelationInfo diff --git a/tests/unit/test_base_adapter.py b/tests/unit/test_base_adapter.py index 95fe5ae2..5fa109b7 100644 --- a/tests/unit/test_base_adapter.py +++ b/tests/unit/test_base_adapter.py @@ -39,6 +39,14 @@ def connection_manager(self): [{"type": "foreign_key", "expression": "other_table (c1)"}], ["column_name integer references other_table (c1)"], ), + ( + [{"type": "foreign_key", "to": "other_table", "to_columns": ["c1"]}], + ["column_name integer references other_table (c1)"], + ), + ( + [{"type": "foreign_key", "to": "other_table", "to_columns": ["c1", "c2"]}], + ["column_name integer references other_table (c1, c2)"], + ), ([{"type": "check"}, {"type": "unique"}], ["column_name integer unique"]), ([{"type": "custom", "expression": "-- noop"}], ["column_name integer -- noop"]), ] @@ -176,6 +184,30 @@ def 
test_render_raw_columns_constraints_unsupported( ], ["constraint test_name foreign key (c1, c2) references other_table (c1)"], ), + ( + [ + { + "type": "foreign_key", + "columns": ["c1", "c2"], + "to": "other_table", + "to_columns": ["c1"], + "name": "test_name", + } + ], + ["constraint test_name foreign key (c1, c2) references other_table (c1)"], + ), + ( + [ + { + "type": "foreign_key", + "columns": ["c1", "c2"], + "to": "other_table", + "to_columns": ["c1", "c2"], + "name": "test_name", + } + ], + ["constraint test_name foreign key (c1, c2) references other_table (c1, c2)"], + ), ] @pytest.mark.parametrize("constraints,expected_rendered_constraints", model_constraints) diff --git a/tests/unit/test_relation.py b/tests/unit/test_relation.py index a1c01c5c..6d835e0d 100644 --- a/tests/unit/test_relation.py +++ b/tests/unit/test_relation.py @@ -1,8 +1,9 @@ -from dataclasses import replace - +from dataclasses import dataclass, replace +from datetime import datetime import pytest from dbt.adapters.base import BaseRelation +from dbt.adapters.base.relation import EventTimeFilter from dbt.adapters.contracts.relation import RelationType @@ -79,3 +80,90 @@ def test_render_limited(limit, require_alias, expected_result): actual_result = my_relation.render_limited() assert actual_result == expected_result assert str(my_relation) == expected_result + + +@pytest.mark.parametrize( + "event_time_filter,require_alias,expected_result", + [ + (None, False, '"test_database"."test_schema"."test_identifier"'), + ( + EventTimeFilter(field_name="column"), + False, + '"test_database"."test_schema"."test_identifier"', + ), + (None, True, '"test_database"."test_schema"."test_identifier"'), + ( + EventTimeFilter(field_name="column"), + True, + '"test_database"."test_schema"."test_identifier"', + ), + ( + EventTimeFilter(field_name="column", start=datetime(year=2020, month=1, day=1)), + False, + """(select * from "test_database"."test_schema"."test_identifier" where column >= '2020-01-01 00:00:00')""", + ), + ( + EventTimeFilter(field_name="column", start=datetime(year=2020, month=1, day=1)), + True, + """(select * from "test_database"."test_schema"."test_identifier" where column >= '2020-01-01 00:00:00') _dbt_et_filter_subq_test_identifier""", + ), + ( + EventTimeFilter(field_name="column", end=datetime(year=2020, month=1, day=1)), + False, + """(select * from "test_database"."test_schema"."test_identifier" where column < '2020-01-01 00:00:00')""", + ), + ( + EventTimeFilter( + field_name="column", + start=datetime(year=2020, month=1, day=1), + end=datetime(year=2020, month=1, day=2), + ), + False, + """(select * from "test_database"."test_schema"."test_identifier" where column >= '2020-01-01 00:00:00' and column < '2020-01-02 00:00:00')""", + ), + ], +) +def test_render_event_time_filtered(event_time_filter, require_alias, expected_result): + my_relation = BaseRelation.create( + database="test_database", + schema="test_schema", + identifier="test_identifier", + event_time_filter=event_time_filter, + require_alias=require_alias, + ) + actual_result = my_relation.render_event_time_filtered() + assert actual_result == expected_result + assert str(my_relation) == expected_result + + +def test_render_event_time_filtered_and_limited(): + my_relation = BaseRelation.create( + database="test_database", + schema="test_schema", + identifier="test_identifier", + event_time_filter=EventTimeFilter( + field_name="column", + start=datetime(year=2020, month=1, day=1), + end=datetime(year=2020, month=1, day=2), + ), + limit=0, + 
require_alias=False, + ) + expected_result = """(select * from (select * from "test_database"."test_schema"."test_identifier" where false limit 0) where column >= '2020-01-01 00:00:00' and column < '2020-01-02 00:00:00')""" + + actual_result = my_relation.render_event_time_filtered(my_relation.render_limited()) + assert actual_result == expected_result + assert str(my_relation) == expected_result + + +def test_create_ephemeral_from_uses_identifier(): + @dataclass + class Node: + """Dummy implementation of RelationConfig protocol""" + + name: str + identifier: str + + node = Node(name="name_should_not_be_used", identifier="test") + ephemeral_relation = BaseRelation.create_ephemeral_from(node) + assert str(ephemeral_relation) == "__dbt__cte__test"
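To close, a short usage sketch distilling what the new `EventTimeFilter` tests above assert; the database, schema, identifier, and filter values are taken directly from those test cases, and printing the rendered SQL is purely for illustration.

```python
# Usage sketch derived from the render_event_time_filtered test cases above.
from datetime import datetime

from dbt.adapters.base import BaseRelation
from dbt.adapters.base.relation import EventTimeFilter

relation = BaseRelation.create(
    database="test_database",
    schema="test_schema",
    identifier="test_identifier",
    event_time_filter=EventTimeFilter(
        field_name="column",
        start=datetime(2020, 1, 1),
        end=datetime(2020, 1, 2),
    ),
    require_alias=False,
)

# Renders the relation as a filtering subquery:
# (select * from "test_database"."test_schema"."test_identifier"
#  where column >= '2020-01-01 00:00:00' and column < '2020-01-02 00:00:00')
print(relation.render_event_time_filtered())
```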