diff --git a/.changes/1.10.3.md b/.changes/1.10.3.md new file mode 100644 index 00000000..29844ce2 --- /dev/null +++ b/.changes/1.10.3.md @@ -0,0 +1 @@ +## dbt-adapters 1.10.3 - October 29, 2024 diff --git a/.changes/1.10.4.md b/.changes/1.10.4.md new file mode 100644 index 00000000..f8bbd420 --- /dev/null +++ b/.changes/1.10.4.md @@ -0,0 +1 @@ +## dbt-adapters 1.10.4 - November 11, 2024 diff --git a/.changes/1.11.0.md b/.changes/1.11.0.md new file mode 100644 index 00000000..fbe85222 --- /dev/null +++ b/.changes/1.11.0.md @@ -0,0 +1,12 @@ +## dbt-adapters 1.11.0 - November 11, 2024 + +### Features + +- Use a behavior flag to gate microbatch functionality (instead of an environment variable) ([#327](https://github.com/dbt-labs/dbt-adapters/issues/327)) + +### Under the Hood + +- Add `query_id` to SQLQueryStatus ([#342](https://github.com/dbt-labs/dbt-adapters/issues/342)) + +### Contributors +- [@cmcarthur](https://github.com/cmcarthur) ([#342](https://github.com/dbt-labs/dbt-adapters/issues/342)) diff --git a/.changes/1.7.1.md b/.changes/1.7.1.md new file mode 100644 index 00000000..4acda7f8 --- /dev/null +++ b/.changes/1.7.1.md @@ -0,0 +1,5 @@ +## dbt-adapters 1.7.1 - October 15, 2024 + +### Features + +- Enable setting current value of dbt_valid_to ([#320](https://github.com/dbt-labs/dbt-adapters/issues/320)) diff --git a/.changes/1.7.2.md b/.changes/1.7.2.md new file mode 100644 index 00000000..b17bf217 --- /dev/null +++ b/.changes/1.7.2.md @@ -0,0 +1,16 @@ +## dbt-adapters 1.7.2 - October 21, 2024 + +### Breaking Changes + +- Drop support for Python 3.8 ([#332](https://github.com/dbt-labs/dbt-adapters/issues/332)) + +### Features + +- Allows unique_key for snapshots to take a list ([#181](https://github.com/dbt-labs/dbt-adapters/issues/181)) + +### Fixes + +- Always validate an incremental model's `incremental_strategy` ([#330](https://github.com/dbt-labs/dbt-adapters/issues/330)) + +### Contributors +- [@agpapa](https://github.com/agpapa) ([#181](https://github.com/dbt-labs/dbt-adapters/issues/181)) diff --git a/.changes/1.8.0.md b/.changes/1.8.0.md new file mode 100644 index 00000000..f73a0300 --- /dev/null +++ b/.changes/1.8.0.md @@ -0,0 +1,9 @@ +## dbt-adapters 1.8.0 - October 29, 2024 + +### Fixes + +- Always make behavior flags available for evaluation ([#338](https://github.com/dbt-labs/dbt-adapters/issues/338)) + +### Under the Hood + +- Add adapter telemetry. ([#301](https://github.com/dbt-labs/dbt-adapters/issues/301)) diff --git a/.changes/1.9.0.md b/.changes/1.9.0.md new file mode 100644 index 00000000..cde85d7c --- /dev/null +++ b/.changes/1.9.0.md @@ -0,0 +1,6 @@ +## dbt-adapters 1.9.0 - November 13, 2024 + +### Fixes + +- Negate the check for microbatch behavior flag in determining builtins ([#349](https://github.com/dbt-labs/dbt-adapters/issues/349)) +- Move require_batched_execution_for_custom_microbatch_strategy flag to global ([#351](https://github.com/dbt-labs/dbt-adapters/issues/351)) diff --git a/.changes/unreleased/Features-20241104-120653.yaml b/.changes/unreleased/Features-20241104-120653.yaml new file mode 100644 index 00000000..a85e1f7f --- /dev/null +++ b/.changes/unreleased/Features-20241104-120653.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Add new hard_deletes="new_record" mode for snapshots. 
+time: 2024-11-04T12:06:53.225939-05:00 +custom: + Author: peterallenwebb + Issue: "317" diff --git a/.changes/unreleased/Features-20241120-112806.yaml b/.changes/unreleased/Features-20241120-112806.yaml new file mode 100644 index 00000000..a135f946 --- /dev/null +++ b/.changes/unreleased/Features-20241120-112806.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Introduce new Capability for MicrobatchConcurrency support +time: 2024-11-20T11:28:06.258507-05:00 +custom: + Author: michelleark + Issue: "359" diff --git a/.github/actions/publish-pypi/action.yml b/.github/actions/publish-pypi/action.yml index 25bc3a8d..22e6773b 100644 --- a/.github/actions/publish-pypi/action.yml +++ b/.github/actions/publish-pypi/action.yml @@ -24,6 +24,6 @@ runs: shell: bash - name: Publish artifacts to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 + uses: pypa/gh-action-pypi-publish@release/v1.11 with: repository-url: ${{ inputs.repository-url }} diff --git a/.github/actions/publish-results/action.yml b/.github/actions/publish-results/action.yml index 0d5cb7e6..7c73a94f 100644 --- a/.github/actions/publish-results/action.yml +++ b/.github/actions/publish-results/action.yml @@ -5,7 +5,7 @@ inputs: description: File type for file name stub (e.g. "unit-tests") required: true python-version: - description: Python version for the file name stub (e.g. "3.8") + description: Python version for the file name stub (e.g. "3.9") required: true source-file: description: File to be uploaded diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index b61c83d7..b4ac615d 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -23,7 +23,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12"] steps: - name: Check out repository diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0f2a03f7..b7835274 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -18,19 +18,19 @@ repos: - id: dbt-core-in-adapters-check - repo: https://github.com/psf/black - rev: 24.4.0 + rev: 24.8.0 hooks: - id: black args: - --line-length=99 - - --target-version=py38 - --target-version=py39 - --target-version=py310 - --target-version=py311 + - --target-version=py312 - --force-exclude=dbt/adapters/events/adapter_types_pb2.py - repo: https://github.com/pycqa/flake8 - rev: 7.0.0 + rev: 7.1.1 hooks: - id: flake8 exclude: dbt/adapters/events/adapter_types_pb2.py|tests/functional/ @@ -41,7 +41,7 @@ repos: - --per-file-ignores=*/__init__.py:F401,*/conftest.py:F401 - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.9.0 + rev: v1.11.2 hooks: - id: mypy exclude: dbt/adapters/events/adapter_types_pb2.py|dbt-tests-adapter/dbt/__init__.py diff --git a/CHANGELOG.md b/CHANGELOG.md index dfb37ad5..87cca898 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,18 +5,75 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html), and is generated by [Changie](https://github.com/miniscruff/changie). 
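Note on the unreleased Features-20241104 entry above: the new hard_deletes="new_record" snapshot mode is resolved against the legacy invalidate_hard_deletes config by the get_hard_deletes_behavior classmethod added to dbt/adapters/base/impl.py further down in this diff. A minimal standalone sketch of that precedence follows; the resolve_hard_deletes helper and the example config dicts are illustrative only, not part of the diff, and the exception class is imported from dbt_common as impl.py does.

from dbt_common.exceptions import DbtValidationError


def resolve_hard_deletes(config: dict) -> str:
    # Mirrors the precedence in BaseAdapter.get_hard_deletes_behavior below:
    # setting both the legacy flag and the new enum is an error, the legacy
    # flag still maps to "invalidate", and the default remains "ignore".
    invalidate_hard_deletes = config.get("invalidate_hard_deletes", None)
    hard_deletes = config.get("hard_deletes", None)

    if invalidate_hard_deletes is not None and hard_deletes is not None:
        raise DbtValidationError(
            "You cannot set both the invalidate_hard_deletes and hard_deletes "
            "config properties on the same snapshot."
        )

    if invalidate_hard_deletes or hard_deletes == "invalidate":
        return "invalidate"
    elif hard_deletes == "new_record":
        return "new_record"
    elif hard_deletes is None or hard_deletes == "ignore":
        return "ignore"
    raise DbtValidationError("Invalid setting for property hard_deletes.")


assert resolve_hard_deletes({}) == "ignore"
assert resolve_hard_deletes({"invalidate_hard_deletes": True}) == "invalidate"
assert resolve_hard_deletes({"hard_deletes": "new_record"}) == "new_record"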
-## dbt-adapters 1.10.2 - October 01, 2024 +## dbt-adapters 1.11.0 - November 11, 2024 + +### Features + +- Use a behavior flag to gate microbatch functionality (instead of an environment variable) ([#327](https://github.com/dbt-labs/dbt-adapters/issues/327)) ### Under the Hood -- dbt-tests-adapters: Add required begin to microbatch model config to BaseMicrobatch test ([#315](https://github.com/dbt-labs/dbt-adapters/issues/315)) +- Add `query_id` to SQLQueryStatus ([#342](https://github.com/dbt-labs/dbt-adapters/issues/342)) +### Contributors +- [@cmcarthur](https://github.com/cmcarthur) ([#342](https://github.com/dbt-labs/dbt-adapters/issues/342)) +## dbt-adapters 1.10.4 - November 11, 2024 + +## dbt-adapters 1.10.3 - October 29, 2024 + +## dbt-adapters 1.10.2 - October 01, 2024 + +### Under the Hood + +- dbt-tests-adapters: Add required begin to microbatch model config to BaseMicrobatch test ([#315](https://github.com/dbt-labs/dbt-adapters/issues/315)) ## dbt-adapters 1.10.1 - September 16, 2024 ## dbt-adapters 1.10.0 - September 12, 2024 +## dbt-adapters 1.9.0 - November 13, 2024 + +### Fixes + +- Negate the check for microbatch behavior flag in determining builtins ([#349](https://github.com/dbt-labs/dbt-adapters/issues/349)) +- Move require_batched_execution_for_custom_microbatch_strategy flag to global ([#351](https://github.com/dbt-labs/dbt-adapters/issues/351)) + + + +## dbt-adapters 1.8.0 - October 29, 2024 + +### Fixes + +- Always make behavior flags available for evaluation ([#338](https://github.com/dbt-labs/dbt-adapters/issues/338)) + +### Under the Hood + +- Add adapter telemetry. ([#301](https://github.com/dbt-labs/dbt-adapters/issues/301)) + +## dbt-adapters 1.7.2 - October 21, 2024 + +### Breaking Changes + +- Drop support for Python 3.8 ([#332](https://github.com/dbt-labs/dbt-adapters/issues/332)) + +### Features + +- Allows unique_key for snapshots to take a list ([#181](https://github.com/dbt-labs/dbt-adapters/issues/181)) + +### Fixes + +- Always validate an incremental model's `incremental_strategy` ([#330](https://github.com/dbt-labs/dbt-adapters/issues/330)) + +### Contributors +- [@agpapa](https://github.com/agpapa) ([#181](https://github.com/dbt-labs/dbt-adapters/issues/181)) + +## dbt-adapters 1.7.1 - October 15, 2024 + +### Features + +- Enable setting current value of dbt_valid_to ([#320](https://github.com/dbt-labs/dbt-adapters/issues/320)) + ## dbt-adapters 1.7.0 - September 19, 2024 ### Features diff --git a/README.md b/README.md index 7bafae02..f4cf0c5a 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ dbt logo

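Before the README changes continue below, one note on the 1.7.2 entry above ("Allows unique_key for snapshots to take a list"): the BaseRelation.scd_args classmethod added to dbt/adapters/base/relation.py later in this diff flattens either form into the argument list used when building dbt_scd_id. A quick illustration; the column names are hypothetical.

from dbt.adapters.base.relation import BaseRelation

# A single-column unique_key behaves as before.
assert BaseRelation.scd_args("id", "updated_at") == ["id", "updated_at"]

# A list-valued unique_key is flattened before updated_at is appended.
assert BaseRelation.scd_args(["id", "first_name"], "updated_at") == [
    "id",
    "first_name",
    "updated_at",
]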
-# dbt-tests-adapter +# dbt-adapters This package is responsible for: @@ -10,9 +10,11 @@ This package is responsible for: - caching information from databases - determining how relations are defined -There are two major adapter types: base and sql +This repo also contains our testing suite, which is used to test adapter functionality + +# Adapters -# Directories +There are two major adapter types: base and sql ## `base` diff --git a/dbt-tests-adapter/dbt/tests/__about__.py b/dbt-tests-adapter/dbt/tests/__about__.py index 8c657eec..08e0d06b 100644 --- a/dbt-tests-adapter/dbt/tests/__about__.py +++ b/dbt-tests-adapter/dbt/tests/__about__.py @@ -1 +1 @@ -version = "1.10.2" +version = "1.10.4" diff --git a/dbt-tests-adapter/dbt/tests/adapter/basic/files.py b/dbt-tests-adapter/dbt/tests/adapter/basic/files.py index 751b01a0..d0253a53 100644 --- a/dbt-tests-adapter/dbt/tests/adapter/basic/files.py +++ b/dbt-tests-adapter/dbt/tests/adapter/basic/files.py @@ -186,6 +186,10 @@ {{ config(materialized="incremental") }} """ +config_materialized_incremental_invalid_strategy = """ + {{ config(materialized="incremental", incremental_strategy="bad_strategy") }} +""" + config_materialized_var = """ {{ config(materialized=var("materialized_var", "table"))}} """ @@ -217,3 +221,6 @@ ephemeral_view_sql = config_materialized_view + model_ephemeral ephemeral_table_sql = config_materialized_table + model_ephemeral incremental_sql = config_materialized_incremental + model_incremental +incremental_invalid_strategy_sql = ( + config_materialized_incremental_invalid_strategy + model_incremental +) diff --git a/dbt-tests-adapter/dbt/tests/adapter/basic/test_incremental.py b/dbt-tests-adapter/dbt/tests/adapter/basic/test_incremental.py index fe04a5a1..57cc4db9 100644 --- a/dbt-tests-adapter/dbt/tests/adapter/basic/test_incremental.py +++ b/dbt-tests-adapter/dbt/tests/adapter/basic/test_incremental.py @@ -86,6 +86,45 @@ def test_incremental_not_schema_change(self, project): assert run_result == RunStatus.Success +class BaseIncrementalBadStrategy: + @pytest.fixture(scope="class") + def project_config_update(self): + return {"name": "incremental"} + + @pytest.fixture(scope="class") + def models(self): + return { + "incremental.sql": files.incremental_invalid_strategy_sql, + "schema.yml": files.schema_base_yml, + } + + @pytest.fixture(scope="class") + def seeds(self): + return {"base.csv": files.seeds_base_csv, "added.csv": files.seeds_added_csv} + + @pytest.fixture(autouse=True) + def clean_up(self, project): + yield + with project.adapter.connection_named("__test"): + relation = project.adapter.Relation.create( + database=project.database, schema=project.test_schema + ) + project.adapter.drop_schema(relation) + + def test_incremental_invalid_strategy(self, project): + # seed command + results = run_dbt(["seed"]) + assert len(results) == 2 + + # try to run the incremental model, it should fail on the first attempt + results = run_dbt(["run"], expect_pass=False) + assert len(results.results) == 1 + assert ( + 'dbt could not find an incremental strategy macro with the name "get_incremental_bad_strategy_sql"' + in results.results[0].message + ) + + class Testincremental(BaseIncremental): pass diff --git a/dbt-tests-adapter/dbt/tests/adapter/incremental/test_incremental_microbatch.py b/dbt-tests-adapter/dbt/tests/adapter/incremental/test_incremental_microbatch.py index 5bbabbe1..34078ac3 100644 --- a/dbt-tests-adapter/dbt/tests/adapter/incremental/test_incremental_microbatch.py +++ 
b/dbt-tests-adapter/dbt/tests/adapter/incremental/test_incremental_microbatch.py @@ -1,6 +1,4 @@ -import os from pprint import pformat -from unittest import mock import pytest @@ -63,7 +61,6 @@ def assert_row_count(self, project, relation_name: str, expected_row_count: int) assert len(result) == expected_row_count, f"{relation_name}:{pformat(result)}" - @mock.patch.dict(os.environ, {"DBT_EXPERIMENTAL_MICROBATCH": "True"}) def test_run_with_event_time(self, project, insert_two_rows_sql): # initial run -- backfills all data with patch_microbatch_end_time("2020-01-03 13:57:00"): diff --git a/dbt-tests-adapter/dbt/tests/adapter/simple_snapshot/new_record_mode.py b/dbt-tests-adapter/dbt/tests/adapter/simple_snapshot/new_record_mode.py new file mode 100644 index 00000000..c50f0ff9 --- /dev/null +++ b/dbt-tests-adapter/dbt/tests/adapter/simple_snapshot/new_record_mode.py @@ -0,0 +1,225 @@ +import pytest + +from dbt.tests.util import check_relations_equal, run_dbt + +_seed_new_record_mode = """ +create table {database}.{schema}.seed ( + id INTEGER, + first_name VARCHAR(50), + last_name VARCHAR(50), + email VARCHAR(50), + gender VARCHAR(50), + ip_address VARCHAR(20), + updated_at TIMESTAMP WITHOUT TIME ZONE +); + +create table {database}.{schema}.snapshot_expected ( + id INTEGER, + first_name VARCHAR(50), + last_name VARCHAR(50), + email VARCHAR(50), + gender VARCHAR(50), + ip_address VARCHAR(20), + + -- snapshotting fields + updated_at TIMESTAMP WITHOUT TIME ZONE, + dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, + dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, + dbt_scd_id TEXT, + dbt_updated_at TIMESTAMP WITHOUT TIME ZONE, + dbt_is_deleted TEXT +); + + +-- seed inserts +-- use the same email for two users to verify that duplicated check_cols values +-- are handled appropriately +insert into {database}.{schema}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values +(1, 'Judith', 'Kennedy', '(not provided)', 'Female', '54.60.24.128', '2015-12-24 12:19:28'), +(2, 'Arthur', 'Kelly', '(not provided)', 'Male', '62.56.24.215', '2015-10-28 16:22:15'), +(3, 'Rachel', 'Moreno', 'rmoreno2@msu.edu', 'Female', '31.222.249.23', '2016-04-05 02:05:30'), +(4, 'Ralph', 'Turner', 'rturner3@hp.com', 'Male', '157.83.76.114', '2016-08-08 00:06:51'), +(5, 'Laura', 'Gonzales', 'lgonzales4@howstuffworks.com', 'Female', '30.54.105.168', '2016-09-01 08:25:38'), +(6, 'Katherine', 'Lopez', 'klopez5@yahoo.co.jp', 'Female', '169.138.46.89', '2016-08-30 18:52:11'), +(7, 'Jeremy', 'Hamilton', 'jhamilton6@mozilla.org', 'Male', '231.189.13.133', '2016-07-17 02:09:46'), +(8, 'Heather', 'Rose', 'hrose7@goodreads.com', 'Female', '87.165.201.65', '2015-12-29 22:03:56'), +(9, 'Gregory', 'Kelly', 'gkelly8@trellian.com', 'Male', '154.209.99.7', '2016-03-24 21:18:16'), +(10, 'Rachel', 'Lopez', 'rlopez9@themeforest.net', 'Female', '237.165.82.71', '2016-08-20 15:44:49'), +(11, 'Donna', 'Welch', 'dwelcha@shutterfly.com', 'Female', '103.33.110.138', '2016-02-27 01:41:48'), +(12, 'Russell', 'Lawrence', 'rlawrenceb@qq.com', 'Male', '189.115.73.4', '2016-06-11 03:07:09'), +(13, 'Michelle', 'Montgomery', 'mmontgomeryc@scientificamerican.com', 'Female', '243.220.95.82', '2016-06-18 16:27:19'), +(14, 'Walter', 'Castillo', 'wcastillod@pagesperso-orange.fr', 'Male', '71.159.238.196', '2016-10-06 01:55:44'), +(15, 'Robin', 'Mills', 'rmillse@vkontakte.ru', 'Female', '172.190.5.50', '2016-10-31 11:41:21'), +(16, 'Raymond', 'Holmes', 'rholmesf@usgs.gov', 'Male', '148.153.166.95', '2016-10-03 08:16:38'), +(17, 'Gary', 'Bishop', 
'gbishopg@plala.or.jp', 'Male', '161.108.182.13', '2016-08-29 19:35:20'), +(18, 'Anna', 'Riley', 'arileyh@nasa.gov', 'Female', '253.31.108.22', '2015-12-11 04:34:27'), +(19, 'Sarah', 'Knight', 'sknighti@foxnews.com', 'Female', '222.220.3.177', '2016-09-26 00:49:06'), +(20, 'Phyllis', 'Fox', null, 'Female', '163.191.232.95', '2016-08-21 10:35:19'); + + +-- populate snapshot table +insert into {database}.{schema}.snapshot_expected ( + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + dbt_valid_from, + dbt_valid_to, + dbt_updated_at, + dbt_scd_id, + dbt_is_deleted +) + +select + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + -- fields added by snapshotting + updated_at as dbt_valid_from, + null::timestamp as dbt_valid_to, + updated_at as dbt_updated_at, + md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id, + 'False' as dbt_is_deleted +from {database}.{schema}.seed; +""" + +_snapshot_actual_sql = """ +{% snapshot snapshot_actual %} + + {{ + config( + unique_key='id || ' ~ "'-'" ~ ' || first_name', + ) + }} + + select * from {{target.database}}.{{target.schema}}.seed + +{% endsnapshot %} +""" + +_snapshots_yml = """ +snapshots: + - name: snapshot_actual + config: + strategy: timestamp + updated_at: updated_at + hard_deletes: new_record +""" + +_ref_snapshot_sql = """ +select * from {{ ref('snapshot_actual') }} +""" + + +_invalidate_sql = """ +-- update records 11 - 21. Change email and updated_at field +update {schema}.seed set + updated_at = updated_at + interval '1 hour', + email = case when id = 20 then 'pfoxj@creativecommons.org' else 'new_' || email end +where id >= 10 and id <= 20; + + +-- invalidate records 11 - 21 +update {schema}.snapshot_expected set + dbt_valid_to = updated_at + interval '1 hour' +where id >= 10 and id <= 20; + +""" + +_update_sql = """ +-- insert v2 of the 11 - 21 records + +insert into {database}.{schema}.snapshot_expected ( + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + dbt_valid_from, + dbt_valid_to, + dbt_updated_at, + dbt_scd_id, + dbt_is_deleted +) + +select + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + -- fields added by snapshotting + updated_at as dbt_valid_from, + null::timestamp as dbt_valid_to, + updated_at as dbt_updated_at, + md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id, + 'False' as dbt_is_deleted +from {database}.{schema}.seed +where id >= 10 and id <= 20; +""" + +_delete_sql = """ +delete from {schema}.seed where id = 1 +""" + + +class SnapshotNewRecordMode: + @pytest.fixture(scope="class") + def snapshots(self): + return {"snapshot.sql": _snapshot_actual_sql} + + @pytest.fixture(scope="class") + def models(self): + return { + "snapshots.yml": _snapshots_yml, + "ref_snapshot.sql": _ref_snapshot_sql, + } + + @pytest.fixture(scope="class") + def seed_new_record_mode(self): + return _seed_new_record_mode + + @pytest.fixture(scope="class") + def invalidate_sql(self): + return _invalidate_sql + + @pytest.fixture(scope="class") + def update_sql(self): + return _update_sql + + @pytest.fixture(scope="class") + def delete_sql(self): + return _delete_sql + + def test_snapshot_new_record_mode( + self, project, seed_new_record_mode, invalidate_sql, update_sql + ): + project.run_sql(seed_new_record_mode) + results = run_dbt(["snapshot"]) + assert len(results) == 1 + + project.run_sql(invalidate_sql) + project.run_sql(update_sql) + + results = run_dbt(["snapshot"]) + assert len(results) 
== 1 + + check_relations_equal(project.adapter, ["snapshot_actual", "snapshot_expected"]) + + project.run_sql(_delete_sql) + + results = run_dbt(["snapshot"]) + assert len(results) == 1 + + # TODO: Further validate results. diff --git a/dbt-tests-adapter/pyproject.toml b/dbt-tests-adapter/pyproject.toml index c9082d43..d2f732b7 100644 --- a/dbt-tests-adapter/pyproject.toml +++ b/dbt-tests-adapter/pyproject.toml @@ -4,7 +4,7 @@ name = "dbt-tests-adapter" description = "The set of reusable tests and test fixtures used to test common functionality" readme = "README.md" keywords = ["dbt", "adapter", "adapters", "database", "elt", "dbt-core", "dbt Core", "dbt Cloud", "dbt Labs"] -requires-python = ">=3.8.0" +requires-python = ">=3.9.0" authors = [ { name = "dbt Labs", email = "info@dbtlabs.com" }, ] @@ -17,7 +17,6 @@ classifiers = [ "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "Operating System :: POSIX :: Linux", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", diff --git a/dbt/adapters/__about__.py b/dbt/adapters/__about__.py index a55413d1..977620c3 100644 --- a/dbt/adapters/__about__.py +++ b/dbt/adapters/__about__.py @@ -1 +1 @@ -version = "1.7.0" +version = "1.10.3" diff --git a/dbt/adapters/base/__init__.py b/dbt/adapters/base/__init__.py index 9e7549f8..0600fc00 100644 --- a/dbt/adapters/base/__init__.py +++ b/dbt/adapters/base/__init__.py @@ -13,4 +13,5 @@ BaseRelation, RelationType, SchemaSearchMap, + AdapterTrackingRelationInfo, ) diff --git a/dbt/adapters/base/impl.py b/dbt/adapters/base/impl.py index 7f8820dc..25c5e5fd 100644 --- a/dbt/adapters/base/impl.py +++ b/dbt/adapters/base/impl.py @@ -4,6 +4,7 @@ from contextlib import contextmanager from datetime import datetime from enum import Enum +from importlib import import_module from multiprocessing.context import SpawnContext from typing import ( Any, @@ -22,7 +23,6 @@ Union, TYPE_CHECKING, ) -import os import pytz from dbt_common.behavior_flags import Behavior, BehaviorFlag from dbt_common.clients.jinja import CallableMacroGenerator @@ -62,12 +62,14 @@ ComponentName, InformationSchema, SchemaSearchMap, + AdapterTrackingRelationInfo, ) from dbt.adapters.cache import RelationsCache, _make_ref_key_dict from dbt.adapters.capability import Capability, CapabilityDict from dbt.adapters.contracts.connection import Credentials from dbt.adapters.contracts.macros import MacroResolverProtocol from dbt.adapters.contracts.relation import RelationConfig + from dbt.adapters.events.types import ( CacheMiss, CatalogGenerationError, @@ -97,6 +99,13 @@ GET_CATALOG_RELATIONS_MACRO_NAME = "get_catalog_relations" FRESHNESS_MACRO_NAME = "collect_freshness" GET_RELATION_LAST_MODIFIED_MACRO_NAME = "get_relation_last_modified" +DEFAULT_BASE_BEHAVIOR_FLAGS = [ + { + "name": "require_batched_execution_for_custom_microbatch_strategy", + "default": False, + "docs_url": "https://docs.getdbt.com/docs/build/incremental-microbatch", + } +] class ConstraintSupport(str, Enum): @@ -198,6 +207,14 @@ class FreshnessResponse(TypedDict): age: float # age in seconds +class SnapshotStrategy(TypedDict): + unique_key: Optional[str] + updated_at: Optional[str] + row_changed: Optional[str] + scd_id: Optional[str] + hard_deletes: Optional[str] + + class BaseAdapter(metaclass=AdapterMeta): """The BaseAdapter provides an abstract base class for adapters. 
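For context on the DEFAULT_BASE_BEHAVIOR_FLAGS constant introduced in the impl.py hunk above, and the builtin_incremental_strategies change further down: microbatch support is no longer gated by the DBT_EXPERIMENTAL_MICROBATCH environment variable but by a behavior flag that defaults to False, so "microbatch" ships as a builtin incremental strategy unless a project sets the flag to True. A minimal sketch of how the flag evaluates, using dbt_common's Behavior directly; the dict-style overrides stand in for project flags and are illustrative only.

from dbt_common.behavior_flags import Behavior

# The flag exactly as registered in DEFAULT_BASE_BEHAVIOR_FLAGS above.
flags = [
    {
        "name": "require_batched_execution_for_custom_microbatch_strategy",
        "default": False,
        "docs_url": "https://docs.getdbt.com/docs/build/incremental-microbatch",
    }
]

# With no project flags (e.g. during `dbt debug`) the default applies, so
# "microbatch" stays in builtin_incremental_strategies.
behavior = Behavior(flags, {})
assert not behavior.require_batched_execution_for_custom_microbatch_strategy.no_warn

# A project that sets the flag keeps a custom microbatch macro on the
# pre-existing, non-batched code path.
behavior = Behavior(
    flags, {"require_batched_execution_for_custom_microbatch_strategy": True}
)
assert behavior.require_batched_execution_for_custom_microbatch_strategy.no_warn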
@@ -276,8 +293,7 @@ def __init__(self, config, mp_context: SpawnContext) -> None: self.connections = self.ConnectionManager(config, mp_context) self._macro_resolver: Optional[MacroResolverProtocol] = None self._macro_context_generator: Optional[MacroContextGeneratorCallable] = None - # this will be updated to include global behavior flags once they exist - self.behavior = [] # type: ignore + self.behavior = DEFAULT_BASE_BEHAVIOR_FLAGS # type: ignore ### # Methods to set / access a macro resolver @@ -305,17 +321,20 @@ def behavior(self) -> Behavior: @behavior.setter # type: ignore def behavior(self, flags: List[BehaviorFlag]) -> None: flags.extend(self._behavior_flags) - try: - # we don't always get project flags, for example during `dbt debug` - self._behavior = Behavior(flags, self.config.flags) - except AttributeError: - # in that case, don't load any behavior to avoid unexpected defaults - self._behavior = Behavior([], {}) + + # we don't always get project flags, for example, the project file is not loaded during `dbt debug` + # in that case, load the default values for behavior flags to avoid compilation errors + # this mimics not loading a project file, or not specifying flags in a project file + user_overrides = getattr(self.config, "flags", {}) + + self._behavior = Behavior(flags, user_overrides) @property def _behavior_flags(self) -> List[BehaviorFlag]: """ This method should be overwritten by adapter maintainers to provide platform-specific flags + + The BaseAdapter should NOT include any global flags here as those should be defined via DEFAULT_BASE_BEHAVIOR_FLAGS """ return [] @@ -789,8 +808,8 @@ def valid_snapshot_target( columns = self.get_columns_in_relation(relation) names = set(c.name.lower() for c in columns) missing = [] - # Note: we're not checking dbt_updated_at here because it's not - # always present. + # Note: we're not checking dbt_updated_at or dbt_is_deleted here because they + # aren't always present. for column in ("dbt_scd_id", "dbt_valid_from", "dbt_valid_to"): desired = column_names[column] if column_names else column if desired not in names: @@ -799,6 +818,28 @@ def valid_snapshot_target( if missing: raise SnapshotTargetNotSnapshotTableError(missing) + @available.parse_none + def assert_valid_snapshot_target_given_strategy( + self, relation: BaseRelation, column_names: Dict[str, str], strategy: SnapshotStrategy + ) -> None: + # Assert everything we can with the legacy function. + self.valid_snapshot_target(relation, column_names) + + # Now do strategy-specific checks. + # TODO: Make these checks more comprehensive. + if strategy.get("hard_deletes", None) == "new_record": + columns = self.get_columns_in_relation(relation) + names = set(c.name.lower() for c in columns) + missing = [] + + for column in ("dbt_is_deleted",): + desired = column_names[column] if column_names else column + if desired not in names: + missing.append(desired) + + if missing: + raise SnapshotTargetNotSnapshotTableError(missing) + @available.parse_none def expand_target_column_types( self, from_relation: BaseRelation, to_relation: BaseRelation @@ -1574,14 +1615,31 @@ def valid_incremental_strategies(self): return ["append"] def builtin_incremental_strategies(self): + """ + List of possible builtin strategies for adapters + + Microbatch is added by _default_. It is only not added when the behavior flag + `require_batched_execution_for_custom_microbatch_strategy` is True. 
+ """ builtin_strategies = ["append", "delete+insert", "merge", "insert_overwrite"] - if os.environ.get("DBT_EXPERIMENTAL_MICROBATCH"): + if not self.behavior.require_batched_execution_for_custom_microbatch_strategy.no_warn: builtin_strategies.append("microbatch") return builtin_strategies @available.parse_none def get_incremental_strategy_macro(self, model_context, strategy: str): + """Gets the macro for the given incremental strategy. + + Additionally some validations are done: + 1. Assert that if the given strategy is a "builtin" strategy, then it must + also be defined as a "valid" strategy for the associated adapter + 2. Assert that the incremental strategy exists in the model context + + Notably, something be defined by the adapter as "valid" without it being + a "builtin", and nothing will break (and that is desirable). + """ + # Construct macro_name from strategy name if strategy is None: strategy = "default" @@ -1748,6 +1806,53 @@ def capabilities(cls) -> CapabilityDict: def supports(cls, capability: Capability) -> bool: return bool(cls.capabilities()[capability]) + @classmethod + def get_adapter_run_info(cls, config: RelationConfig) -> AdapterTrackingRelationInfo: + adapter_class_name, *_ = cls.__name__.split("Adapter") + adapter_name = adapter_class_name.lower() + + if adapter_name == "base": + adapter_version = "" + else: + adapter_version = import_module(f"dbt.adapters.{adapter_name}.__version__").version + + return AdapterTrackingRelationInfo( + adapter_name=adapter_name, + base_adapter_version=import_module("dbt.adapters.__about__").version, + adapter_version=adapter_version, + model_adapter_details=cls._get_adapter_specific_run_info(config), + ) + + @classmethod + def _get_adapter_specific_run_info(cls, config) -> Dict[str, Any]: + """ + Adapter maintainers should overwrite this method to return any run metadata that should be captured during a run. + """ + return {} + + @available.parse_none + @classmethod + def get_hard_deletes_behavior(cls, config): + """Check the hard_deletes config enum, and the legacy invalidate_hard_deletes + config flag in order to determine which behavior should be used for deleted + records in a snapshot. The default is to ignore them.""" + invalidate_hard_deletes = config.get("invalidate_hard_deletes", None) + hard_deletes = config.get("hard_deletes", None) + + if invalidate_hard_deletes is not None and hard_deletes is not None: + raise DbtValidationError( + "You cannot set both the invalidate_hard_deletes and hard_deletes config properties on the same snapshot." 
+ ) + + if invalidate_hard_deletes or hard_deletes == "invalidate": + return "invalidate" + elif hard_deletes == "new_record": + return "new_record" + elif hard_deletes is None or hard_deletes == "ignore": + return "ignore" + + raise DbtValidationError("Invalid setting for property hard_deletes.") + COLUMNS_EQUAL_SQL = """ with diff_count as ( diff --git a/dbt/adapters/base/relation.py b/dbt/adapters/base/relation.py index db6f86b6..b60528fe 100644 --- a/dbt/adapters/base/relation.py +++ b/dbt/adapters/base/relation.py @@ -6,6 +6,7 @@ Dict, FrozenSet, Iterator, + List, Optional, Set, Tuple, @@ -342,6 +343,16 @@ def create( ) return cls.from_dict(kwargs) + @classmethod + def scd_args(cls: Type[Self], primary_key: Union[str, List[str]], updated_at) -> List[str]: + scd_args = [] + if isinstance(primary_key, list): + scd_args.extend(primary_key) + else: + scd_args.append(primary_key) + scd_args.append(updated_at) + return scd_args + @property def can_be_renamed(self) -> bool: return self.type in self.renameable_relations @@ -532,3 +543,11 @@ def flatten(self, allow_multiple_databases: bool = False) -> "SchemaSearchMap": ) return new + + +@dataclass(frozen=True, eq=False, repr=False) +class AdapterTrackingRelationInfo(FakeAPIObject, Hashable): + adapter_name: str + base_adapter_version: str + adapter_version: str + model_adapter_details: Any diff --git a/dbt/adapters/capability.py b/dbt/adapters/capability.py index 4e410bb2..ca61f752 100644 --- a/dbt/adapters/capability.py +++ b/dbt/adapters/capability.py @@ -23,6 +23,9 @@ class Capability(str, Enum): CreateExternalCatalog = "CreateExternalCatalog" + MicrobatchConcurrency = "MicrobatchConcurrency" + """Indicates support running the microbatch incremental materialization strategy concurrently across threads.""" + class Support(str, Enum): Unknown = "Unknown" diff --git a/dbt/adapters/contracts/connection.py b/dbt/adapters/contracts/connection.py index 751d6135..1c317c53 100644 --- a/dbt/adapters/contracts/connection.py +++ b/dbt/adapters/contracts/connection.py @@ -43,6 +43,7 @@ class AdapterResponse(dbtClassMixin): _message: str code: Optional[str] = None rows_affected: Optional[int] = None + query_id: Optional[str] = None def __str__(self): return self._message diff --git a/dbt/adapters/events/adapter_types.proto b/dbt/adapters/events/adapter_types.proto index 69d64325..70b4e1e3 100644 --- a/dbt/adapters/events/adapter_types.proto +++ b/dbt/adapters/events/adapter_types.proto @@ -266,6 +266,7 @@ message SQLQueryStatus { AdapterNodeInfo node_info = 1; string status = 2; float elapsed = 3; + string query_id = 4; } message SQLQueryStatusMsg { diff --git a/dbt/adapters/events/adapter_types_pb2.py b/dbt/adapters/events/adapter_types_pb2.py index bfd44080..6a411842 100644 --- a/dbt/adapters/events/adapter_types_pb2.py +++ b/dbt/adapters/events/adapter_types_pb2.py @@ -1,11 +1,22 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
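One note before the regenerated adapter_types_pb2 module below: the MicrobatchConcurrency member added to the Capability enum above is advertised by adapters the same way as existing capabilities, through the class-level _capabilities mapping that BaseAdapter.supports() consults. A hedged sketch, assuming the existing CapabilityDict, CapabilitySupport, and Support helpers in dbt/adapters/capability.py; the standalone dict here is illustrative and would normally live on a concrete adapter class.

from dbt.adapters.capability import (
    Capability,
    CapabilityDict,
    CapabilitySupport,
    Support,
)

# How a concrete adapter would declare full support for the new capability
# (normally assigned to the adapter class as `_capabilities`).
capabilities = CapabilityDict(
    {Capability.MicrobatchConcurrency: CapabilitySupport(support=Support.Full)}
)

# BaseAdapter.supports() reduces to a truthiness check on the entry, so dbt
# can ask whether microbatch batches may run concurrently across threads.
assert bool(capabilities[Capability.MicrobatchConcurrency])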
+# NO CHECKED-IN PROTOBUF GENCODE # source: adapter_types.proto +# Protobuf Python Version: 5.28.3 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 28, + 3, + '', + 'adapter_types.proto' +) # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -15,16 +26,16 @@ from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x61\x64\x61pter_types.proto\x12\x0bproto_types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xab\x02\n\x16\x41\x64\x61pterCommonEventInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\x0b\n\x03msg\x18\x03 \x01(\t\x12\r\n\x05level\x18\x04 \x01(\t\x12\x15\n\rinvocation_id\x18\x05 \x01(\t\x12\x0b\n\x03pid\x18\x06 \x01(\x05\x12\x0e\n\x06thread\x18\x07 \x01(\t\x12&\n\x02ts\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12=\n\x05\x65xtra\x18\t \x03(\x0b\x32..proto_types.AdapterCommonEventInfo.ExtraEntry\x12\x10\n\x08\x63\x61tegory\x18\n \x01(\t\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"]\n\x13\x41\x64\x61pterNodeRelation\x12\x10\n\x08\x64\x61tabase\x18\n \x01(\t\x12\x0e\n\x06schema\x18\x0b \x01(\t\x12\r\n\x05\x61lias\x18\x0c \x01(\t\x12\x15\n\rrelation_name\x18\r \x01(\t\"\x9f\x02\n\x0f\x41\x64\x61pterNodeInfo\x12\x11\n\tnode_path\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x11\n\tunique_id\x18\x03 \x01(\t\x12\x15\n\rresource_type\x18\x04 \x01(\t\x12\x14\n\x0cmaterialized\x18\x05 \x01(\t\x12\x13\n\x0bnode_status\x18\x06 \x01(\t\x12\x17\n\x0fnode_started_at\x18\x07 \x01(\t\x12\x18\n\x10node_finished_at\x18\x08 \x01(\t\x12%\n\x04meta\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x37\n\rnode_relation\x18\n \x01(\x0b\x32 .proto_types.AdapterNodeRelation\"G\n\x0fReferenceKeyMsg\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12\x12\n\nidentifier\x18\x03 \x01(\t\"?\n\x19\x41\x64\x61pterDeprecationWarning\x12\x10\n\x08old_name\x18\x01 \x01(\t\x12\x10\n\x08new_name\x18\x02 \x01(\t\"\x87\x01\n\x1c\x41\x64\x61pterDeprecationWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.AdapterDeprecationWarning\"!\n\x1f\x43ollectFreshnessReturnSignature\"\x93\x01\n\"CollectFreshnessReturnSignatureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.CollectFreshnessReturnSignature\"\x8e\x01\n\x11\x41\x64\x61pterEventDebug\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"w\n\x14\x41\x64\x61pterEventDebugMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventDebug\"\x8d\x01\n\x10\x41\x64\x61pterEventInfo\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 
\x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"u\n\x13\x41\x64\x61pterEventInfoMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.AdapterEventInfo\"\x90\x01\n\x13\x41\x64\x61pterEventWarning\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"{\n\x16\x41\x64\x61pterEventWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.AdapterEventWarning\"\xa0\x01\n\x11\x41\x64\x61pterEventError\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x10\n\x08\x65xc_info\x18\x05 \x01(\t\"w\n\x14\x41\x64\x61pterEventErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventError\"f\n\rNewConnection\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"o\n\x10NewConnectionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.NewConnection\"=\n\x10\x43onnectionReused\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x16\n\x0eorig_conn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionReusedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionReused\"0\n\x1b\x43onnectionLeftOpenInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x8b\x01\n\x1e\x43onnectionLeftOpenInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.ConnectionLeftOpenInCleanup\".\n\x19\x43onnectionClosedInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x87\x01\n\x1c\x43onnectionClosedInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.ConnectionClosedInCleanup\"f\n\x0eRollbackFailed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"q\n\x11RollbackFailedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.RollbackFailed\"V\n\x10\x43onnectionClosed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionClosedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionClosed\"X\n\x12\x43onnectionLeftOpen\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"y\n\x15\x43onnectionLeftOpenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.ConnectionLeftOpen\"N\n\x08Rollback\x12/\n\tnode_info\x18\x01 
\x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"e\n\x0bRollbackMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.Rollback\"@\n\tCacheMiss\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\"g\n\x0c\x43\x61\x63heMissMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.CacheMiss\"b\n\rListRelations\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12/\n\trelations\x18\x03 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10ListRelationsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ListRelations\"g\n\x0e\x43onnectionUsed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"q\n\x11\x43onnectionUsedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.ConnectionUsed\"[\n\x08SQLQuery\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x0b\n\x03sql\x18\x03 \x01(\t\"e\n\x0bSQLQueryMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.SQLQuery\"b\n\x0eSQLQueryStatus\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0e\n\x06status\x18\x02 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x03 \x01(\x02\"q\n\x11SQLQueryStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SQLQueryStatus\"O\n\tSQLCommit\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"g\n\x0cSQLCommitMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.SQLCommit\"a\n\rColTypeChange\x12\x11\n\torig_type\x18\x01 \x01(\t\x12\x10\n\x08new_type\x18\x02 \x01(\t\x12+\n\x05table\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10\x43olTypeChangeMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ColTypeChange\"@\n\x0eSchemaCreation\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"q\n\x11SchemaCreationMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SchemaCreation\"<\n\nSchemaDrop\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"i\n\rSchemaDropMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.SchemaDrop\"\xde\x01\n\x0b\x43\x61\x63heAction\x12\x0e\n\x06\x61\x63tion\x18\x01 \x01(\t\x12-\n\x07ref_key\x18\x02 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_2\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_3\x18\x04 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12.\n\x08ref_list\x18\x05 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"k\n\x0e\x43\x61\x63heActionMsg\x12\x31\n\x04info\x18\x01 
\x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.CacheAction\"\x98\x01\n\x0e\x43\x61\x63heDumpGraph\x12\x33\n\x04\x64ump\x18\x01 \x03(\x0b\x32%.proto_types.CacheDumpGraph.DumpEntry\x12\x14\n\x0c\x62\x65\x66ore_after\x18\x02 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\x1a+\n\tDumpEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"q\n\x11\x43\x61\x63heDumpGraphMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CacheDumpGraph\"B\n\x11\x41\x64\x61pterRegistered\x12\x14\n\x0c\x61\x64\x61pter_name\x18\x01 \x01(\t\x12\x17\n\x0f\x61\x64\x61pter_version\x18\x02 \x01(\t\"w\n\x14\x41\x64\x61pterRegisteredMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterRegistered\"!\n\x12\x41\x64\x61pterImportError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"y\n\x15\x41\x64\x61pterImportErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.AdapterImportError\"#\n\x0fPluginLoadError\x12\x10\n\x08\x65xc_info\x18\x01 \x01(\t\"s\n\x12PluginLoadErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.PluginLoadError\"a\n\x14NewConnectionOpening\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x18\n\x10\x63onnection_state\x18\x02 \x01(\t\"}\n\x17NewConnectionOpeningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.NewConnectionOpening\"8\n\rCodeExecution\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x14\n\x0c\x63ode_content\x18\x02 \x01(\t\"o\n\x10\x43odeExecutionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.CodeExecution\"6\n\x13\x43odeExecutionStatus\x12\x0e\n\x06status\x18\x01 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x02 \x01(\x02\"{\n\x16\x43odeExecutionStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.CodeExecutionStatus\"%\n\x16\x43\x61talogGenerationError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"\x81\x01\n\x19\x43\x61talogGenerationErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.CatalogGenerationError\"-\n\x13WriteCatalogFailure\x12\x16\n\x0enum_exceptions\x18\x01 \x01(\x05\"{\n\x16WriteCatalogFailureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.WriteCatalogFailure\"\x1e\n\x0e\x43\x61talogWritten\x12\x0c\n\x04path\x18\x01 \x01(\t\"q\n\x11\x43\x61talogWrittenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CatalogWritten\"\x14\n\x12\x43\x61nnotGenerateDocs\"y\n\x15\x43\x61nnotGenerateDocsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.CannotGenerateDocs\"\x11\n\x0f\x42uildingCatalog\"s\n\x12\x42uildingCatalogMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\x1c.proto_types.BuildingCatalog\"-\n\x18\x44\x61tabaseErrorRunningHook\x12\x11\n\thook_type\x18\x01 \x01(\t\"\x85\x01\n\x1b\x44\x61tabaseErrorRunningHookMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.DatabaseErrorRunningHook\"4\n\x0cHooksRunning\x12\x11\n\tnum_hooks\x18\x01 \x01(\x05\x12\x11\n\thook_type\x18\x02 \x01(\t\"m\n\x0fHooksRunningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.HooksRunning\"T\n\x14\x46inishedRunningStats\x12\x11\n\tstat_line\x18\x01 \x01(\t\x12\x11\n\texecution\x18\x02 \x01(\t\x12\x16\n\x0e\x65xecution_time\x18\x03 \x01(\x02\"}\n\x17\x46inishedRunningStatsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.FinishedRunningStats\"<\n\x15\x43onstraintNotEnforced\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x7f\n\x18\x43onstraintNotEnforcedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.ConstraintNotEnforced\"=\n\x16\x43onstraintNotSupported\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x81\x01\n\x19\x43onstraintNotSupportedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.ConstraintNotSupported\"%\n\x10TypeCodeNotFound\x12\x11\n\ttype_code\x18\x01 \x01(\x05\"u\n\x13TypeCodeNotFoundMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.TypeCodeNotFoundb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x61\x64\x61pter_types.proto\x12\x0bproto_types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xab\x02\n\x16\x41\x64\x61pterCommonEventInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\x0b\n\x03msg\x18\x03 \x01(\t\x12\r\n\x05level\x18\x04 \x01(\t\x12\x15\n\rinvocation_id\x18\x05 \x01(\t\x12\x0b\n\x03pid\x18\x06 \x01(\x05\x12\x0e\n\x06thread\x18\x07 \x01(\t\x12&\n\x02ts\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12=\n\x05\x65xtra\x18\t \x03(\x0b\x32..proto_types.AdapterCommonEventInfo.ExtraEntry\x12\x10\n\x08\x63\x61tegory\x18\n \x01(\t\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"]\n\x13\x41\x64\x61pterNodeRelation\x12\x10\n\x08\x64\x61tabase\x18\n \x01(\t\x12\x0e\n\x06schema\x18\x0b \x01(\t\x12\r\n\x05\x61lias\x18\x0c \x01(\t\x12\x15\n\rrelation_name\x18\r \x01(\t\"\x9f\x02\n\x0f\x41\x64\x61pterNodeInfo\x12\x11\n\tnode_path\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x11\n\tunique_id\x18\x03 \x01(\t\x12\x15\n\rresource_type\x18\x04 \x01(\t\x12\x14\n\x0cmaterialized\x18\x05 \x01(\t\x12\x13\n\x0bnode_status\x18\x06 \x01(\t\x12\x17\n\x0fnode_started_at\x18\x07 \x01(\t\x12\x18\n\x10node_finished_at\x18\x08 \x01(\t\x12%\n\x04meta\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x37\n\rnode_relation\x18\n \x01(\x0b\x32 .proto_types.AdapterNodeRelation\"G\n\x0fReferenceKeyMsg\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12\x12\n\nidentifier\x18\x03 \x01(\t\"?\n\x19\x41\x64\x61pterDeprecationWarning\x12\x10\n\x08old_name\x18\x01 
\x01(\t\x12\x10\n\x08new_name\x18\x02 \x01(\t\"\x87\x01\n\x1c\x41\x64\x61pterDeprecationWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.AdapterDeprecationWarning\"!\n\x1f\x43ollectFreshnessReturnSignature\"\x93\x01\n\"CollectFreshnessReturnSignatureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.CollectFreshnessReturnSignature\"\x8e\x01\n\x11\x41\x64\x61pterEventDebug\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"w\n\x14\x41\x64\x61pterEventDebugMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventDebug\"\x8d\x01\n\x10\x41\x64\x61pterEventInfo\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"u\n\x13\x41\x64\x61pterEventInfoMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.AdapterEventInfo\"\x90\x01\n\x13\x41\x64\x61pterEventWarning\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"{\n\x16\x41\x64\x61pterEventWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.AdapterEventWarning\"\xa0\x01\n\x11\x41\x64\x61pterEventError\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x10\n\x08\x65xc_info\x18\x05 \x01(\t\"w\n\x14\x41\x64\x61pterEventErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventError\"f\n\rNewConnection\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"o\n\x10NewConnectionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.NewConnection\"=\n\x10\x43onnectionReused\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x16\n\x0eorig_conn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionReusedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionReused\"0\n\x1b\x43onnectionLeftOpenInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x8b\x01\n\x1e\x43onnectionLeftOpenInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.ConnectionLeftOpenInCleanup\".\n\x19\x43onnectionClosedInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x87\x01\n\x1c\x43onnectionClosedInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32&.proto_types.ConnectionClosedInCleanup\"f\n\x0eRollbackFailed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"q\n\x11RollbackFailedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.RollbackFailed\"V\n\x10\x43onnectionClosed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionClosedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionClosed\"X\n\x12\x43onnectionLeftOpen\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"y\n\x15\x43onnectionLeftOpenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.ConnectionLeftOpen\"N\n\x08Rollback\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"e\n\x0bRollbackMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.Rollback\"@\n\tCacheMiss\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\"g\n\x0c\x43\x61\x63heMissMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.CacheMiss\"b\n\rListRelations\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12/\n\trelations\x18\x03 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10ListRelationsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ListRelations\"g\n\x0e\x43onnectionUsed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"q\n\x11\x43onnectionUsedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.ConnectionUsed\"[\n\x08SQLQuery\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x0b\n\x03sql\x18\x03 \x01(\t\"e\n\x0bSQLQueryMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.SQLQuery\"t\n\x0eSQLQueryStatus\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0e\n\x06status\x18\x02 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x03 \x01(\x02\x12\x10\n\x08query_id\x18\x04 \x01(\t\"q\n\x11SQLQueryStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SQLQueryStatus\"O\n\tSQLCommit\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"g\n\x0cSQLCommitMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.SQLCommit\"a\n\rColTypeChange\x12\x11\n\torig_type\x18\x01 \x01(\t\x12\x10\n\x08new_type\x18\x02 \x01(\t\x12+\n\x05table\x18\x03 
\x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10\x43olTypeChangeMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ColTypeChange\"@\n\x0eSchemaCreation\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"q\n\x11SchemaCreationMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SchemaCreation\"<\n\nSchemaDrop\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"i\n\rSchemaDropMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.SchemaDrop\"\xde\x01\n\x0b\x43\x61\x63heAction\x12\x0e\n\x06\x61\x63tion\x18\x01 \x01(\t\x12-\n\x07ref_key\x18\x02 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_2\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_3\x18\x04 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12.\n\x08ref_list\x18\x05 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"k\n\x0e\x43\x61\x63heActionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.CacheAction\"\x98\x01\n\x0e\x43\x61\x63heDumpGraph\x12\x33\n\x04\x64ump\x18\x01 \x03(\x0b\x32%.proto_types.CacheDumpGraph.DumpEntry\x12\x14\n\x0c\x62\x65\x66ore_after\x18\x02 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\x1a+\n\tDumpEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"q\n\x11\x43\x61\x63heDumpGraphMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CacheDumpGraph\"B\n\x11\x41\x64\x61pterRegistered\x12\x14\n\x0c\x61\x64\x61pter_name\x18\x01 \x01(\t\x12\x17\n\x0f\x61\x64\x61pter_version\x18\x02 \x01(\t\"w\n\x14\x41\x64\x61pterRegisteredMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterRegistered\"!\n\x12\x41\x64\x61pterImportError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"y\n\x15\x41\x64\x61pterImportErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.AdapterImportError\"#\n\x0fPluginLoadError\x12\x10\n\x08\x65xc_info\x18\x01 \x01(\t\"s\n\x12PluginLoadErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.PluginLoadError\"a\n\x14NewConnectionOpening\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x18\n\x10\x63onnection_state\x18\x02 \x01(\t\"}\n\x17NewConnectionOpeningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.NewConnectionOpening\"8\n\rCodeExecution\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x14\n\x0c\x63ode_content\x18\x02 \x01(\t\"o\n\x10\x43odeExecutionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.CodeExecution\"6\n\x13\x43odeExecutionStatus\x12\x0e\n\x06status\x18\x01 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x02 \x01(\x02\"{\n\x16\x43odeExecutionStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 
.proto_types.CodeExecutionStatus\"%\n\x16\x43\x61talogGenerationError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"\x81\x01\n\x19\x43\x61talogGenerationErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.CatalogGenerationError\"-\n\x13WriteCatalogFailure\x12\x16\n\x0enum_exceptions\x18\x01 \x01(\x05\"{\n\x16WriteCatalogFailureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.WriteCatalogFailure\"\x1e\n\x0e\x43\x61talogWritten\x12\x0c\n\x04path\x18\x01 \x01(\t\"q\n\x11\x43\x61talogWrittenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CatalogWritten\"\x14\n\x12\x43\x61nnotGenerateDocs\"y\n\x15\x43\x61nnotGenerateDocsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.CannotGenerateDocs\"\x11\n\x0f\x42uildingCatalog\"s\n\x12\x42uildingCatalogMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.BuildingCatalog\"-\n\x18\x44\x61tabaseErrorRunningHook\x12\x11\n\thook_type\x18\x01 \x01(\t\"\x85\x01\n\x1b\x44\x61tabaseErrorRunningHookMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.DatabaseErrorRunningHook\"4\n\x0cHooksRunning\x12\x11\n\tnum_hooks\x18\x01 \x01(\x05\x12\x11\n\thook_type\x18\x02 \x01(\t\"m\n\x0fHooksRunningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.HooksRunning\"T\n\x14\x46inishedRunningStats\x12\x11\n\tstat_line\x18\x01 \x01(\t\x12\x11\n\texecution\x18\x02 \x01(\t\x12\x16\n\x0e\x65xecution_time\x18\x03 \x01(\x02\"}\n\x17\x46inishedRunningStatsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.FinishedRunningStats\"<\n\x15\x43onstraintNotEnforced\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x7f\n\x18\x43onstraintNotEnforcedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.ConstraintNotEnforced\"=\n\x16\x43onstraintNotSupported\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x81\x01\n\x19\x43onstraintNotSupportedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.ConstraintNotSupported\"%\n\x10TypeCodeNotFound\x12\x11\n\ttype_code\x18\x01 \x01(\x05\"u\n\x13TypeCodeNotFoundMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.TypeCodeNotFoundb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'adapter_types_pb2', _globals) -if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - _globals['_ADAPTERCOMMONEVENTINFO_EXTRAENTRY']._options = None +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals['_ADAPTERCOMMONEVENTINFO_EXTRAENTRY']._loaded_options = None 
_globals['_ADAPTERCOMMONEVENTINFO_EXTRAENTRY']._serialized_options = b'8\001' - _globals['_CACHEDUMPGRAPH_DUMPENTRY']._options = None + _globals['_CACHEDUMPGRAPH_DUMPENTRY']._loaded_options = None _globals['_CACHEDUMPGRAPH_DUMPENTRY']._serialized_options = b'8\001' _globals['_ADAPTERCOMMONEVENTINFO']._serialized_start=100 _globals['_ADAPTERCOMMONEVENTINFO']._serialized_end=399 @@ -109,101 +120,101 @@ _globals['_SQLQUERYMSG']._serialized_start=4628 _globals['_SQLQUERYMSG']._serialized_end=4729 _globals['_SQLQUERYSTATUS']._serialized_start=4731 - _globals['_SQLQUERYSTATUS']._serialized_end=4829 - _globals['_SQLQUERYSTATUSMSG']._serialized_start=4831 - _globals['_SQLQUERYSTATUSMSG']._serialized_end=4944 - _globals['_SQLCOMMIT']._serialized_start=4946 - _globals['_SQLCOMMIT']._serialized_end=5025 - _globals['_SQLCOMMITMSG']._serialized_start=5027 - _globals['_SQLCOMMITMSG']._serialized_end=5130 - _globals['_COLTYPECHANGE']._serialized_start=5132 - _globals['_COLTYPECHANGE']._serialized_end=5229 - _globals['_COLTYPECHANGEMSG']._serialized_start=5231 - _globals['_COLTYPECHANGEMSG']._serialized_end=5342 - _globals['_SCHEMACREATION']._serialized_start=5344 - _globals['_SCHEMACREATION']._serialized_end=5408 - _globals['_SCHEMACREATIONMSG']._serialized_start=5410 - _globals['_SCHEMACREATIONMSG']._serialized_end=5523 - _globals['_SCHEMADROP']._serialized_start=5525 - _globals['_SCHEMADROP']._serialized_end=5585 - _globals['_SCHEMADROPMSG']._serialized_start=5587 - _globals['_SCHEMADROPMSG']._serialized_end=5692 - _globals['_CACHEACTION']._serialized_start=5695 - _globals['_CACHEACTION']._serialized_end=5917 - _globals['_CACHEACTIONMSG']._serialized_start=5919 - _globals['_CACHEACTIONMSG']._serialized_end=6026 - _globals['_CACHEDUMPGRAPH']._serialized_start=6029 - _globals['_CACHEDUMPGRAPH']._serialized_end=6181 - _globals['_CACHEDUMPGRAPH_DUMPENTRY']._serialized_start=6138 - _globals['_CACHEDUMPGRAPH_DUMPENTRY']._serialized_end=6181 - _globals['_CACHEDUMPGRAPHMSG']._serialized_start=6183 - _globals['_CACHEDUMPGRAPHMSG']._serialized_end=6296 - _globals['_ADAPTERREGISTERED']._serialized_start=6298 - _globals['_ADAPTERREGISTERED']._serialized_end=6364 - _globals['_ADAPTERREGISTEREDMSG']._serialized_start=6366 - _globals['_ADAPTERREGISTEREDMSG']._serialized_end=6485 - _globals['_ADAPTERIMPORTERROR']._serialized_start=6487 - _globals['_ADAPTERIMPORTERROR']._serialized_end=6520 - _globals['_ADAPTERIMPORTERRORMSG']._serialized_start=6522 - _globals['_ADAPTERIMPORTERRORMSG']._serialized_end=6643 - _globals['_PLUGINLOADERROR']._serialized_start=6645 - _globals['_PLUGINLOADERROR']._serialized_end=6680 - _globals['_PLUGINLOADERRORMSG']._serialized_start=6682 - _globals['_PLUGINLOADERRORMSG']._serialized_end=6797 - _globals['_NEWCONNECTIONOPENING']._serialized_start=6799 - _globals['_NEWCONNECTIONOPENING']._serialized_end=6896 - _globals['_NEWCONNECTIONOPENINGMSG']._serialized_start=6898 - _globals['_NEWCONNECTIONOPENINGMSG']._serialized_end=7023 - _globals['_CODEEXECUTION']._serialized_start=7025 - _globals['_CODEEXECUTION']._serialized_end=7081 - _globals['_CODEEXECUTIONMSG']._serialized_start=7083 - _globals['_CODEEXECUTIONMSG']._serialized_end=7194 - _globals['_CODEEXECUTIONSTATUS']._serialized_start=7196 - _globals['_CODEEXECUTIONSTATUS']._serialized_end=7250 - _globals['_CODEEXECUTIONSTATUSMSG']._serialized_start=7252 - _globals['_CODEEXECUTIONSTATUSMSG']._serialized_end=7375 - _globals['_CATALOGGENERATIONERROR']._serialized_start=7377 - _globals['_CATALOGGENERATIONERROR']._serialized_end=7414 - 
_globals['_CATALOGGENERATIONERRORMSG']._serialized_start=7417 - _globals['_CATALOGGENERATIONERRORMSG']._serialized_end=7546 - _globals['_WRITECATALOGFAILURE']._serialized_start=7548 - _globals['_WRITECATALOGFAILURE']._serialized_end=7593 - _globals['_WRITECATALOGFAILUREMSG']._serialized_start=7595 - _globals['_WRITECATALOGFAILUREMSG']._serialized_end=7718 - _globals['_CATALOGWRITTEN']._serialized_start=7720 - _globals['_CATALOGWRITTEN']._serialized_end=7750 - _globals['_CATALOGWRITTENMSG']._serialized_start=7752 - _globals['_CATALOGWRITTENMSG']._serialized_end=7865 - _globals['_CANNOTGENERATEDOCS']._serialized_start=7867 - _globals['_CANNOTGENERATEDOCS']._serialized_end=7887 - _globals['_CANNOTGENERATEDOCSMSG']._serialized_start=7889 - _globals['_CANNOTGENERATEDOCSMSG']._serialized_end=8010 - _globals['_BUILDINGCATALOG']._serialized_start=8012 - _globals['_BUILDINGCATALOG']._serialized_end=8029 - _globals['_BUILDINGCATALOGMSG']._serialized_start=8031 - _globals['_BUILDINGCATALOGMSG']._serialized_end=8146 - _globals['_DATABASEERRORRUNNINGHOOK']._serialized_start=8148 - _globals['_DATABASEERRORRUNNINGHOOK']._serialized_end=8193 - _globals['_DATABASEERRORRUNNINGHOOKMSG']._serialized_start=8196 - _globals['_DATABASEERRORRUNNINGHOOKMSG']._serialized_end=8329 - _globals['_HOOKSRUNNING']._serialized_start=8331 - _globals['_HOOKSRUNNING']._serialized_end=8383 - _globals['_HOOKSRUNNINGMSG']._serialized_start=8385 - _globals['_HOOKSRUNNINGMSG']._serialized_end=8494 - _globals['_FINISHEDRUNNINGSTATS']._serialized_start=8496 - _globals['_FINISHEDRUNNINGSTATS']._serialized_end=8580 - _globals['_FINISHEDRUNNINGSTATSMSG']._serialized_start=8582 - _globals['_FINISHEDRUNNINGSTATSMSG']._serialized_end=8707 - _globals['_CONSTRAINTNOTENFORCED']._serialized_start=8709 - _globals['_CONSTRAINTNOTENFORCED']._serialized_end=8769 - _globals['_CONSTRAINTNOTENFORCEDMSG']._serialized_start=8771 - _globals['_CONSTRAINTNOTENFORCEDMSG']._serialized_end=8898 - _globals['_CONSTRAINTNOTSUPPORTED']._serialized_start=8900 - _globals['_CONSTRAINTNOTSUPPORTED']._serialized_end=8961 - _globals['_CONSTRAINTNOTSUPPORTEDMSG']._serialized_start=8964 - _globals['_CONSTRAINTNOTSUPPORTEDMSG']._serialized_end=9093 - _globals['_TYPECODENOTFOUND']._serialized_start=9095 - _globals['_TYPECODENOTFOUND']._serialized_end=9132 - _globals['_TYPECODENOTFOUNDMSG']._serialized_start=9134 - _globals['_TYPECODENOTFOUNDMSG']._serialized_end=9251 + _globals['_SQLQUERYSTATUS']._serialized_end=4847 + _globals['_SQLQUERYSTATUSMSG']._serialized_start=4849 + _globals['_SQLQUERYSTATUSMSG']._serialized_end=4962 + _globals['_SQLCOMMIT']._serialized_start=4964 + _globals['_SQLCOMMIT']._serialized_end=5043 + _globals['_SQLCOMMITMSG']._serialized_start=5045 + _globals['_SQLCOMMITMSG']._serialized_end=5148 + _globals['_COLTYPECHANGE']._serialized_start=5150 + _globals['_COLTYPECHANGE']._serialized_end=5247 + _globals['_COLTYPECHANGEMSG']._serialized_start=5249 + _globals['_COLTYPECHANGEMSG']._serialized_end=5360 + _globals['_SCHEMACREATION']._serialized_start=5362 + _globals['_SCHEMACREATION']._serialized_end=5426 + _globals['_SCHEMACREATIONMSG']._serialized_start=5428 + _globals['_SCHEMACREATIONMSG']._serialized_end=5541 + _globals['_SCHEMADROP']._serialized_start=5543 + _globals['_SCHEMADROP']._serialized_end=5603 + _globals['_SCHEMADROPMSG']._serialized_start=5605 + _globals['_SCHEMADROPMSG']._serialized_end=5710 + _globals['_CACHEACTION']._serialized_start=5713 + _globals['_CACHEACTION']._serialized_end=5935 + 
_globals['_CACHEACTIONMSG']._serialized_start=5937 + _globals['_CACHEACTIONMSG']._serialized_end=6044 + _globals['_CACHEDUMPGRAPH']._serialized_start=6047 + _globals['_CACHEDUMPGRAPH']._serialized_end=6199 + _globals['_CACHEDUMPGRAPH_DUMPENTRY']._serialized_start=6156 + _globals['_CACHEDUMPGRAPH_DUMPENTRY']._serialized_end=6199 + _globals['_CACHEDUMPGRAPHMSG']._serialized_start=6201 + _globals['_CACHEDUMPGRAPHMSG']._serialized_end=6314 + _globals['_ADAPTERREGISTERED']._serialized_start=6316 + _globals['_ADAPTERREGISTERED']._serialized_end=6382 + _globals['_ADAPTERREGISTEREDMSG']._serialized_start=6384 + _globals['_ADAPTERREGISTEREDMSG']._serialized_end=6503 + _globals['_ADAPTERIMPORTERROR']._serialized_start=6505 + _globals['_ADAPTERIMPORTERROR']._serialized_end=6538 + _globals['_ADAPTERIMPORTERRORMSG']._serialized_start=6540 + _globals['_ADAPTERIMPORTERRORMSG']._serialized_end=6661 + _globals['_PLUGINLOADERROR']._serialized_start=6663 + _globals['_PLUGINLOADERROR']._serialized_end=6698 + _globals['_PLUGINLOADERRORMSG']._serialized_start=6700 + _globals['_PLUGINLOADERRORMSG']._serialized_end=6815 + _globals['_NEWCONNECTIONOPENING']._serialized_start=6817 + _globals['_NEWCONNECTIONOPENING']._serialized_end=6914 + _globals['_NEWCONNECTIONOPENINGMSG']._serialized_start=6916 + _globals['_NEWCONNECTIONOPENINGMSG']._serialized_end=7041 + _globals['_CODEEXECUTION']._serialized_start=7043 + _globals['_CODEEXECUTION']._serialized_end=7099 + _globals['_CODEEXECUTIONMSG']._serialized_start=7101 + _globals['_CODEEXECUTIONMSG']._serialized_end=7212 + _globals['_CODEEXECUTIONSTATUS']._serialized_start=7214 + _globals['_CODEEXECUTIONSTATUS']._serialized_end=7268 + _globals['_CODEEXECUTIONSTATUSMSG']._serialized_start=7270 + _globals['_CODEEXECUTIONSTATUSMSG']._serialized_end=7393 + _globals['_CATALOGGENERATIONERROR']._serialized_start=7395 + _globals['_CATALOGGENERATIONERROR']._serialized_end=7432 + _globals['_CATALOGGENERATIONERRORMSG']._serialized_start=7435 + _globals['_CATALOGGENERATIONERRORMSG']._serialized_end=7564 + _globals['_WRITECATALOGFAILURE']._serialized_start=7566 + _globals['_WRITECATALOGFAILURE']._serialized_end=7611 + _globals['_WRITECATALOGFAILUREMSG']._serialized_start=7613 + _globals['_WRITECATALOGFAILUREMSG']._serialized_end=7736 + _globals['_CATALOGWRITTEN']._serialized_start=7738 + _globals['_CATALOGWRITTEN']._serialized_end=7768 + _globals['_CATALOGWRITTENMSG']._serialized_start=7770 + _globals['_CATALOGWRITTENMSG']._serialized_end=7883 + _globals['_CANNOTGENERATEDOCS']._serialized_start=7885 + _globals['_CANNOTGENERATEDOCS']._serialized_end=7905 + _globals['_CANNOTGENERATEDOCSMSG']._serialized_start=7907 + _globals['_CANNOTGENERATEDOCSMSG']._serialized_end=8028 + _globals['_BUILDINGCATALOG']._serialized_start=8030 + _globals['_BUILDINGCATALOG']._serialized_end=8047 + _globals['_BUILDINGCATALOGMSG']._serialized_start=8049 + _globals['_BUILDINGCATALOGMSG']._serialized_end=8164 + _globals['_DATABASEERRORRUNNINGHOOK']._serialized_start=8166 + _globals['_DATABASEERRORRUNNINGHOOK']._serialized_end=8211 + _globals['_DATABASEERRORRUNNINGHOOKMSG']._serialized_start=8214 + _globals['_DATABASEERRORRUNNINGHOOKMSG']._serialized_end=8347 + _globals['_HOOKSRUNNING']._serialized_start=8349 + _globals['_HOOKSRUNNING']._serialized_end=8401 + _globals['_HOOKSRUNNINGMSG']._serialized_start=8403 + _globals['_HOOKSRUNNINGMSG']._serialized_end=8512 + _globals['_FINISHEDRUNNINGSTATS']._serialized_start=8514 + _globals['_FINISHEDRUNNINGSTATS']._serialized_end=8598 + 
_globals['_FINISHEDRUNNINGSTATSMSG']._serialized_start=8600 + _globals['_FINISHEDRUNNINGSTATSMSG']._serialized_end=8725 + _globals['_CONSTRAINTNOTENFORCED']._serialized_start=8727 + _globals['_CONSTRAINTNOTENFORCED']._serialized_end=8787 + _globals['_CONSTRAINTNOTENFORCEDMSG']._serialized_start=8789 + _globals['_CONSTRAINTNOTENFORCEDMSG']._serialized_end=8916 + _globals['_CONSTRAINTNOTSUPPORTED']._serialized_start=8918 + _globals['_CONSTRAINTNOTSUPPORTED']._serialized_end=8979 + _globals['_CONSTRAINTNOTSUPPORTEDMSG']._serialized_start=8982 + _globals['_CONSTRAINTNOTSUPPORTEDMSG']._serialized_end=9111 + _globals['_TYPECODENOTFOUND']._serialized_start=9113 + _globals['_TYPECODENOTFOUND']._serialized_end=9150 + _globals['_TYPECODENOTFOUNDMSG']._serialized_start=9152 + _globals['_TYPECODENOTFOUNDMSG']._serialized_end=9269 # @@protoc_insertion_point(module_scope) diff --git a/dbt/adapters/sql/connections.py b/dbt/adapters/sql/connections.py index 4d450c88..baccddc9 100644 --- a/dbt/adapters/sql/connections.py +++ b/dbt/adapters/sql/connections.py @@ -92,11 +92,14 @@ def add_query( cursor = connection.handle.cursor() cursor.execute(sql, bindings) + result = self.get_response(cursor) + fire_event( SQLQueryStatus( - status=str(self.get_response(cursor)), + status=str(result), elapsed=time.perf_counter() - pre, node_info=get_node_info(), + query_id=result.query_id, ) ) diff --git a/dbt/include/global_project/macros/materializations/models/incremental/incremental.sql b/dbt/include/global_project/macros/materializations/models/incremental/incremental.sql index f932751a..41d2de26 100644 --- a/dbt/include/global_project/macros/materializations/models/incremental/incremental.sql +++ b/dbt/include/global_project/macros/materializations/models/incremental/incremental.sql @@ -32,6 +32,9 @@ {% set to_drop = [] %} + {% set incremental_strategy = config.get('incremental_strategy') or 'default' %} + {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %} + {% if existing_relation is none %} {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %} {% elif full_refresh_mode %} @@ -52,9 +55,7 @@ {% endif %} {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#} - {% set incremental_strategy = config.get('incremental_strategy') or 'default' %} {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %} - {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %} {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %} {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %} diff --git a/dbt/include/global_project/macros/materializations/snapshots/helpers.sql b/dbt/include/global_project/macros/materializations/snapshots/helpers.sql index 8d982855..33492cc9 100644 --- a/dbt/include/global_project/macros/materializations/snapshots/helpers.sql +++ b/dbt/include/global_project/macros/materializations/snapshots/helpers.sql @@ -35,7 +35,7 @@ {% endmacro %} {% macro get_snapshot_table_column_names() %} - {{ return({'dbt_valid_to': 'dbt_valid_to', 'dbt_valid_from': 'dbt_valid_from', 'dbt_scd_id': 'dbt_scd_id', 'dbt_updated_at': 'dbt_updated_at'}) }} + {{ return({'dbt_valid_to': 'dbt_valid_to', 'dbt_valid_from': 'dbt_valid_from', 'dbt_scd_id': 'dbt_scd_id', 
'dbt_updated_at': 'dbt_updated_at', 'dbt_is_deleted': 'dbt_is_deleted'}) }} {% endmacro %} {% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%} @@ -49,22 +49,24 @@ snapshotted_data as ( - select *, - {{ strategy.unique_key }} as dbt_unique_key - + select *, {{ unique_key_fields(strategy.unique_key) }} from {{ target_relation }} - where {{ columns.dbt_valid_to }} is null + where + {% if config.get('dbt_valid_to_current') %} + {# Check for either dbt_valid_to_current OR null, in order to correctly update records with nulls #} + ( {{ columns.dbt_valid_to }} = {{ config.get('dbt_valid_to_current') }} or {{ columns.dbt_valid_to }} is null) + {% else %} + {{ columns.dbt_valid_to }} is null + {% endif %} ), insertions_source_data as ( - select - *, - {{ strategy.unique_key }} as dbt_unique_key, + select *, {{ unique_key_fields(strategy.unique_key) }}, {{ strategy.updated_at }} as {{ columns.dbt_updated_at }}, {{ strategy.updated_at }} as {{ columns.dbt_valid_from }}, - nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as {{ columns.dbt_valid_to }}, + {{ get_dbt_valid_to_current(strategy, columns) }}, {{ strategy.scd_id }} as {{ columns.dbt_scd_id }} from snapshot_query @@ -72,9 +74,7 @@ updates_source_data as ( - select - *, - {{ strategy.unique_key }} as dbt_unique_key, + select *, {{ unique_key_fields(strategy.unique_key) }}, {{ strategy.updated_at }} as {{ columns.dbt_updated_at }}, {{ strategy.updated_at }} as {{ columns.dbt_valid_from }}, {{ strategy.updated_at }} as {{ columns.dbt_valid_to }} @@ -82,13 +82,11 @@ from snapshot_query ), - {%- if strategy.invalidate_hard_deletes %} + {%- if strategy.hard_deletes == 'invalidate' or strategy.hard_deletes == 'new_record' %} deletes_source_data as ( - select - *, - {{ strategy.unique_key }} as dbt_unique_key + select *, {{ unique_key_fields(strategy.unique_key) }} from snapshot_query ), {% endif %} @@ -98,15 +96,16 @@ select 'insert' as dbt_change_type, source_data.* + {%- if strategy.hard_deletes == 'new_record' -%} + ,'False' as {{ columns.dbt_is_deleted }} + {%- endif %} from insertions_source_data as source_data - left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key - where snapshotted_data.dbt_unique_key is null - or ( - snapshotted_data.dbt_unique_key is not null - and ( - {{ strategy.row_changed }} - ) + left outer join snapshotted_data + on {{ unique_key_join_on(strategy.unique_key, "snapshotted_data", "source_data") }} + where {{ unique_key_is_null(strategy.unique_key, "snapshotted_data") }} + or ({{ unique_key_is_not_null(strategy.unique_key, "snapshotted_data") }} and ({{ strategy.row_changed }}) + ) ), @@ -117,17 +116,20 @@ 'update' as dbt_change_type, source_data.*, snapshotted_data.{{ columns.dbt_scd_id }} + {%- if strategy.hard_deletes == 'new_record' -%} + , snapshotted_data.{{ columns.dbt_is_deleted }} + {%- endif %} from updates_source_data as source_data - join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key + join snapshotted_data + on {{ unique_key_join_on(strategy.unique_key, "snapshotted_data", "source_data") }} where ( {{ strategy.row_changed }} ) ) - {%- if strategy.invalidate_hard_deletes -%} + {%- if strategy.hard_deletes == 'invalidate' or strategy.hard_deletes == 'new_record' %} , - deletes as ( select @@ -137,20 +139,57 @@ {{ snapshot_get_time() }} as {{ columns.dbt_updated_at }}, {{ snapshot_get_time() }} as {{ columns.dbt_valid_to }}, snapshotted_data.{{ columns.dbt_scd_id }} + {%- if 
strategy.hard_deletes == 'new_record' -%} + , snapshotted_data.{{ columns.dbt_is_deleted }} + {%- endif %} + from snapshotted_data + left join deletes_source_data as source_data + on {{ unique_key_join_on(strategy.unique_key, "snapshotted_data", "source_data") }} + where {{ unique_key_is_null(strategy.unique_key, "source_data") }} + ) + {%- endif %} + {%- if strategy.hard_deletes == 'new_record' %} + {% set source_sql_cols = get_column_schema_from_query(source_sql) %} + , + deletion_records as ( + + select + 'insert' as dbt_change_type, + {%- for col in source_sql_cols -%} + snapshotted_data.{{ adapter.quote(col.column) }}, + {% endfor -%} + {%- if strategy.unique_key | is_list -%} + {%- for key in strategy.unique_key -%} + snapshotted_data.{{ key }} as dbt_unique_key_{{ loop.index }}, + {% endfor -%} + {%- else -%} + snapshotted_data.dbt_unique_key as dbt_unique_key, + {% endif -%} + {{ snapshot_get_time() }} as {{ columns.dbt_valid_from }}, + {{ snapshot_get_time() }} as {{ columns.dbt_updated_at }}, + snapshotted_data.{{ columns.dbt_valid_to }} as {{ columns.dbt_valid_to }}, + snapshotted_data.{{ columns.dbt_scd_id }}, + 'True' as {{ columns.dbt_is_deleted }} from snapshotted_data - left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key - where source_data.dbt_unique_key is null + left join deletes_source_data as source_data + on {{ unique_key_join_on(strategy.unique_key, "snapshotted_data", "source_data") }} + where {{ unique_key_is_null(strategy.unique_key, "source_data") }} ) {%- endif %} select * from insertions union all select * from updates - {%- if strategy.invalidate_hard_deletes %} + {%- if strategy.hard_deletes == 'invalidate' or strategy.hard_deletes == 'new_record' %} union all select * from deletes {%- endif %} + {%- if strategy.hard_deletes == 'new_record' %} + union all + select * from deletion_records + {%- endif %} + {%- endmacro %} @@ -166,7 +205,10 @@ {{ strategy.scd_id }} as {{ columns.dbt_scd_id }}, {{ strategy.updated_at }} as {{ columns.dbt_updated_at }}, {{ strategy.updated_at }} as {{ columns.dbt_valid_from }}, - nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as {{ columns.dbt_valid_to }} + {{ get_dbt_valid_to_current(strategy, columns) }} + {%- if strategy.hard_deletes == 'new_record' -%} + , 'False' as {{ columns.dbt_is_deleted }} + {% endif -%} from ( {{ sql }} ) sbq @@ -210,3 +252,52 @@ {% endif %} {% endif %} {% endmacro %} + + +{% macro get_dbt_valid_to_current(strategy, columns) %} + {% set dbt_valid_to_current = config.get('dbt_valid_to_current') or "null" %} + coalesce(nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}), {{dbt_valid_to_current}}) + as {{ columns.dbt_valid_to }} +{% endmacro %} + + +{% macro unique_key_fields(unique_key) %} + {% if unique_key | is_list %} + {% for key in unique_key %} + {{ key }} as dbt_unique_key_{{ loop.index }} + {%- if not loop.last %} , {%- endif %} + {% endfor %} + {% else %} + {{ unique_key }} as dbt_unique_key + {% endif %} +{% endmacro %} + + +{% macro unique_key_join_on(unique_key, identifier, from_identifier) %} + {% if unique_key | is_list %} + {% for key in unique_key %} + {{ identifier }}.dbt_unique_key_{{ loop.index }} = {{ from_identifier }}.dbt_unique_key_{{ loop.index }} + {%- if not loop.last %} and {%- endif %} + {% endfor %} + {% else %} + {{ identifier }}.dbt_unique_key = {{ from_identifier }}.dbt_unique_key + {% endif %} +{% endmacro %} + + +{% macro unique_key_is_null(unique_key, identifier) %} + {% if unique_key | 
is_list %} + {{ identifier }}.dbt_unique_key_1 is null + {% else %} + {{ identifier }}.dbt_unique_key is null + {% endif %} +{% endmacro %} + + +{% macro unique_key_is_not_null(unique_key, identifier) %} + {% if unique_key | is_list %} + {{ identifier }}.dbt_unique_key_1 is not null + {% else %} + {{ identifier }}.dbt_unique_key is not null + {% endif %} +{% endmacro %} diff --git a/dbt/include/global_project/macros/materializations/snapshots/snapshot.sql b/dbt/include/global_project/macros/materializations/snapshots/snapshot.sql index 5daead4c..683a0c58 100644 --- a/dbt/include/global_project/macros/materializations/snapshots/snapshot.sql +++ b/dbt/include/global_project/macros/materializations/snapshots/snapshot.sql @@ -37,7 +37,7 @@ {% set columns = config.get("snapshot_table_column_names") or get_snapshot_table_column_names() %} - {{ adapter.valid_snapshot_target(target_relation, columns) }} + {{ adapter.assert_valid_snapshot_target_given_strategy(target_relation, columns, strategy) }} {% set build_or_select_sql = snapshot_staging_table(strategy, sql, target_relation) %} {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %} @@ -46,20 +46,22 @@ {% do adapter.expand_target_column_types(from_relation=staging_table, to_relation=target_relation) %} + {% set remove_columns = ['dbt_change_type', 'DBT_CHANGE_TYPE', 'dbt_unique_key', 'DBT_UNIQUE_KEY'] %} + {% if unique_key | is_list %} + {% for key in strategy.unique_key %} + {{ remove_columns.append('dbt_unique_key_' + loop.index|string) }} + {{ remove_columns.append('DBT_UNIQUE_KEY_' + loop.index|string) }} + {% endfor %} + {% endif %} + {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation) - | rejectattr('name', 'equalto', 'dbt_change_type') - | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE') - | rejectattr('name', 'equalto', 'dbt_unique_key') - | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY') + | rejectattr('name', 'in', remove_columns) | list %} {% do create_columns(target_relation, missing_columns) %} {% set source_columns = adapter.get_columns_in_relation(staging_table) - | rejectattr('name', 'equalto', 'dbt_change_type') - | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE') - | rejectattr('name', 'equalto', 'dbt_unique_key') - | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY') + | rejectattr('name', 'in', remove_columns) | list %} {% set quoted_source_columns = [] %} diff --git a/dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql b/dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql index 74494ed2..cf787e4f 100644 --- a/dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql +++ b/dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql @@ -14,7 +14,12 @@ on DBT_INTERNAL_SOURCE.{{ columns.dbt_scd_id }} = DBT_INTERNAL_DEST.{{ columns.dbt_scd_id }} when matched - and DBT_INTERNAL_DEST.{{ columns.dbt_valid_to }} is null + {% if config.get("dbt_valid_to_current") %} + and (DBT_INTERNAL_DEST.{{ columns.dbt_valid_to }} = {{ config.get('dbt_valid_to_current') }} or + DBT_INTERNAL_DEST.{{ columns.dbt_valid_to }} is null) + {% else %} + and DBT_INTERNAL_DEST.{{ columns.dbt_valid_to }} is null + {% endif %} and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete') then update set {{ columns.dbt_valid_to }} = DBT_INTERNAL_SOURCE.{{ columns.dbt_valid_to }} diff --git a/dbt/include/global_project/macros/materializations/snapshots/strategies.sql 
b/dbt/include/global_project/macros/materializations/snapshots/strategies.sql index 8c086182..49a381e8 100644 --- a/dbt/include/global_project/macros/materializations/snapshots/strategies.sql +++ b/dbt/include/global_project/macros/materializations/snapshots/strategies.sql @@ -54,7 +54,8 @@ {# The model_config parameter is no longer used, but is passed in anyway for compatibility. #} {% set primary_key = config.get('unique_key') %} {% set updated_at = config.get('updated_at') %} - {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes') or false %} + {% set hard_deletes = adapter.get_hard_deletes_behavior(config) %} + {% set invalidate_hard_deletes = hard_deletes == 'invalidate' %} {% set columns = config.get("snapshot_table_column_names") or get_snapshot_table_column_names() %} {#/* @@ -70,14 +71,16 @@ ({{ snapshotted_rel }}.{{ columns.dbt_valid_from }} < {{ current_rel }}.{{ updated_at }}) {%- endset %} - {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %} + {% set scd_args = api.Relation.scd_args(primary_key, updated_at) %} + {% set scd_id_expr = snapshot_hash_arguments(scd_args) %} {% do return({ "unique_key": primary_key, "updated_at": updated_at, "row_changed": row_changed_expr, "scd_id": scd_id_expr, - "invalidate_hard_deletes": invalidate_hard_deletes + "invalidate_hard_deletes": invalidate_hard_deletes, + "hard_deletes": hard_deletes }) %} {% endmacro %} @@ -140,7 +143,8 @@ {# The model_config parameter is no longer used, but is passed in anyway for compatibility. #} {% set check_cols_config = config.get('check_cols') %} {% set primary_key = config.get('unique_key') %} - {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes') or false %} + {% set hard_deletes = adapter.get_hard_deletes_behavior(config) %} + {% set invalidate_hard_deletes = hard_deletes == 'invalidate' %} {% set updated_at = config.get('updated_at') or snapshot_get_time() %} {% set column_added = false %} @@ -166,13 +170,15 @@ ) {%- endset %} - {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %} + {% set scd_args = api.Relation.scd_args(primary_key, updated_at) %} + {% set scd_id_expr = snapshot_hash_arguments(scd_args) %} {% do return({ "unique_key": primary_key, "updated_at": updated_at, "row_changed": row_changed_expr, "scd_id": scd_id_expr, - "invalidate_hard_deletes": invalidate_hard_deletes + "invalidate_hard_deletes": invalidate_hard_deletes, + "hard_deletes": hard_deletes }) %} {% endmacro %} diff --git a/pyproject.toml b/pyproject.toml index 6efffefe..47cd3ece 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ name = "dbt-adapters" description = "The set of adapter protocols and base functionality that supports integration with dbt-core" readme = "README.md" keywords = ["dbt", "adapter", "adapters", "database", "elt", "dbt-core", "dbt Core", "dbt Cloud", "dbt Labs"] -requires-python = ">=3.8.0" +requires-python = ">=3.9.0" authors = [ { name = "dbt Labs", email = "info@dbtlabs.com" }, ] @@ -17,20 +17,18 @@ classifiers = [ "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "Operating System :: POSIX :: Linux", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", ] dependencies = [ - "dbt-common>=1.10,<2.0", - "dbt-config<1.0", + "dbt-common>=1.13,<2.0", "pytz>=2015.7", # installed via dbt-common but used directly "agate>=1.0,<2.0", - 
"mashumaro[msgpack]>=3.0,<4.0", - "protobuf>=3.0,<5.0", + "mashumaro[msgpack]>=3.9,<3.15", + "protobuf>=5.0,<6.0", "typing-extensions>=4.0,<5.0", ] [project.urls] @@ -55,7 +53,6 @@ include = ["dbt/adapters", "dbt/include", "dbt/__init__.py"] [tool.hatch.envs.default] dependencies = [ - "dbt-config @ git+https://github.com/dbt-labs/dbt-common.git@feature/externalCatalogConfig#subdirectory=config", "dbt_common @ git+https://github.com/dbt-labs/dbt-common.git", 'pre-commit==3.7.0;python_version>="3.9"', 'pre-commit==3.5.0;python_version=="3.8"', diff --git a/tests/unit/behavior_flag_tests/__init__.py b/tests/unit/behavior_flag_tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/test_behavior_flags.py b/tests/unit/behavior_flag_tests/test_behavior_flags.py similarity index 83% rename from tests/unit/test_behavior_flags.py rename to tests/unit/behavior_flag_tests/test_behavior_flags.py index 7f3abb89..378d07bb 100644 --- a/tests/unit/test_behavior_flags.py +++ b/tests/unit/behavior_flag_tests/test_behavior_flags.py @@ -1,5 +1,6 @@ from typing import Any, Dict, List +from dbt.adapters.base.impl import DEFAULT_BASE_BEHAVIOR_FLAGS from dbt_common.behavior_flags import BehaviorFlag from dbt_common.exceptions import DbtBaseException import pytest @@ -64,3 +65,12 @@ def test_register_behavior_flags(adapter): assert not adapter.behavior.default_true_user_false_flag assert adapter.behavior.default_true_user_true_flag assert adapter.behavior.default_true_user_skip_flag + + +def test_behaviour_flags_property_empty(adapter_default_behaviour_flags): + assert adapter_default_behaviour_flags._behavior_flags == [] + + +def test_behavior_property_has_defaults(adapter_default_behaviour_flags): + for flag in DEFAULT_BASE_BEHAVIOR_FLAGS: + assert hasattr(adapter_default_behaviour_flags.behavior, flag["name"]) diff --git a/tests/unit/behavior_flag_tests/test_empty_project.py b/tests/unit/behavior_flag_tests/test_empty_project.py new file mode 100644 index 00000000..f9fd7a76 --- /dev/null +++ b/tests/unit/behavior_flag_tests/test_empty_project.py @@ -0,0 +1,87 @@ +from types import SimpleNamespace +from typing import Any, Dict, List + +from dbt_common.behavior_flags import BehaviorFlag +from dbt_common.exceptions import DbtBaseException +import pytest + +from dbt.adapters.contracts.connection import AdapterRequiredConfig, QueryComment + +from tests.unit.fixtures.credentials import CredentialsStub + + +@pytest.fixture +def flags() -> Dict[str, Any]: + return { + "unregistered_flag": True, + "default_false_user_false_flag": False, + "default_false_user_true_flag": True, + "default_true_user_false_flag": False, + "default_true_user_true_flag": True, + } + + +@pytest.fixture +def config(flags) -> AdapterRequiredConfig: + raw_config = { + "credentials": CredentialsStub("test_database", "test_schema"), + "profile_name": "test_profile", + "target_name": "test_target", + "threads": 4, + "project_name": "test_project", + "query_comment": QueryComment(), + "cli_vars": {}, + "target_path": "path/to/nowhere", + "log_cache_events": False, + } + return SimpleNamespace(**raw_config) + + +@pytest.fixture +def behavior_flags() -> List[BehaviorFlag]: + return [ + { + "name": "default_false_user_false_flag", + "default": False, + "docs_url": "https://docs.com", + }, + { + "name": "default_false_user_true_flag", + "default": False, + "description": "This is a false flag.", + }, + { + "name": "default_false_user_skip_flag", + "default": False, + "description": "This is a true flag.", + }, + { + 
"name": "default_true_user_false_flag", + "default": True, + "description": "This is fake news.", + }, + { + "name": "default_true_user_true_flag", + "default": True, + "docs_url": "https://moar.docs.com", + }, + { + "name": "default_true_user_skip_flag", + "default": True, + "description": "This is a true flag.", + }, + ] + + +def test_register_behavior_flags(adapter): + # make sure that users cannot add arbitrary flags to this collection + with pytest.raises(DbtBaseException): + assert adapter.behavior.unregistered_flag + + # check the values of the valid behavior flags + assert not adapter.behavior.default_false_user_false_flag + assert not adapter.behavior.default_false_user_true_flag + assert not adapter.behavior.default_false_user_skip_flag + assert adapter.behavior.default_true_user_false_flag + assert adapter.behavior.default_true_user_true_flag + assert adapter.behavior.default_true_user_skip_flag diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 346634df..225bdf57 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -1 +1,7 @@ -from tests.unit.fixtures import adapter, behavior_flags, config, flags +from tests.unit.fixtures import ( + adapter, + adapter_default_behaviour_flags, + behavior_flags, + config, + flags, +) diff --git a/tests/unit/fixtures/__init__.py b/tests/unit/fixtures/__init__.py index 78135a2c..caa1448f 100644 --- a/tests/unit/fixtures/__init__.py +++ b/tests/unit/fixtures/__init__.py @@ -1 +1,7 @@ -from tests.unit.fixtures.adapter import adapter, behavior_flags, config, flags +from tests.unit.fixtures.adapter import ( + adapter, + adapter_default_behaviour_flags, + behavior_flags, + config, + flags, +) diff --git a/tests/unit/fixtures/adapter.py b/tests/unit/fixtures/adapter.py index b59b0423..3730a083 100644 --- a/tests/unit/fixtures/adapter.py +++ b/tests/unit/fixtures/adapter.py @@ -15,105 +15,110 @@ from tests.unit.fixtures.credentials import CredentialsStub -@pytest.fixture -def adapter(config, behavior_flags) -> BaseAdapter: +class BaseAdapterStub(BaseAdapter): + """ + A stub for an adapter that uses the cache as the database + """ + + ConnectionManager = ConnectionManagerStub + + ### + # Abstract methods for database-specific values, attributes, and types + ### + @classmethod + def date_function(cls) -> str: + return "date_function" + + @classmethod + def is_cancelable(cls) -> bool: + return False + + def list_schemas(self, database: str) -> List[str]: + return list(self.cache.schemas) + + ### + # Abstract methods about relations + ### + def drop_relation(self, relation: BaseRelation) -> None: + self.cache_dropped(relation) + + def truncate_relation(self, relation: BaseRelation) -> None: + self.cache_dropped(relation) + + def rename_relation(self, from_relation: BaseRelation, to_relation: BaseRelation) -> None: + self.cache_renamed(from_relation, to_relation) + + def get_columns_in_relation(self, relation: BaseRelation) -> List[Column]: + # there's no database, so these need to be added as kwargs in the existing_relations fixture + return relation.columns + + def expand_column_types(self, goal: BaseRelation, current: BaseRelation) -> None: + # there's no database, so these need to be added as kwargs in the existing_relations fixture + object.__setattr__(current, "columns", goal.columns) + + def list_relations_without_caching(self, schema_relation: BaseRelation) -> List[BaseRelation]: + # there's no database, so use the cache as the database + return self.cache.get_relations(schema_relation.database, schema_relation.schema) + 
+ ### + # ODBC FUNCTIONS -- these should not need to change for every adapter, + # although some adapters may override them + ### + def create_schema(self, relation: BaseRelation): + # there's no database, this happens implicitly by adding a relation to the cache + pass + + def drop_schema(self, relation: BaseRelation): + for each_relation in self.cache.get_relations(relation.database, relation.schema): + self.cache_dropped(each_relation) + + @classmethod + def quote(cls, identifier: str) -> str: + quote_char = "" + return f"{quote_char}{identifier}{quote_char}" + + ### + # Conversions: These must be implemented by concrete implementations, for + # converting agate types into their sql equivalents. + ### + @classmethod + def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str: + return "str" + + @classmethod + def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str: + return "float" + + @classmethod + def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str: + return "bool" + + @classmethod + def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str: + return "datetime" + + @classmethod + def convert_date_type(cls, *args, **kwargs): + return "date" + + @classmethod + def convert_time_type(cls, *args, **kwargs): + return "time" - class BaseAdapterStub(BaseAdapter): - """ - A stub for an adapter that uses the cache as the database - """ - ConnectionManager = ConnectionManagerStub +@pytest.fixture +def adapter(config, behavior_flags) -> BaseAdapter: + class BaseAdapterBehaviourFlagStub(BaseAdapterStub): @property def _behavior_flags(self) -> List[BehaviorFlag]: return behavior_flags - ### - # Abstract methods for database-specific values, attributes, and types - ### - @classmethod - def date_function(cls) -> str: - return "date_function" - - @classmethod - def is_cancelable(cls) -> bool: - return False - - def list_schemas(self, database: str) -> List[str]: - return list(self.cache.schemas) - - ### - # Abstract methods about relations - ### - def drop_relation(self, relation: BaseRelation) -> None: - self.cache_dropped(relation) - - def truncate_relation(self, relation: BaseRelation) -> None: - self.cache_dropped(relation) - - def rename_relation(self, from_relation: BaseRelation, to_relation: BaseRelation) -> None: - self.cache_renamed(from_relation, to_relation) - - def get_columns_in_relation(self, relation: BaseRelation) -> List[Column]: - # there's no database, so these need to be added as kwargs in the existing_relations fixture - return relation.columns - - def expand_column_types(self, goal: BaseRelation, current: BaseRelation) -> None: - # there's no database, so these need to be added as kwargs in the existing_relations fixture - object.__setattr__(current, "columns", goal.columns) - - def list_relations_without_caching( - self, schema_relation: BaseRelation - ) -> List[BaseRelation]: - # there's no database, so use the cache as the database - return self.cache.get_relations(schema_relation.database, schema_relation.schema) - - ### - # ODBC FUNCTIONS -- these should not need to change for every adapter, - # although some adapters may override them - ### - def create_schema(self, relation: BaseRelation): - # there's no database, this happens implicitly by adding a relation to the cache - pass - - def drop_schema(self, relation: BaseRelation): - for each_relation in self.cache.get_relations(relation.database, relation.schema): - self.cache_dropped(each_relation) - - @classmethod - def quote(cls, identifier: 
str) -> str: - quote_char = "" - return f"{quote_char}{identifier}{quote_char}" - - ### - # Conversions: These must be implemented by concrete implementations, for - # converting agate types into their sql equivalents. - ### - @classmethod - def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str: - return "str" - - @classmethod - def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str: - return "float" - - @classmethod - def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str: - return "bool" - - @classmethod - def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str: - return "datetime" - - @classmethod - def convert_date_type(cls, *args, **kwargs): - return "date" - - @classmethod - def convert_time_type(cls, *args, **kwargs): - return "time" + return BaseAdapterBehaviourFlagStub(config, get_context("spawn")) + +@pytest.fixture +def adapter_default_behaviour_flags(config) -> BaseAdapter: return BaseAdapterStub(config, get_context("spawn")) diff --git a/tests/unit/test_adapter_telemetry.py b/tests/unit/test_adapter_telemetry.py new file mode 100644 index 00000000..1d5c4911 --- /dev/null +++ b/tests/unit/test_adapter_telemetry.py @@ -0,0 +1,15 @@ +import dbt.adapters.__about__ + +from dbt.adapters.base.impl import BaseAdapter +from dbt.adapters.base.relation import AdapterTrackingRelationInfo + + +def test_telemetry_returns(): + res = BaseAdapter.get_adapter_run_info({}) + + assert res.adapter_name == "base" + assert res.base_adapter_version == dbt.adapters.__about__.version + assert res.adapter_version == "" + assert res.model_adapter_details == {} + + assert type(res) is AdapterTrackingRelationInfo
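
Note for reviewers: to see the snapshot changes above in context, the following is a minimal, hypothetical snapshot definition, not part of this diff. The snapshot name, source, unique_key columns, and the dbt_valid_to_current expression are illustrative, and it assumes the hard_deletes setting is read from model config by adapter.get_hard_deletes_behavior(config) as introduced in strategies.sql. It sketches how the new 'new_record' hard-delete mode, the dbt_valid_to_current config, and a list-valued unique_key from helpers.sql and snapshot.sql are expected to be exercised together:

    {% snapshot orders_snapshot %}
        {{ config(
            strategy='timestamp',
            updated_at='updated_at',
            unique_key=['order_id', 'order_line'],
            hard_deletes='new_record',
            dbt_valid_to_current="to_date('9999-12-31')"
        ) }}
        -- placeholder source query; any select works here
        select * from {{ source('shop', 'orders') }}
    {% endsnapshot %}

Under this sketch, rows that disappear from the source are closed out and additionally re-inserted with dbt_is_deleted = 'True' (the deletion_records CTE added to default__snapshot_staging_table), while dbt_valid_to_current replaces null as the sentinel for currently valid records, which is why both the staging macro and the merge statement in snapshot_merge.sql now match rows where dbt_valid_to equals that value or is null.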