From 2f2e0cee10094270a85c099b4035c4266b1ec926 Mon Sep 17 00:00:00 2001 From: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Tue, 23 Jan 2024 13:23:40 -0800 Subject: [PATCH] delete dbt/adapters and add dbt-adapters package (#9401) * delete dbt/adapters * update dbt-adapters requirement * fix dev-requirements.txt * update dev-requirements.txt * add changie --- .../Dependencies-20240123-105843.yaml | 6 + core/dbt/adapters/README.md | 30 - core/dbt/adapters/__init__.py | 7 - core/dbt/adapters/base/README.md | 10 - core/dbt/adapters/base/__init__.py | 19 - core/dbt/adapters/base/column.py | 161 -- core/dbt/adapters/base/connections.py | 445 ----- core/dbt/adapters/base/impl.py | 1649 ----------------- core/dbt/adapters/base/meta.py | 128 -- core/dbt/adapters/base/plugin.py | 33 - core/dbt/adapters/base/query_headers.py | 100 - core/dbt/adapters/base/relation.py | 459 ----- core/dbt/adapters/cache.py | 518 ------ core/dbt/adapters/capability.py | 52 - core/dbt/adapters/clients/__init__.py | 0 core/dbt/adapters/clients/jinja.py | 23 - core/dbt/adapters/contracts/__init__.py | 0 core/dbt/adapters/contracts/connection.py | 229 --- core/dbt/adapters/contracts/macros.py | 11 - core/dbt/adapters/contracts/relation.py | 129 -- core/dbt/adapters/events/README.md | 57 - core/dbt/adapters/events/__init__.py | 0 core/dbt/adapters/events/adapter_types.proto | 517 ------ core/dbt/adapters/events/adapter_types_pb2.py | 205 -- core/dbt/adapters/events/base_types.py | 39 - core/dbt/adapters/events/logging.py | 67 - core/dbt/adapters/events/types.py | 417 ----- core/dbt/adapters/exceptions/__init__.py | 5 - core/dbt/adapters/exceptions/alias.py | 24 - core/dbt/adapters/exceptions/cache.py | 68 - core/dbt/adapters/exceptions/compilation.py | 255 --- core/dbt/adapters/exceptions/connection.py | 16 - core/dbt/adapters/exceptions/database.py | 51 - core/dbt/adapters/factory.py | 239 --- .../include/global_project/__init__.py | 4 - .../include/global_project/dbt_project.yml | 7 - .../include/global_project/docs/overview.md | 43 - .../macros/adapters/apply_grants.sql | 167 -- .../macros/adapters/columns.sql | 137 -- .../macros/adapters/freshness.sql | 16 - .../macros/adapters/indexes.sql | 41 - .../macros/adapters/metadata.sql | 96 - .../macros/adapters/persist_docs.sql | 33 - .../macros/adapters/relation.sql | 79 - .../global_project/macros/adapters/schema.sql | 20 - .../global_project/macros/adapters/show.sql | 22 - .../macros/adapters/timestamps.sql | 44 - .../macros/adapters/validate_sql.sql | 10 - .../global_project/macros/etc/datetime.sql | 62 - .../global_project/macros/etc/statement.sql | 52 - .../generic_test_sql/accepted_values.sql | 27 - .../macros/generic_test_sql/not_null.sql | 9 - .../macros/generic_test_sql/relationships.sql | 23 - .../macros/generic_test_sql/unique.sql | 12 - .../get_custom_name/get_custom_alias.sql | 36 - .../get_custom_name/get_custom_database.sql | 32 - .../get_custom_name/get_custom_schema.sql | 60 - .../macros/materializations/configs.sql | 21 - .../macros/materializations/hooks.sql | 35 - .../models/clone/can_clone_table.sql | 7 - .../materializations/models/clone/clone.sql | 67 - .../models/clone/create_or_replace_clone.sql | 7 - .../models/incremental/column_helpers.sql | 80 - .../models/incremental/incremental.sql | 92 - .../models/incremental/is_incremental.sql | 13 - .../models/incremental/merge.sql | 131 -- .../models/incremental/on_schema_change.sql | 144 -- .../models/incremental/strategies.sql | 79 - .../models/materialized_view.sql | 121 -- 
.../macros/materializations/models/table.sql | 64 - .../macros/materializations/models/view.sql | 72 - .../macros/materializations/seeds/helpers.sql | 128 -- .../macros/materializations/seeds/seed.sql | 60 - .../materializations/snapshots/helpers.sql | 181 -- .../materializations/snapshots/snapshot.sql | 99 - .../snapshots/snapshot_merge.sql | 25 - .../materializations/snapshots/strategies.sql | 174 -- .../macros/materializations/tests/helpers.sql | 42 - .../macros/materializations/tests/test.sql | 60 - .../macros/materializations/tests/unit.sql | 29 - .../materializations/tests/where_subquery.sql | 15 - .../macros/python_model/python.sql | 103 - .../relations/column/columns_spec_ddl.sql | 89 - .../macros/relations/create.sql | 23 - .../macros/relations/create_backup.sql | 17 - .../macros/relations/create_intermediate.sql | 17 - .../global_project/macros/relations/drop.sql | 41 - .../macros/relations/drop_backup.sql | 14 - .../relations/materialized_view/alter.sql | 55 - .../relations/materialized_view/create.sql | 10 - .../relations/materialized_view/drop.sql | 14 - .../relations/materialized_view/refresh.sql | 9 - .../relations/materialized_view/rename.sql | 10 - .../relations/materialized_view/replace.sql | 10 - .../macros/relations/rename.sql | 35 - .../macros/relations/rename_intermediate.sql | 14 - .../macros/relations/replace.sql | 50 - .../macros/relations/schema.sql | 8 - .../macros/relations/table/create.sql | 60 - .../macros/relations/table/drop.sql | 14 - .../macros/relations/table/rename.sql | 10 - .../macros/relations/table/replace.sql | 10 - .../macros/relations/view/create.sql | 27 - .../macros/relations/view/drop.sql | 14 - .../macros/relations/view/rename.sql | 10 - .../macros/relations/view/replace.sql | 66 - .../macros/unit_test_sql/get_fixture_sql.sql | 76 - .../global_project/macros/utils/any_value.sql | 9 - .../macros/utils/array_append.sql | 8 - .../macros/utils/array_concat.sql | 7 - .../macros/utils/array_construct.sql | 12 - .../global_project/macros/utils/bool_or.sql | 9 - .../macros/utils/cast_bool_to_text.sql | 7 - .../global_project/macros/utils/concat.sql | 7 - .../macros/utils/data_types.sql | 129 -- .../macros/utils/date_spine.sql | 75 - .../macros/utils/date_trunc.sql | 7 - .../global_project/macros/utils/dateadd.sql | 14 - .../global_project/macros/utils/datediff.sql | 14 - .../macros/utils/escape_single_quotes.sql | 8 - .../global_project/macros/utils/except.sql | 9 - .../macros/utils/generate_series.sql | 53 - .../global_project/macros/utils/hash.sql | 7 - .../global_project/macros/utils/intersect.sql | 9 - .../global_project/macros/utils/last_day.sql | 15 - .../global_project/macros/utils/length.sql | 11 - .../global_project/macros/utils/listagg.sql | 30 - .../global_project/macros/utils/literal.sql | 7 - .../global_project/macros/utils/position.sql | 11 - .../global_project/macros/utils/replace.sql | 14 - .../global_project/macros/utils/right.sql | 12 - .../global_project/macros/utils/safe_cast.sql | 9 - .../macros/utils/split_part.sql | 26 - .../global_project/tests/generic/builtin.sql | 30 - core/dbt/adapters/protocol.py | 166 -- core/dbt/adapters/reference_keys.py | 37 - core/dbt/adapters/relation_configs/README.md | 25 - .../dbt/adapters/relation_configs/__init__.py | 12 - .../adapters/relation_configs/config_base.py | 44 - .../relation_configs/config_change.py | 23 - .../relation_configs/config_validation.py | 57 - core/dbt/adapters/sql/__init__.py | 3 - core/dbt/adapters/sql/connections.py | 184 -- core/dbt/adapters/sql/impl.py | 274 --- 
core/dbt/adapters/utils.py | 68 - core/dbt/context/macro_resolver.py | 2 +- core/dbt/context/macros.py | 2 +- core/dbt/task/init.py | 2 +- core/setup.py | 1 + dev-requirements.txt | 1 + .../dbt/adapters/postgres/connections.py | 3 +- tests/adapter/dbt/__init__.py | 3 + tests/unit/test_adapter_factory.py | 2 +- 153 files changed, 16 insertions(+), 10995 deletions(-) create mode 100644 .changes/unreleased/Dependencies-20240123-105843.yaml delete mode 100644 core/dbt/adapters/README.md delete mode 100644 core/dbt/adapters/__init__.py delete mode 100644 core/dbt/adapters/base/README.md delete mode 100644 core/dbt/adapters/base/__init__.py delete mode 100644 core/dbt/adapters/base/column.py delete mode 100644 core/dbt/adapters/base/connections.py delete mode 100644 core/dbt/adapters/base/impl.py delete mode 100644 core/dbt/adapters/base/meta.py delete mode 100644 core/dbt/adapters/base/plugin.py delete mode 100644 core/dbt/adapters/base/query_headers.py delete mode 100644 core/dbt/adapters/base/relation.py delete mode 100644 core/dbt/adapters/cache.py delete mode 100644 core/dbt/adapters/capability.py delete mode 100644 core/dbt/adapters/clients/__init__.py delete mode 100644 core/dbt/adapters/clients/jinja.py delete mode 100644 core/dbt/adapters/contracts/__init__.py delete mode 100644 core/dbt/adapters/contracts/connection.py delete mode 100644 core/dbt/adapters/contracts/macros.py delete mode 100644 core/dbt/adapters/contracts/relation.py delete mode 100644 core/dbt/adapters/events/README.md delete mode 100644 core/dbt/adapters/events/__init__.py delete mode 100644 core/dbt/adapters/events/adapter_types.proto delete mode 100644 core/dbt/adapters/events/adapter_types_pb2.py delete mode 100644 core/dbt/adapters/events/base_types.py delete mode 100644 core/dbt/adapters/events/logging.py delete mode 100644 core/dbt/adapters/events/types.py delete mode 100644 core/dbt/adapters/exceptions/__init__.py delete mode 100644 core/dbt/adapters/exceptions/alias.py delete mode 100644 core/dbt/adapters/exceptions/cache.py delete mode 100644 core/dbt/adapters/exceptions/compilation.py delete mode 100644 core/dbt/adapters/exceptions/connection.py delete mode 100644 core/dbt/adapters/exceptions/database.py delete mode 100644 core/dbt/adapters/factory.py delete mode 100644 core/dbt/adapters/include/global_project/__init__.py delete mode 100644 core/dbt/adapters/include/global_project/dbt_project.yml delete mode 100644 core/dbt/adapters/include/global_project/docs/overview.md delete mode 100644 core/dbt/adapters/include/global_project/macros/adapters/apply_grants.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/adapters/columns.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/adapters/freshness.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/adapters/indexes.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/adapters/metadata.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/adapters/persist_docs.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/adapters/relation.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/adapters/schema.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/adapters/show.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/adapters/timestamps.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/adapters/validate_sql.sql delete mode 100644 
core/dbt/adapters/include/global_project/macros/etc/datetime.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/etc/statement.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/generic_test_sql/accepted_values.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/generic_test_sql/not_null.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/generic_test_sql/relationships.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/generic_test_sql/unique.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/get_custom_name/get_custom_alias.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/get_custom_name/get_custom_database.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/get_custom_name/get_custom_schema.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/configs.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/hooks.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/models/clone/can_clone_table.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/models/clone/clone.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/models/incremental/column_helpers.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/models/incremental/incremental.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/models/incremental/is_incremental.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/models/incremental/merge.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/models/incremental/on_schema_change.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/models/incremental/strategies.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/models/materialized_view.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/models/table.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/models/view.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/seeds/helpers.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/seeds/seed.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/snapshots/helpers.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/snapshots/snapshot.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/snapshots/snapshot_merge.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/snapshots/strategies.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/tests/helpers.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/tests/test.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/tests/unit.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/materializations/tests/where_subquery.sql delete mode 100644 
core/dbt/adapters/include/global_project/macros/python_model/python.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/column/columns_spec_ddl.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/create.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/create_backup.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/create_intermediate.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/drop.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/drop_backup.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/materialized_view/alter.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/materialized_view/create.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/materialized_view/drop.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/materialized_view/refresh.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/materialized_view/rename.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/materialized_view/replace.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/rename.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/rename_intermediate.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/replace.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/schema.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/table/create.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/table/drop.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/table/rename.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/table/replace.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/view/create.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/view/drop.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/view/rename.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/relations/view/replace.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/unit_test_sql/get_fixture_sql.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/any_value.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/array_append.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/array_concat.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/array_construct.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/bool_or.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/cast_bool_to_text.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/concat.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/data_types.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/date_spine.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/date_trunc.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/dateadd.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/datediff.sql delete 
mode 100644 core/dbt/adapters/include/global_project/macros/utils/escape_single_quotes.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/except.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/generate_series.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/hash.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/intersect.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/last_day.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/length.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/listagg.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/literal.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/position.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/replace.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/right.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/safe_cast.sql delete mode 100644 core/dbt/adapters/include/global_project/macros/utils/split_part.sql delete mode 100644 core/dbt/adapters/include/global_project/tests/generic/builtin.sql delete mode 100644 core/dbt/adapters/protocol.py delete mode 100644 core/dbt/adapters/reference_keys.py delete mode 100644 core/dbt/adapters/relation_configs/README.md delete mode 100644 core/dbt/adapters/relation_configs/__init__.py delete mode 100644 core/dbt/adapters/relation_configs/config_base.py delete mode 100644 core/dbt/adapters/relation_configs/config_change.py delete mode 100644 core/dbt/adapters/relation_configs/config_validation.py delete mode 100644 core/dbt/adapters/sql/__init__.py delete mode 100644 core/dbt/adapters/sql/connections.py delete mode 100644 core/dbt/adapters/sql/impl.py delete mode 100644 core/dbt/adapters/utils.py create mode 100644 tests/adapter/dbt/__init__.py diff --git a/.changes/unreleased/Dependencies-20240123-105843.yaml b/.changes/unreleased/Dependencies-20240123-105843.yaml new file mode 100644 index 00000000000..94fd865e0d0 --- /dev/null +++ b/.changes/unreleased/Dependencies-20240123-105843.yaml @@ -0,0 +1,6 @@ +kind: Dependencies +body: remove dbt/adapters and add dependency on dbt-adapters +time: 2024-01-23T10:58:43.286952-08:00 +custom: + Author: colin-rogers-dbt + PR: "9430" diff --git a/core/dbt/adapters/README.md b/core/dbt/adapters/README.md deleted file mode 100644 index 20ae9e7a56a..00000000000 --- a/core/dbt/adapters/README.md +++ /dev/null @@ -1,30 +0,0 @@ -# Adapters README - -The Adapters module is responsible for defining database connection methods, caching information from databases, defining how relations are represented, and providing the two major adapter implementations we ship: base and sql. - -# Directories - -## `base` - -Defines the base implementation adapters can use to build out full functionality. - -## `sql` - -Defines a SQL implementation that inherits the base implementation above and comes with premade methods and macros that can be overridden as needed per adapter. (This is the most common type of adapter.) - -# Files - -## `cache.py` - -Caches information from the database. - -## `factory.py` -Defines how we generate adapter objects. - -## `protocol.py` - -Defines interfaces for the various adapter objects. Helps mypy correctly resolve methods. - -## `reference_keys.py` - -Configures a universal naming scheme for cache elements.
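To make the layering described in this README concrete, here is a minimal sketch of how a hypothetical adapter assembles these pieces; the names MyConnectionManager, MyAdapter, and "mydb" are invented for illustration, and a real adapter implements many more methods than shown.

from dbt.adapters.sql import SQLAdapter, SQLConnectionManager


class MyConnectionManager(SQLConnectionManager):
    TYPE = "mydb"  # class-unique constant string identifying the adapter

    # exception_handler, open, cancel, and get_response would be
    # implemented here against the warehouse's DB-API driver.


class MyAdapter(SQLAdapter):
    ConnectionManager = MyConnectionManager

    @classmethod
    def date_function(cls) -> str:
        # the warehouse-specific current-timestamp expression
        return "now()"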
diff --git a/core/dbt/adapters/__init__.py b/core/dbt/adapters/__init__.py deleted file mode 100644 index e52cc72d2cd..00000000000 --- a/core/dbt/adapters/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# N.B. -# This will add to the package's __path__ all subdirectories of directories on sys.path named after the package, which effectively combines both modules into a single namespace (dbt.adapters). -# The matching statement is in plugins/postgres/dbt/adapters/__init__.py - -from pkgutil import extend_path - -__path__ = extend_path(__path__, __name__) diff --git a/core/dbt/adapters/base/README.md b/core/dbt/adapters/base/README.md deleted file mode 100644 index d3fa85d71a8..00000000000 --- a/core/dbt/adapters/base/README.md +++ /dev/null @@ -1,10 +0,0 @@ - -## Base adapters - -### impl.py - -The class `BaseAdapter` in [base/impl.py](https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/adapters/base/impl.py) is a (mostly) abstract class that adapter objects inherit from. The base class scaffolds out methods that every adapter project should usually implement for smooth communication between dbt and the database. - -Some target databases require more or fewer methods, depending on the warehouse's feature set. - -Look into the class for function-level comments. diff --git a/core/dbt/adapters/base/__init__.py b/core/dbt/adapters/base/__init__.py deleted file mode 100644 index 41c253efd87..00000000000 --- a/core/dbt/adapters/base/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -# these are all just exports, #noqa them so flake8 will be happy - -# TODO: Should we still include this in the `adapters` namespace? -from dbt.adapters.contracts.connection import Credentials # noqa: F401 -from dbt.adapters.base.meta import available # noqa: F401 -from dbt.adapters.base.connections import BaseConnectionManager # noqa: F401 -from dbt.adapters.base.relation import ( # noqa: F401 - BaseRelation, - RelationType, - SchemaSearchMap, -) -from dbt.adapters.base.column import Column # noqa: F401 -from dbt.adapters.base.impl import ( # noqa: F401 - AdapterConfig, - BaseAdapter, - PythonJobHelper, - ConstraintSupport, -) -from dbt.adapters.base.plugin import AdapterPlugin # noqa: F401 diff --git a/core/dbt/adapters/base/column.py b/core/dbt/adapters/base/column.py deleted file mode 100644 index 72e5576d408..00000000000 --- a/core/dbt/adapters/base/column.py +++ /dev/null @@ -1,161 +0,0 @@ -from dataclasses import dataclass -import re -from typing import Dict, ClassVar, Any, Optional - -from dbt_common.exceptions import DbtRuntimeError - - -@dataclass -class Column: - # Note: This is automatically used by contract code - # No-op conversions (INTEGER => INT) have been removed. - # Any adapter that wants to take advantage of "translate_type" - # should create a ClassVar with the appropriate conversions.
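# For example, a hypothetical adapter-specific subclass (MyAdapterColumn and
# the FLOAT64 mapping below are invented for illustration) might provide:
#
#     @dataclass
#     class MyAdapterColumn(Column):
#         TYPE_LABELS: ClassVar[Dict[str, str]] = {
#             "STRING": "TEXT",
#             "FLOAT64": "FLOAT",
#         }
#
#     MyAdapterColumn.translate_type("float64")  # returns "FLOAT"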
- TYPE_LABELS: ClassVar[Dict[str, str]] = { - "STRING": "TEXT", - } - column: str - dtype: str - char_size: Optional[int] = None - numeric_precision: Optional[Any] = None - numeric_scale: Optional[Any] = None - - @classmethod - def translate_type(cls, dtype: str) -> str: - return cls.TYPE_LABELS.get(dtype.upper(), dtype) - - @classmethod - def create(cls, name, label_or_dtype: str) -> "Column": - column_type = cls.translate_type(label_or_dtype) - return cls(name, column_type) - - @property - def name(self) -> str: - return self.column - - @property - def quoted(self) -> str: - return '"{}"'.format(self.column) - - @property - def data_type(self) -> str: - if self.is_string(): - return self.string_type(self.string_size()) - elif self.is_numeric(): - return self.numeric_type(self.dtype, self.numeric_precision, self.numeric_scale) - else: - return self.dtype - - def is_string(self) -> bool: - return self.dtype.lower() in ["text", "character varying", "character", "varchar"] - - def is_number(self): - return any([self.is_integer(), self.is_numeric(), self.is_float()]) - - def is_float(self): - return self.dtype.lower() in [ - # floats - "real", - "float4", - "float", - "double precision", - "float8", - "double", - ] - - def is_integer(self) -> bool: - return self.dtype.lower() in [ - # real types - "smallint", - "integer", - "bigint", - "smallserial", - "serial", - "bigserial", - # aliases - "int2", - "int4", - "int8", - "serial2", - "serial4", - "serial8", - ] - - def is_numeric(self) -> bool: - return self.dtype.lower() in ["numeric", "decimal"] - - def string_size(self) -> int: - if not self.is_string(): - raise DbtRuntimeError("Called string_size() on non-string field!") - - if self.dtype == "text" or self.char_size is None: - # char_size should never be None. Handle it reasonably just in case - return 256 - else: - return int(self.char_size) - - def can_expand_to(self, other_column: "Column") -> bool: - """returns True if this column can be expanded to the size of the - other column""" - if not self.is_string() or not other_column.is_string(): - return False - - return other_column.string_size() > self.string_size() - - def literal(self, value: Any) -> str: - return "{}::{}".format(value, self.data_type) - - @classmethod - def string_type(cls, size: int) -> str: - return "character varying({})".format(size) - - @classmethod - def numeric_type(cls, dtype: str, precision: Any, scale: Any) -> str: - # This could be decimal(...), numeric(...), number(...) 
- # Just use whatever was fed in here -- don't try to get too clever - if precision is None or scale is None: - return dtype - else: - return "{}({},{})".format(dtype, precision, scale) - - def __repr__(self) -> str: - return "<Column {} ({})>".format(self.name, self.data_type) - - @classmethod - def from_description(cls, name: str, raw_data_type: str) -> "Column": - match = re.match(r"([^(]+)(\([^)]+\))?", raw_data_type) - if match is None: - raise DbtRuntimeError(f'Could not interpret data type "{raw_data_type}"') - data_type, size_info = match.groups() - char_size = None - numeric_precision = None - numeric_scale = None - if size_info is not None: - # strip out the parentheses - size_info = size_info[1:-1] - parts = size_info.split(",") - if len(parts) == 1: - try: - char_size = int(parts[0]) - except ValueError: - raise DbtRuntimeError( - f'Could not interpret data_type "{raw_data_type}": ' - f'could not convert "{parts[0]}" to an integer' - ) - elif len(parts) == 2: - try: - numeric_precision = int(parts[0]) - except ValueError: - raise DbtRuntimeError( - f'Could not interpret data_type "{raw_data_type}": ' - f'could not convert "{parts[0]}" to an integer' - ) - try: - numeric_scale = int(parts[1]) - except ValueError: - raise DbtRuntimeError( - f'Could not interpret data_type "{raw_data_type}": ' - f'could not convert "{parts[1]}" to an integer' - ) - - return cls(name, data_type, char_size, numeric_precision, numeric_scale) diff --git a/core/dbt/adapters/base/connections.py b/core/dbt/adapters/base/connections.py deleted file mode 100644 index cdb3f7467e1..00000000000 --- a/core/dbt/adapters/base/connections.py +++ /dev/null @@ -1,445 +0,0 @@ -import abc -import os -from time import sleep -import sys -import traceback - -# multiprocessing.RLock is a function returning this type -from multiprocessing.synchronize import RLock -from multiprocessing.context import SpawnContext -from threading import get_ident -from typing import ( - Any, - Dict, - Tuple, - Hashable, - Optional, - ContextManager, - List, - Type, - Union, - Iterable, - Callable, -) - -import agate - -import dbt.adapters.exceptions -import dbt_common.exceptions.base -from dbt.adapters.contracts.connection import ( - Connection, - Identifier, - ConnectionState, - AdapterRequiredConfig, - LazyHandle, - AdapterResponse, -) -from dbt.adapters.base.query_headers import ( - MacroQueryStringSetter, -) -from dbt.adapters.events.logging import AdapterLogger -from dbt_common.events.functions import fire_event -from dbt.adapters.events.types import ( - NewConnection, - ConnectionReused, - ConnectionLeftOpenInCleanup, - ConnectionLeftOpen, - ConnectionClosedInCleanup, - ConnectionClosed, - Rollback, - RollbackFailed, -) -from dbt_common.events.contextvars import get_node_info -from dbt_common.utils import cast_to_str - -SleepTime = Union[int, float] # As taken by time.sleep. -AdapterHandle = Any # Adapter connection handle objects can be any class. - - -class BaseConnectionManager(metaclass=abc.ABCMeta): - """Methods to implement: - - exception_handler - - cancel_open - - open - - begin - - commit - - clear_transaction - - execute - - You must also set the 'TYPE' class attribute with a class-unique constant - string.
- """ - - TYPE: str = NotImplemented - - def __init__(self, profile: AdapterRequiredConfig, mp_context: SpawnContext) -> None: - self.profile = profile - self.thread_connections: Dict[Hashable, Connection] = {} - self.lock: RLock = mp_context.RLock() - self.query_header: Optional[MacroQueryStringSetter] = None - - def set_query_header(self, query_header_context: Dict[str, Any]) -> None: - self.query_header = MacroQueryStringSetter(self.profile, query_header_context) - - @staticmethod - def get_thread_identifier() -> Hashable: - # note that get_ident() may be re-used, but we should never experience - # that within a single process - return (os.getpid(), get_ident()) - - def get_thread_connection(self) -> Connection: - key = self.get_thread_identifier() - with self.lock: - if key not in self.thread_connections: - raise dbt.adapters.exceptions.InvalidConnectionError( - key, list(self.thread_connections) - ) - return self.thread_connections[key] - - def set_thread_connection(self, conn: Connection) -> None: - key = self.get_thread_identifier() - if key in self.thread_connections: - raise dbt_common.exceptions.DbtInternalError( - "In set_thread_connection, existing connection exists for {}" - ) - self.thread_connections[key] = conn - - def get_if_exists(self) -> Optional[Connection]: - key = self.get_thread_identifier() - with self.lock: - return self.thread_connections.get(key) - - def clear_thread_connection(self) -> None: - key = self.get_thread_identifier() - with self.lock: - if key in self.thread_connections: - del self.thread_connections[key] - - def clear_transaction(self) -> None: - """Clear any existing transactions.""" - conn = self.get_thread_connection() - if conn is not None: - if conn.transaction_open: - self._rollback(conn) - self.begin() - self.commit() - - def rollback_if_open(self) -> None: - conn = self.get_if_exists() - if conn is not None and conn.handle and conn.transaction_open: - self._rollback(conn) - - @abc.abstractmethod - def exception_handler(self, sql: str) -> ContextManager: - """Create a context manager that handles exceptions caused by database - interactions. - - :param str sql: The SQL string that the block inside the context - manager is executing. - :return: A context manager that handles exceptions raised by the - underlying database. - """ - raise dbt_common.exceptions.base.NotImplementedError( - "`exception_handler` is not implemented for this adapter!" - ) - - def set_connection_name(self, name: Optional[str] = None) -> Connection: - """Called by 'acquire_connection' in BaseAdapter, which is called by - 'connection_named'. 
- Creates a connection for this thread if one doesn't already - exist, and will rename an existing connection.""" - - conn_name: str = "master" if name is None else name - - # Get a connection for this thread - conn = self.get_if_exists() - - if conn and conn.name == conn_name and conn.state == "open": - # Found a connection and nothing to do, so just return it - return conn - - if conn is None: - # Create a new connection - conn = Connection( - type=Identifier(self.TYPE), - name=conn_name, - state=ConnectionState.INIT, - transaction_open=False, - handle=None, - credentials=self.profile.credentials, - ) - conn.handle = LazyHandle(self.open) - # Add the connection to thread_connections for this thread - self.set_thread_connection(conn) - fire_event( - NewConnection(conn_name=conn_name, conn_type=self.TYPE, node_info=get_node_info()) - ) - else: # existing connection either wasn't open or didn't have the right name - if conn.state != "open": - conn.handle = LazyHandle(self.open) - if conn.name != conn_name: - orig_conn_name: str = conn.name or "" - conn.name = conn_name - fire_event(ConnectionReused(orig_conn_name=orig_conn_name, conn_name=conn_name)) - - return conn - - @classmethod - def retry_connection( - cls, - connection: Connection, - connect: Callable[[], AdapterHandle], - logger: AdapterLogger, - retryable_exceptions: Iterable[Type[Exception]], - retry_limit: int = 1, - retry_timeout: Union[Callable[[int], SleepTime], SleepTime] = 1, - _attempts: int = 0, - ) -> Connection: - """Given a Connection, set its handle by calling connect. - - The calls to connect will be retried up to retry_limit times to deal with transient - connection errors. By default, one retry will be attempted if retryable_exceptions is set. - - :param Connection connection: An instance of a Connection that needs a handle to be set, - usually when attempting to open it. - :param connect: A callable that returns the appropriate connection handle for a - given adapter. This callable will be retried retry_limit times if a subclass of any - Exception in retryable_exceptions is raised by connect. - :type connect: Callable[[], AdapterHandle] - :param AdapterLogger logger: A logger to emit messages on retry attempts or errors. When - handling expected errors, we call debug, and call warning on unexpected errors or when - all retry attempts have been exhausted. - :param retryable_exceptions: An iterable of exception classes that if raised by - connect should trigger a retry. - :type retryable_exceptions: Iterable[Type[Exception]] - :param int retry_limit: How many times to retry the call to connect. If this limit - is exceeded before a successful call, a FailedToConnectError will be raised. - Must be non-negative. - :param retry_timeout: Time to wait between attempts to connect. Can also take a - Callable that takes the number of attempts so far, beginning at 0, and returns an int - or float to be passed to time.sleep. - :type retry_timeout: Union[Callable[[int], SleepTime], SleepTime] = 1 - :param int _attempts: Parameter used to keep track of the number of attempts in calling the - connect function across recursive calls. Passed as an argument to retry_timeout if it - is a Callable. This parameter should not be set by the initial caller. - :raises dbt.adapters.exceptions.FailedToConnectError: Upon exhausting all retry attempts without - successfully acquiring a handle. - :return: The given connection with its appropriate state and handle attributes set - depending on whether we successfully acquired a handle or not.
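Example (an illustrative sketch only; ``my_connect`` and ``OperationalError``
stand in for an adapter's own connect callable and retryable exception type,
neither of which is defined in this module)::

    connection = cls.retry_connection(
        connection,
        connect=my_connect,
        logger=logger,
        retryable_exceptions=(OperationalError,),
        retry_limit=3,
        retry_timeout=lambda attempt: 2**attempt,  # exponential backoff
    )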
- """ - timeout = retry_timeout(_attempts) if callable(retry_timeout) else retry_timeout - if timeout < 0: - raise dbt.adapters.exceptions.FailedToConnectError( - "retry_timeout cannot be negative or return a negative time." - ) - - if retry_limit < 0 or retry_limit > sys.getrecursionlimit(): - # This guard is not perfect others may add to the recursion limit (e.g. built-ins). - connection.handle = None - connection.state = ConnectionState.FAIL - raise dbt.adapters.exceptions.FailedToConnectError("retry_limit cannot be negative") - - try: - connection.handle = connect() - connection.state = ConnectionState.OPEN - return connection - - except tuple(retryable_exceptions) as e: - if retry_limit <= 0: - connection.handle = None - connection.state = ConnectionState.FAIL - raise dbt.adapters.exceptions.FailedToConnectError(str(e)) - - logger.debug( - f"Got a retryable error when attempting to open a {cls.TYPE} connection.\n" - f"{retry_limit} attempts remaining. Retrying in {timeout} seconds.\n" - f"Error:\n{e}" - ) - - sleep(timeout) - return cls.retry_connection( - connection=connection, - connect=connect, - logger=logger, - retry_limit=retry_limit - 1, - retry_timeout=retry_timeout, - retryable_exceptions=retryable_exceptions, - _attempts=_attempts + 1, - ) - - except Exception as e: - connection.handle = None - connection.state = ConnectionState.FAIL - raise dbt.adapters.exceptions.FailedToConnectError(str(e)) - - @abc.abstractmethod - def cancel_open(self) -> Optional[List[str]]: - """Cancel all open connections on the adapter. (passable)""" - raise dbt_common.exceptions.base.NotImplementedError( - "`cancel_open` is not implemented for this adapter!" - ) - - @classmethod - @abc.abstractmethod - def open(cls, connection: Connection) -> Connection: - """Open the given connection on the adapter and return it. - - This may mutate the given connection (in particular, its state and its - handle). - - This should be thread-safe, or hold the lock if necessary. The given - connection should not be in either in_use or available. - """ - raise dbt_common.exceptions.base.NotImplementedError( - "`open` is not implemented for this adapter!" - ) - - def release(self) -> None: - with self.lock: - conn = self.get_if_exists() - if conn is None: - return - - try: - # always close the connection. close() calls _rollback() if there - # is an open transaction - self.close(conn) - except Exception: - # if rollback or close failed, remove our busted connection - self.clear_thread_connection() - raise - - def cleanup_all(self) -> None: - with self.lock: - for connection in self.thread_connections.values(): - if connection.state not in {"closed", "init"}: - fire_event(ConnectionLeftOpenInCleanup(conn_name=cast_to_str(connection.name))) - else: - fire_event(ConnectionClosedInCleanup(conn_name=cast_to_str(connection.name))) - self.close(connection) - - # garbage collect these connections - self.thread_connections.clear() - - @abc.abstractmethod - def begin(self) -> None: - """Begin a transaction. (passable)""" - raise dbt_common.exceptions.base.NotImplementedError( - "`begin` is not implemented for this adapter!" - ) - - @abc.abstractmethod - def commit(self) -> None: - """Commit a transaction. (passable)""" - raise dbt_common.exceptions.base.NotImplementedError( - "`commit` is not implemented for this adapter!" 
- ) - - @classmethod - def _rollback_handle(cls, connection: Connection) -> None: - """Perform the actual rollback operation.""" - try: - connection.handle.rollback() - except Exception: - fire_event( - RollbackFailed( - conn_name=cast_to_str(connection.name), - exc_info=traceback.format_exc(), - node_info=get_node_info(), - ) - ) - - @classmethod - def _close_handle(cls, connection: Connection) -> None: - """Perform the actual close operation.""" - # On windows, sometimes connection handles don't have a close() attr. - if hasattr(connection.handle, "close"): - fire_event( - ConnectionClosed(conn_name=cast_to_str(connection.name), node_info=get_node_info()) - ) - connection.handle.close() - else: - fire_event( - ConnectionLeftOpen( - conn_name=cast_to_str(connection.name), node_info=get_node_info() - ) - ) - - @classmethod - def _rollback(cls, connection: Connection) -> None: - """Roll back the given connection.""" - if connection.transaction_open is False: - raise dbt_common.exceptions.DbtInternalError( - f"Tried to rollback transaction on connection " - f'"{connection.name}", but it does not have one open!' - ) - - fire_event(Rollback(conn_name=cast_to_str(connection.name), node_info=get_node_info())) - cls._rollback_handle(connection) - - connection.transaction_open = False - - @classmethod - def close(cls, connection: Connection) -> Connection: - # if the connection is in closed or init, there's nothing to do - if connection.state in {ConnectionState.CLOSED, ConnectionState.INIT}: - return connection - - if connection.transaction_open and connection.handle: - fire_event(Rollback(conn_name=cast_to_str(connection.name), node_info=get_node_info())) - cls._rollback_handle(connection) - connection.transaction_open = False - - cls._close_handle(connection) - connection.state = ConnectionState.CLOSED - - return connection - - def commit_if_has_connection(self) -> None: - """If the named connection exists, commit the current transaction.""" - connection = self.get_if_exists() - if connection: - self.commit() - - def _add_query_comment(self, sql: str) -> str: - if self.query_header is None: - return sql - return self.query_header.add(sql) - - @abc.abstractmethod - def execute( - self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None - ) -> Tuple[AdapterResponse, agate.Table]: - """Execute the given SQL. - - :param str sql: The sql to execute. - :param bool auto_begin: If set, and dbt is not currently inside a - transaction, automatically begin one. - :param bool fetch: If set, fetch results. - :param int limit: If set, limits the result set - :return: A tuple of the query status and results (empty if fetch=False). - :rtype: Tuple[AdapterResponse, agate.Table] - """ - raise dbt_common.exceptions.base.NotImplementedError( - "`execute` is not implemented for this adapter!" - ) - - def add_select_query(self, sql: str) -> Tuple[Connection, Any]: - """ - This was added here because base.impl.BaseAdapter.get_column_schema_from_query expects it to be here. - That method wouldn't work unless the adapter used sql.impl.SQLAdapter, sql.connections.SQLConnectionManager - or defined this method on ConnectionManager before passing it in to Adapter. - - See https://github.com/dbt-labs/dbt-core/issues/8396 for more information. - """ - raise dbt_common.exceptions.base.NotImplementedError( - "`add_select_query` is not implemented for this adapter!" 
- ) - - @classmethod - def data_type_code_to_name(cls, type_code: Union[int, str]) -> str: - """Get the string representation of the data type from the type_code.""" - # https://peps.python.org/pep-0249/#type-objects - raise dbt_common.exceptions.base.NotImplementedError( - "`data_type_code_to_name` is not implemented for this adapter!" - ) diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py deleted file mode 100644 index 6849fca7c70..00000000000 --- a/core/dbt/adapters/base/impl.py +++ /dev/null @@ -1,1649 +0,0 @@ -import abc -from concurrent.futures import as_completed, Future -from contextlib import contextmanager -from datetime import datetime -from enum import Enum -import time -from typing import ( - Any, - Callable, - Dict, - Iterator, - List, - Mapping, - Optional, - Set, - Tuple, - Type, - TypedDict, - Union, - FrozenSet, - Iterable, -) -from multiprocessing.context import SpawnContext - -from dbt.adapters.capability import Capability, CapabilityDict -from dbt_common.contracts.constraints import ( - ColumnLevelConstraint, - ConstraintType, - ModelLevelConstraint, -) -from dbt.adapters.contracts.macros import MacroResolverProtocol - -import agate -import pytz - -from dbt.adapters.exceptions import ( - SnapshotTargetIncompleteError, - SnapshotTargetNotSnapshotTableError, - NullRelationDropAttemptedError, - NullRelationCacheAttemptedError, - RelationReturnedMultipleResultsError, - UnexpectedNonTimestampError, - RenameToNoneAttemptedError, - QuoteConfigTypeError, -) - -from dbt_common.exceptions import ( - NotImplementedError, - DbtInternalError, - DbtRuntimeError, - DbtValidationError, - UnexpectedNullError, - MacroArgTypeError, - MacroResultError, -) - -from dbt.adapters.protocol import ( - AdapterConfig, - MacroContextGeneratorCallable, -) -from dbt_common.clients.agate_helper import ( - empty_table, - get_column_value_uncased, - merge_tables, - table_from_rows, - Integer, -) -from dbt_common.clients.jinja import CallableMacroGenerator -from dbt_common.events.functions import fire_event, warn_or_error -from dbt.adapters.events.types import ( - CacheMiss, - ListRelations, - CodeExecution, - CodeExecutionStatus, - CatalogGenerationError, - ConstraintNotSupported, - ConstraintNotEnforced, -) -from dbt_common.utils import filter_null_values, executor, cast_to_str, AttrDict - -from dbt.adapters.contracts.relation import RelationConfig -from dbt.adapters.base.connections import ( - Connection, - AdapterResponse, - BaseConnectionManager, -) -from dbt.adapters.base.meta import AdapterMeta, available -from dbt.adapters.base.relation import ( - ComponentName, - BaseRelation, - InformationSchema, - SchemaSearchMap, -) -from dbt.adapters.base import Column as BaseColumn -from dbt.adapters.base import Credentials -from dbt.adapters.cache import RelationsCache, _make_ref_key_dict -from dbt.adapters.events.types import CollectFreshnessReturnSignature - - -GET_CATALOG_MACRO_NAME = "get_catalog" -GET_CATALOG_RELATIONS_MACRO_NAME = "get_catalog_relations" -FRESHNESS_MACRO_NAME = "collect_freshness" -GET_RELATION_LAST_MODIFIED_MACRO_NAME = "get_relation_last_modified" - - -class ConstraintSupport(str, Enum): - ENFORCED = "enforced" - NOT_ENFORCED = "not_enforced" - NOT_SUPPORTED = "not_supported" - - -def _expect_row_value(key: str, row: agate.Row): - if key not in row.keys(): - raise DbtInternalError( - 'Got a row without "{}" column, columns: {}'.format(key, row.keys()) - ) - return row[key] - - -def _catalog_filter_schemas( - used_schemas: FrozenSet[Tuple[str, str]] -) -> 
Callable[[agate.Row], bool]: - """Return a function that takes a row and decides if the row should be - included in the catalog output. - """ - schemas = frozenset((d.lower(), s.lower()) for d, s in used_schemas) - - def test(row: agate.Row) -> bool: - table_database = _expect_row_value("table_database", row) - table_schema = _expect_row_value("table_schema", row) - # the schema may be present but None, which is not an error and should - # be filtered out - if table_schema is None: - return False - return (table_database.lower(), table_schema.lower()) in schemas - - return test - - -def _utc(dt: Optional[datetime], source: Optional[BaseRelation], field_name: str) -> datetime: - """If dt has a timezone, return a new datetime that's in UTC. Otherwise, - assume the datetime is already for UTC and add the timezone. - """ - if dt is None: - raise UnexpectedNullError(field_name, source) - - elif not hasattr(dt, "tzinfo"): - raise UnexpectedNonTimestampError(field_name, source, dt) - - elif dt.tzinfo: - return dt.astimezone(pytz.UTC) - else: - return dt.replace(tzinfo=pytz.UTC) - - -def _relation_name(rel: Optional[BaseRelation]) -> str: - if rel is None: - return "null relation" - else: - return str(rel) - - -def log_code_execution(code_execution_function): - # decorator to log code and execution time - if code_execution_function.__name__ != "submit_python_job": - raise ValueError("this should be only used to log submit_python_job now") - - def execution_with_log(*args): - self = args[0] - connection_name = self.connections.get_thread_connection().name - fire_event(CodeExecution(conn_name=connection_name, code_content=args[2])) - start_time = time.time() - response = code_execution_function(*args) - fire_event( - CodeExecutionStatus( - status=response._message, elapsed=round((time.time() - start_time), 2) - ) - ) - return response - - return execution_with_log - - -class PythonJobHelper: - def __init__(self, parsed_model: Dict, credential: Credentials) -> None: - raise NotImplementedError("PythonJobHelper is not implemented yet") - - def submit(self, compiled_code: str) -> Any: - raise NotImplementedError("PythonJobHelper submit function is not implemented yet") - - -class FreshnessResponse(TypedDict): - max_loaded_at: datetime - snapshotted_at: datetime - age: float # age in seconds - - -class BaseAdapter(metaclass=AdapterMeta): - """The BaseAdapter provides an abstract base class for adapters. - - Adapters must implement the following methods and macros. Some of the - methods can be safely overridden as a noop, where it makes sense - (transactions on databases that don't support them, for instance). Those - methods are marked with a (passable) in their docstrings. Check docstrings - for type information, etc. - - To implement a macro, implement "${adapter_type}__${macro_name}" in the - adapter's internal project. - - To invoke a method in an adapter macro, call it on the 'adapter' Jinja - object using dot syntax. - - To invoke a method in model code, add the @available decorator atop a method - declaration. Methods are invoked as macros. 
- - Methods: - - exception_handler - - date_function - - list_schemas - - drop_relation - - truncate_relation - - rename_relation - - get_columns_in_relation - - get_column_schema_from_query - - expand_column_types - - list_relations_without_caching - - is_cancelable - - create_schema - - drop_schema - - quote - - convert_text_type - - convert_number_type - - convert_boolean_type - - convert_datetime_type - - convert_date_type - - convert_time_type - - standardize_grants_dict - - Macros: - - get_catalog - """ - - Relation: Type[BaseRelation] = BaseRelation - Column: Type[BaseColumn] = BaseColumn - ConnectionManager: Type[BaseConnectionManager] - - # A set of clobber config fields accepted by this adapter - # for use in materializations - AdapterSpecificConfigs: Type[AdapterConfig] = AdapterConfig - - CONSTRAINT_SUPPORT = { - ConstraintType.check: ConstraintSupport.NOT_SUPPORTED, - ConstraintType.not_null: ConstraintSupport.ENFORCED, - ConstraintType.unique: ConstraintSupport.NOT_ENFORCED, - ConstraintType.primary_key: ConstraintSupport.NOT_ENFORCED, - ConstraintType.foreign_key: ConstraintSupport.ENFORCED, - } - - # This static member variable can be overridden in concrete adapter - # implementations to indicate adapter support for optional capabilities. - _capabilities = CapabilityDict({}) - - def __init__(self, config, mp_context: SpawnContext) -> None: - self.config = config - self.cache = RelationsCache(log_cache_events=config.log_cache_events) - self.connections = self.ConnectionManager(config, mp_context) - self._macro_resolver: Optional[MacroResolverProtocol] = None - self._macro_context_generator: Optional[MacroContextGeneratorCallable] = None - - ### - # Methods to set / access a macro resolver - ### - def set_macro_resolver(self, macro_resolver: MacroResolverProtocol) -> None: - self._macro_resolver = macro_resolver - - def get_macro_resolver(self) -> Optional[MacroResolverProtocol]: - return self._macro_resolver - - def clear_macro_resolver(self) -> None: - if self._macro_resolver is not None: - self._macro_resolver = None - - def set_macro_context_generator( - self, - macro_context_generator: MacroContextGeneratorCallable, - ) -> None: - self._macro_context_generator = macro_context_generator - - ### - # Methods that pass through to the connection manager - ### - def acquire_connection(self, name=None) -> Connection: - return self.connections.set_connection_name(name) - - def release_connection(self) -> None: - self.connections.release() - - def cleanup_connections(self) -> None: - self.connections.cleanup_all() - - def clear_transaction(self) -> None: - self.connections.clear_transaction() - - def commit_if_has_connection(self) -> None: - self.connections.commit_if_has_connection() - - def debug_query(self) -> None: - self.execute("select 1 as id") - - def nice_connection_name(self) -> str: - conn = self.connections.get_if_exists() - if conn is None or conn.name is None: - return "" - return conn.name - - @contextmanager - def connection_named(self, name: str, query_header_context: Any = None) -> Iterator[None]: - try: - if self.connections.query_header is not None: - self.connections.query_header.set(name, query_header_context) - self.acquire_connection(name) - yield - finally: - self.release_connection() - if self.connections.query_header is not None: - self.connections.query_header.reset() - - @available.parse(lambda *a, **k: ("", empty_table())) - def execute( - self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None - ) ->
Tuple[AdapterResponse, agate.Table]: - """Execute the given SQL. This is a thin wrapper around - ConnectionManager.execute. - - :param str sql: The sql to execute. - :param bool auto_begin: If set, and dbt is not currently inside a - transaction, automatically begin one. - :param bool fetch: If set, fetch results. - :param Optional[int] limit: If set, only fetch n number of rows - :return: A tuple of the query status and results (empty if fetch=False). - :rtype: Tuple[AdapterResponse, agate.Table] - """ - return self.connections.execute(sql=sql, auto_begin=auto_begin, fetch=fetch, limit=limit) - - def validate_sql(self, sql: str) -> AdapterResponse: - """Submit the given SQL to the engine for validation, but not execution. - - This should throw an appropriate exception if the input SQL is invalid, although - in practice that will generally be handled by delegating to an existing method - for execution and allowing the error handler to take care of the rest. - - :param str sql: The sql to validate - """ - raise NotImplementedError("`validate_sql` is not implemented for this adapter!") - - @available.parse(lambda *a, **k: []) - def get_column_schema_from_query(self, sql: str) -> List[BaseColumn]: - """Get a list of the Columns with names and data types from the given sql.""" - _, cursor = self.connections.add_select_query(sql) - columns = [ - self.Column.create( - column_name, self.connections.data_type_code_to_name(column_type_code) - ) - # https://peps.python.org/pep-0249/#description - for column_name, column_type_code, *_ in cursor.description - ] - return columns - - @available.parse(lambda *a, **k: ("", empty_table())) - def get_partitions_metadata(self, table: str) -> Tuple[agate.Table]: - """ - TODO: Can we move this to dbt-bigquery? - Obtain partitions metadata for a BigQuery partitioned table. - - :param str table: a partitioned table id, in standard SQL format. - :return: a partition metadata tuple, as described in - https://cloud.google.com/bigquery/docs/creating-partitioned-tables#getting_partition_metadata_using_meta_tables. - :rtype: agate.Table - """ - if hasattr(self.connections, "get_partitions_metadata"): - return self.connections.get_partitions_metadata(table=table) - else: - raise NotImplementedError( - "`get_partitions_metadata` is not implemented for this adapter!" - ) - - ### - # Methods that should never be overridden - ### - @classmethod - def type(cls) -> str: - """Get the type of this adapter. Types must be class-unique and - consistent. - - :return: The type name - :rtype: str - """ - return cls.ConnectionManager.TYPE - - # Caching methods - ### - def _schema_is_cached(self, database: Optional[str], schema: str) -> bool: - """Check if the schema is cached, and by default logs if it is not.""" - - if (database, schema) not in self.cache: - fire_event( - CacheMiss( - conn_name=self.nice_connection_name(), - database=cast_to_str(database), - schema=schema, - ) - ) - return False - else: - return True - - def _get_cache_schemas(self, relation_configs: Iterable[RelationConfig]) -> Set[BaseRelation]: - """Get the set of schema relations that the cache logic needs to - populate. - """ - return { - self.Relation.create_from(quoting=self.config, relation_config=relation_config) - for relation_config in relation_configs - } - - def _get_catalog_schemas(self, relation_configs: Iterable[RelationConfig]) -> SchemaSearchMap: - """Get a mapping of each node's "information_schema" relations to a - set of all schemas expected in that information_schema. 
- - There may be keys that are technically duplicates on the database side, - for example all of '"foo"', 'foo', '"FOO"' and 'FOO' could coexist as - databases, and values could overlap as appropriate. All values are - lowercase strings. - """ - info_schema_name_map = SchemaSearchMap() - relations = self._get_catalog_relations(relation_configs) - for relation in relations: - info_schema_name_map.add(relation) - # result is a map whose keys are information_schema Relations without - # identifiers that have appropriate database prefixes, and whose values - # are sets of lowercase schema names that are valid members of those - # databases - return info_schema_name_map - - def _get_catalog_relations_by_info_schema( - self, relations - ) -> Dict[InformationSchema, List[BaseRelation]]: - relations_by_info_schema: Dict[InformationSchema, List[BaseRelation]] = dict() - for relation in relations: - info_schema = relation.information_schema_only() - if info_schema not in relations_by_info_schema: - relations_by_info_schema[info_schema] = [] - relations_by_info_schema[info_schema].append(relation) - - return relations_by_info_schema - - def _get_catalog_relations( - self, relation_configs: Iterable[RelationConfig] - ) -> List[BaseRelation]: - relations = [ - self.Relation.create_from(quoting=self.config, relation_config=relation_config) - for relation_config in relation_configs - ] - return relations - - def _relations_cache_for_schemas( - self, - relation_configs: Iterable[RelationConfig], - cache_schemas: Optional[Set[BaseRelation]] = None, - ) -> None: - """Populate the relations cache for the given schemas. - """ - if not cache_schemas: - cache_schemas = self._get_cache_schemas(relation_configs) - with executor(self.config) as tpe: - futures: List[Future[List[BaseRelation]]] = [] - for cache_schema in cache_schemas: - fut = tpe.submit_connected( - self, - f"list_{cache_schema.database}_{cache_schema.schema}", - self.list_relations_without_caching, - cache_schema, - ) - futures.append(fut) - - for future in as_completed(futures): - # if we can't read the relations we need to just raise anyway, - # so just call future.result() and let that raise on failure - for relation in future.result(): - self.cache.add(relation) - - # it's possible that there were no relations in some schemas. We want - # to insert the schemas we query into the cache's `.schemas` attribute - # so we can check it later - cache_update: Set[Tuple[Optional[str], str]] = set() - for relation in cache_schemas: - if relation.schema: - cache_update.add((relation.database, relation.schema)) - self.cache.update_schemas(cache_update) - - def set_relations_cache( - self, - relation_configs: Iterable[RelationConfig], - clear: bool = False, - required_schemas: Optional[Set[BaseRelation]] = None, - ) -> None: - """Run a query that gets a populated cache of the relations in the - database and set the cache on this adapter. - """ - with self.cache.lock: - if clear: - self.cache.clear() - self._relations_cache_for_schemas(relation_configs, required_schemas) - - @available - def cache_added(self, relation: Optional[BaseRelation]) -> str: - """Cache a new relation in dbt.
It will show up in `list relations`.""" - if relation is None: - name = self.nice_connection_name() - raise NullRelationCacheAttemptedError(name) - self.cache.add(relation) - # so jinja doesn't render things - return "" - - @available - def cache_dropped(self, relation: Optional[BaseRelation]) -> str: - """Drop a relation in dbt. It will no longer show up in - `list relations`, and any bound views will be dropped from the cache - """ - if relation is None: - name = self.nice_connection_name() - raise NullRelationDropAttemptedError(name) - self.cache.drop(relation) - return "" - - @available - def cache_renamed( - self, - from_relation: Optional[BaseRelation], - to_relation: Optional[BaseRelation], - ) -> str: - """Rename a relation in dbt. It will show up with a new name in - `list_relations`, but bound views will remain bound. - """ - if from_relation is None or to_relation is None: - name = self.nice_connection_name() - src_name = _relation_name(from_relation) - dst_name = _relation_name(to_relation) - raise RenameToNoneAttemptedError(src_name, dst_name, name) - - self.cache.rename(from_relation, to_relation) - return "" - - ### - # Abstract methods for database-specific values, attributes, and types - ### - @classmethod - @abc.abstractmethod - def date_function(cls) -> str: - """Get the date function used by this adapter's database.""" - raise NotImplementedError("`date_function` is not implemented for this adapter!") - - @classmethod - @abc.abstractmethod - def is_cancelable(cls) -> bool: - raise NotImplementedError("`is_cancelable` is not implemented for this adapter!") - - ### - # Abstract methods about schemas - ### - @abc.abstractmethod - def list_schemas(self, database: str) -> List[str]: - """Get a list of existing schemas in database""" - raise NotImplementedError("`list_schemas` is not implemented for this adapter!") - - @available.parse(lambda *a, **k: False) - def check_schema_exists(self, database: str, schema: str) -> bool: - """Check if a schema exists. - - The default implementation of this is potentially unnecessarily slow, - and adapters should implement it if there is an optimized path (and - there probably is) - """ - search = (s.lower() for s in self.list_schemas(database=database)) - return schema.lower() in search - - ### - # Abstract methods about relations - ### - @abc.abstractmethod - @available.parse_none - def drop_relation(self, relation: BaseRelation) -> None: - """Drop the given relation. - - *Implementors must call self.cache.drop() to preserve cache state!* - """ - raise NotImplementedError("`drop_relation` is not implemented for this adapter!") - - @abc.abstractmethod - @available.parse_none - def truncate_relation(self, relation: BaseRelation) -> None: - """Truncate the given relation.""" - raise NotImplementedError("`truncate_relation` is not implemented for this adapter!") - - @abc.abstractmethod - @available.parse_none - def rename_relation(self, from_relation: BaseRelation, to_relation: BaseRelation) -> None: - """Rename the relation from from_relation to to_relation. - - Implementors must call self.cache.rename() to preserve cache state. 
- """ - raise NotImplementedError("`rename_relation` is not implemented for this adapter!") - - @abc.abstractmethod - @available.parse_list - def get_columns_in_relation(self, relation: BaseRelation) -> List[BaseColumn]: - """Get a list of the columns in the given Relation.""" - raise NotImplementedError("`get_columns_in_relation` is not implemented for this adapter!") - - @available.deprecated("get_columns_in_relation", lambda *a, **k: []) - def get_columns_in_table(self, schema: str, identifier: str) -> List[BaseColumn]: - """DEPRECATED: Get a list of the columns in the given table.""" - relation = self.Relation.create( - database=self.config.credentials.database, - schema=schema, - identifier=identifier, - quote_policy=self.config.quoting, - ) - return self.get_columns_in_relation(relation) - - @abc.abstractmethod - def expand_column_types(self, goal: BaseRelation, current: BaseRelation) -> None: - """Expand the current table's types to match the goal table. (passable) - - :param self.Relation goal: A relation that currently exists in the - database with columns of the desired types. - :param self.Relation current: A relation that currently exists in the - database with columns of unspecified types. - """ - raise NotImplementedError( - "`expand_target_column_types` is not implemented for this adapter!" - ) - - @abc.abstractmethod - def list_relations_without_caching(self, schema_relation: BaseRelation) -> List[BaseRelation]: - """List relations in the given schema, bypassing the cache. - - This is used as the underlying behavior to fill the cache. - - :param schema_relation: A relation containing the database and schema - as appropraite for the underlying data warehouse - :return: The relations in schema - :rtype: List[self.Relation] - """ - raise NotImplementedError( - "`list_relations_without_caching` is not implemented for this adapter!" - ) - - ### - # Methods about grants - ### - @available - def standardize_grants_dict(self, grants_table: agate.Table) -> dict: - """Translate the result of `show grants` (or equivalent) to match the - grants which a user would configure in their project. - - Ideally, the SQL to show grants should also be filtering: - filter OUT any grants TO the current user/role (e.g. OWNERSHIP). - If that's not possible in SQL, it can be done in this method instead. - - :param grants_table: An agate table containing the query result of - the SQL returned by get_show_grant_sql - :return: A standardized dictionary matching the `grants` config - :rtype: dict - """ - grants_dict: Dict[str, List[str]] = {} - for row in grants_table: - grantee = row["grantee"] - privilege = row["privilege_type"] - if privilege in grants_dict.keys(): - grants_dict[privilege].append(grantee) - else: - grants_dict.update({privilege: [grantee]}) - return grants_dict - - ### - # Provided methods about relations - ### - @available.parse_list - def get_missing_columns( - self, from_relation: BaseRelation, to_relation: BaseRelation - ) -> List[BaseColumn]: - """Returns a list of Columns in from_relation that are missing from - to_relation. 
- """ - if not isinstance(from_relation, self.Relation): - raise MacroArgTypeError( - method_name="get_missing_columns", - arg_name="from_relation", - got_value=from_relation, - expected_type=self.Relation, - ) - - if not isinstance(to_relation, self.Relation): - raise MacroArgTypeError( - method_name="get_missing_columns", - arg_name="to_relation", - got_value=to_relation, - expected_type=self.Relation, - ) - - from_columns = {col.name: col for col in self.get_columns_in_relation(from_relation)} - - to_columns = {col.name: col for col in self.get_columns_in_relation(to_relation)} - - missing_columns = set(from_columns.keys()) - set(to_columns.keys()) - - return [col for (col_name, col) in from_columns.items() if col_name in missing_columns] - - @available.parse_none - def valid_snapshot_target(self, relation: BaseRelation) -> None: - """Ensure that the target relation is valid, by making sure it has the - expected columns. - - :param Relation relation: The relation to check - :raises InvalidMacroArgType: If the columns are - incorrect. - """ - if not isinstance(relation, self.Relation): - raise MacroArgTypeError( - method_name="valid_snapshot_target", - arg_name="relation", - got_value=relation, - expected_type=self.Relation, - ) - - columns = self.get_columns_in_relation(relation) - names = set(c.name.lower() for c in columns) - expanded_keys = ("scd_id", "valid_from", "valid_to") - extra = [] - missing = [] - for legacy in expanded_keys: - desired = "dbt_" + legacy - if desired not in names: - missing.append(desired) - if legacy in names: - extra.append(legacy) - - if missing: - if extra: - raise SnapshotTargetIncompleteError(extra, missing) - else: - raise SnapshotTargetNotSnapshotTableError(missing) - - @available.parse_none - def expand_target_column_types( - self, from_relation: BaseRelation, to_relation: BaseRelation - ) -> None: - if not isinstance(from_relation, self.Relation): - raise MacroArgTypeError( - method_name="expand_target_column_types", - arg_name="from_relation", - got_value=from_relation, - expected_type=self.Relation, - ) - - if not isinstance(to_relation, self.Relation): - raise MacroArgTypeError( - method_name="expand_target_column_types", - arg_name="to_relation", - got_value=to_relation, - expected_type=self.Relation, - ) - - self.expand_column_types(from_relation, to_relation) - - def list_relations(self, database: Optional[str], schema: str) -> List[BaseRelation]: - if self._schema_is_cached(database, schema): - return self.cache.get_relations(database, schema) - - schema_relation = self.Relation.create( - database=database, - schema=schema, - identifier="", - quote_policy=self.config.quoting, - ).without_identifier() - - # we can't build the relations cache because we don't have a - # manifest so we can't run any operations. - relations = self.list_relations_without_caching(schema_relation) - - # if the cache is already populated, add this schema in - # otherwise, skip updating the cache and just ignore - if self.cache: - for relation in relations: - self.cache.add(relation) - if not relations: - # it's possible that there were no relations in some schemas. 
We want - # to insert the schemas we query into the cache's `.schemas` attribute - # so we can check it later - self.cache.update_schemas([(database, schema)]) - - fire_event( - ListRelations( - database=cast_to_str(database), - schema=schema, - relations=[_make_ref_key_dict(x) for x in relations], - ) - ) - - return relations - - def _make_match_kwargs(self, database: str, schema: str, identifier: str) -> Dict[str, str]: - quoting = self.config.quoting - if identifier is not None and quoting["identifier"] is False: - identifier = identifier.lower() - - if schema is not None and quoting["schema"] is False: - schema = schema.lower() - - if database is not None and quoting["database"] is False: - database = database.lower() - - return filter_null_values( - { - "database": database, - "identifier": identifier, - "schema": schema, - } - ) - - def _make_match( - self, - relations_list: List[BaseRelation], - database: str, - schema: str, - identifier: str, - ) -> List[BaseRelation]: - matches = [] - - search = self._make_match_kwargs(database, schema, identifier) - - for relation in relations_list: - if relation.matches(**search): - matches.append(relation) - - return matches - - @available.parse_none - def get_relation(self, database: str, schema: str, identifier: str) -> Optional[BaseRelation]: - relations_list = self.list_relations(database, schema) - - matches = self._make_match(relations_list, database, schema, identifier) - - if len(matches) > 1: - kwargs = { - "identifier": identifier, - "schema": schema, - "database": database, - } - raise RelationReturnedMultipleResultsError(kwargs, matches) - - elif matches: - return matches[0] - - return None - - @available.deprecated("get_relation", lambda *a, **k: False) - def already_exists(self, schema: str, name: str) -> bool: - """DEPRECATED: Return whether a model already exists in the database""" - database = self.config.credentials.database - relation = self.get_relation(database, schema, name) - return relation is not None - - ### - # ODBC FUNCTIONS -- these should not need to change for every adapter, - # although some adapters may override them - ### - @abc.abstractmethod - @available.parse_none - def create_schema(self, relation: BaseRelation): - """Create the given schema if it does not exist.""" - raise NotImplementedError("`create_schema` is not implemented for this adapter!") - - @abc.abstractmethod - @available.parse_none - def drop_schema(self, relation: BaseRelation): - """Drop the given schema (and everything in it) if it exists.""" - raise NotImplementedError("`drop_schema` is not implemented for this adapter!") - - @available - @classmethod - @abc.abstractmethod - def quote(cls, identifier: str) -> str: - """Quote the given identifier, as appropriate for the database.""" - raise NotImplementedError("`quote` is not implemented for this adapter!") - - @available - def quote_as_configured(self, identifier: str, quote_key: str) -> str: - """Quote or do not quote the given identifier as configured in the - project config for the quote key. - - The quote key should be one of 'database' (on bigquery, 'profile'), - 'identifier', or 'schema', or it will be treated as if you set `True`.
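# --- illustrative sketch (assumed names): the matching helpers above lowercase
# any component whose quoting is disabled and drop None values before matching.
quoting = {"database": False, "schema": False, "identifier": True}

def match_kwargs(database, schema, identifier):
    parts = {"database": database, "schema": schema, "identifier": identifier}
    normalized = {}
    for key, value in parts.items():
        if value is None:
            continue  # omitted components match anything
        normalized[key] = value if quoting[key] else value.lower()
    return normalized

print(match_kwargs("Analytics", None, "MyTable"))
# {'database': 'analytics', 'identifier': 'MyTable'}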
- """ - try: - key = ComponentName(quote_key) - except ValueError: - return identifier - - default = self.Relation.get_default_quote_policy().get_part(key) - if self.config.quoting.get(key, default): - return self.quote(identifier) - else: - return identifier - - @available - def quote_seed_column(self, column: str, quote_config: Optional[bool]) -> str: - quote_columns: bool = True - if isinstance(quote_config, bool): - quote_columns = quote_config - elif quote_config is None: - pass - else: - raise QuoteConfigTypeError(quote_config) - - if quote_columns: - return self.quote(column) - else: - return column - - ### - # Conversions: These must be implemented by concrete implementations, for - # converting agate types into their sql equivalents. - ### - @classmethod - @abc.abstractmethod - def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str: - """Return the type in the database that best maps to the agate.Text - type for the given agate table and column index. - - :param agate_table: The table - :param col_idx: The index into the agate table for the column. - :return: The name of the type in the database - """ - raise NotImplementedError("`convert_text_type` is not implemented for this adapter!") - - @classmethod - @abc.abstractmethod - def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str: - """Return the type in the database that best maps to the agate.Number - type for the given agate table and column index. - - :param agate_table: The table - :param col_idx: The index into the agate table for the column. - :return: The name of the type in the database - """ - raise NotImplementedError("`convert_number_type` is not implemented for this adapter!") - - @classmethod - def convert_integer_type(cls, agate_table: agate.Table, col_idx: int) -> str: - """Return the type in the database that best maps to the agate.Number - type for the given agate table and column index. - - :param agate_table: The table - :param col_idx: The index into the agate table for the column. - :return: The name of the type in the database - """ - return "integer" - - @classmethod - @abc.abstractmethod - def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str: - """Return the type in the database that best maps to the agate.Boolean - type for the given agate table and column index. - - :param agate_table: The table - :param col_idx: The index into the agate table for the column. - :return: The name of the type in the database - """ - raise NotImplementedError("`convert_boolean_type` is not implemented for this adapter!") - - @classmethod - @abc.abstractmethod - def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str: - """Return the type in the database that best maps to the agate.DateTime - type for the given agate table and column index. - - :param agate_table: The table - :param col_idx: The index into the agate table for the column. - :return: The name of the type in the database - """ - raise NotImplementedError("`convert_datetime_type` is not implemented for this adapter!") - - @classmethod - @abc.abstractmethod - def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str: - """Return the type in the database that best maps to the agate.Date - type for the given agate table and column index. - - :param agate_table: The table - :param col_idx: The index into the agate table for the column. 
- :return: The name of the type in the database - """ - raise NotImplementedError("`convert_date_type` is not implemented for this adapter!") - - @classmethod - @abc.abstractmethod - def convert_time_type(cls, agate_table: agate.Table, col_idx: int) -> str: - """Return the type in the database that best maps to the - agate.TimeDelta type for the given agate table and column index. - - :param agate_table: The table - :param col_idx: The index into the agate table for the column. - :return: The name of the type in the database - """ - raise NotImplementedError("`convert_time_type` is not implemented for this adapter!") - - @available - @classmethod - def convert_type(cls, agate_table: agate.Table, col_idx: int) -> Optional[str]: - return cls.convert_agate_type(agate_table, col_idx) - - @classmethod - def convert_agate_type(cls, agate_table: agate.Table, col_idx: int) -> Optional[str]: - agate_type: Type = agate_table.column_types[col_idx] - conversions: List[Tuple[Type, Callable[..., str]]] = [ - (Integer, cls.convert_integer_type), - (agate.Text, cls.convert_text_type), - (agate.Number, cls.convert_number_type), - (agate.Boolean, cls.convert_boolean_type), - (agate.DateTime, cls.convert_datetime_type), - (agate.Date, cls.convert_date_type), - (agate.TimeDelta, cls.convert_time_type), - ] - for agate_cls, func in conversions: - if isinstance(agate_type, agate_cls): - return func(agate_table, col_idx) - - return None - - ### - # Operations involving the manifest - ### - def execute_macro( - self, - macro_name: str, - macro_resolver: Optional[MacroResolverProtocol] = None, - project: Optional[str] = None, - context_override: Optional[Dict[str, Any]] = None, - kwargs: Optional[Dict[str, Any]] = None, - ) -> AttrDict: - """Look macro_name up in the manifest and execute its results. - - :param macro_name: The name of the macro to execute. - :param manifest: The manifest to use for generating the base macro - execution context. If none is provided, use the internal manifest. - :param project: The name of the project to search in, or None for the - first match. - :param context_override: An optional dict to update() the macro - execution context. - :param kwargs: An optional dict of keyword args used to pass to the - macro. - """ - - if kwargs is None: - kwargs = {} - if context_override is None: - context_override = {} - - resolver = macro_resolver or self._macro_resolver - if resolver is None: - raise DbtInternalError("Macro resolver was None when calling execute_macro!") - - if self._macro_context_generator is None: - raise DbtInternalError("Macro context generator was None when calling execute_macro!") - - macro = resolver.find_macro_by_name(macro_name, self.config.project_name, project) - if macro is None: - if project is None: - package_name = "any package" - else: - package_name = 'the "{}" package'.format(project) - - raise DbtRuntimeError( - 'dbt could not find a macro with the name "{}" in {}'.format( - macro_name, package_name - ) - ) - - macro_context = self._macro_context_generator(macro, self.config, resolver, project) - macro_context.update(context_override) - - macro_function = CallableMacroGenerator(macro, macro_context) - - with self.connections.exception_handler(f"macro {macro_name}"): - result = macro_function(**kwargs) - return result - - @classmethod - def _catalog_filter_table( - cls, table: agate.Table, used_schemas: FrozenSet[Tuple[str, str]] - ) -> agate.Table: - """Filter the table as appropriate for catalog entries. 
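# --- minimal stand-in for the (type, converter) dispatch in convert_agate_type
# above, using builtin Python types in place of agate's column types (an
# assumption for brevity); the first matching entry wins.
CONVERSIONS = [
    (bool, lambda: "boolean"),  # bool first: bool is a subclass of int
    (int, lambda: "integer"),
    (float, lambda: "float8"),
    (str, lambda: "text"),
]

def convert(value):
    for py_type, func in CONVERSIONS:
        if isinstance(value, py_type):
            return func()
    return None  # no converter registered, mirroring the fallthrough above

print(convert(True), convert(3), convert("x"), convert(b"raw"))
# boolean integer text None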
Subclasses can - override this to change filtering rules on a per-adapter basis. - """ - # force database + schema to be strings - table = table_from_rows( - table.rows, - table.column_names, - text_only_columns=["table_database", "table_schema", "table_name"], - ) - return table.where(_catalog_filter_schemas(used_schemas)) - - def _get_one_catalog( - self, - information_schema: InformationSchema, - schemas: Set[str], - used_schemas: FrozenSet[Tuple[str, str]], - ) -> agate.Table: - kwargs = {"information_schema": information_schema, "schemas": schemas} - table = self.execute_macro(GET_CATALOG_MACRO_NAME, kwargs=kwargs) - - results = self._catalog_filter_table(table, used_schemas) # type: ignore[arg-type] - return results - - def _get_one_catalog_by_relations( - self, - information_schema: InformationSchema, - relations: List[BaseRelation], - used_schemas: FrozenSet[Tuple[str, str]], - ) -> agate.Table: - - kwargs = { - "information_schema": information_schema, - "relations": relations, - } - table = self.execute_macro(GET_CATALOG_RELATIONS_MACRO_NAME, kwargs=kwargs) - - results = self._catalog_filter_table(table, used_schemas) # type: ignore[arg-type] - return results - - def get_filtered_catalog( - self, - relation_configs: Iterable[RelationConfig], - used_schemas: FrozenSet[Tuple[str, str]], - relations: Optional[Set[BaseRelation]] = None, - ): - catalogs: agate.Table - if ( - relations is None - or len(relations) > 100 - or not self.supports(Capability.SchemaMetadataByRelations) - ): - # Do it the traditional way. We get the full catalog. - catalogs, exceptions = self.get_catalog(relation_configs, used_schemas) - else: - # Do it the new way. We try to save time by selecting information - # only for the exact set of relations we are interested in. 
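# --- worked example of the casefold filtering used by get_filtered_catalog
# below: a catalog row survives only if its (database, schema, identifier)
# triple, casefolded, appears in the map built from the requested relations.
relation_map = {("db", "public", "orders")}
rows = [
    {"table_database": "DB", "table_schema": "Public", "table_name": "Orders"},
    {"table_database": "db", "table_schema": "public", "table_name": "other"},
]

def in_map(row):
    key = tuple(
        row[col].casefold() if row[col] is not None else None
        for col in ("table_database", "table_schema", "table_name")
    )
    return key in relation_map

print([r["table_name"] for r in rows if in_map(r)])  # ['Orders']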
- catalogs, exceptions = self.get_catalog_by_relations(used_schemas, relations) - - if relations and catalogs: - relation_map = { - ( - r.database.casefold() if r.database else None, - r.schema.casefold() if r.schema else None, - r.identifier.casefold() if r.identifier else None, - ) - for r in relations - } - - def in_map(row: agate.Row): - d = _expect_row_value("table_database", row) - s = _expect_row_value("table_schema", row) - i = _expect_row_value("table_name", row) - d = d.casefold() if d is not None else None - s = s.casefold() if s is not None else None - i = i.casefold() if i is not None else None - return (d, s, i) in relation_map - - catalogs = catalogs.where(in_map) - - return catalogs, exceptions - - def row_matches_relation(self, row: agate.Row, relations: Set[BaseRelation]): - pass - - def get_catalog( - self, - relation_configs: Iterable[RelationConfig], - used_schemas: FrozenSet[Tuple[str, str]], - ) -> Tuple[agate.Table, List[Exception]]: - with executor(self.config) as tpe: - futures: List[Future[agate.Table]] = [] - schema_map: SchemaSearchMap = self._get_catalog_schemas(relation_configs) - for info, schemas in schema_map.items(): - if len(schemas) == 0: - continue - name = ".".join([str(info.database), "information_schema"]) - fut = tpe.submit_connected( - self, name, self._get_one_catalog, info, schemas, used_schemas - ) - futures.append(fut) - - catalogs, exceptions = catch_as_completed(futures) - return catalogs, exceptions - - def get_catalog_by_relations( - self, used_schemas: FrozenSet[Tuple[str, str]], relations: Set[BaseRelation] - ) -> Tuple[agate.Table, List[Exception]]: - with executor(self.config) as tpe: - futures: List[Future[agate.Table]] = [] - relations_by_schema = self._get_catalog_relations_by_info_schema(relations) - for info_schema in relations_by_schema: - name = ".".join([str(info_schema.database), "information_schema"]) - relations = set(relations_by_schema[info_schema]) - fut = tpe.submit_connected( - self, - name, - self._get_one_catalog_by_relations, - info_schema, - relations, - used_schemas, - ) - futures.append(fut) - - catalogs, exceptions = catch_as_completed(futures) - return catalogs, exceptions - - def cancel_open_connections(self): - """Cancel all open connections.""" - return self.connections.cancel_open() - - def calculate_freshness( - self, - source: BaseRelation, - loaded_at_field: str, - filter: Optional[str], - macro_resolver: Optional[MacroResolverProtocol] = None, - ) -> Tuple[Optional[AdapterResponse], FreshnessResponse]: - """Calculate the freshness of sources in dbt, and return it""" - kwargs: Dict[str, Any] = { - "source": source, - "loaded_at_field": loaded_at_field, - "filter": filter, - } - - # run the macro - # in older versions of dbt-core, the 'collect_freshness' macro returned the table of results directly - # starting in v1.5, by default, we return both the table and the adapter response (metadata about the query) - result: Union[ - AttrDict, # current: contains AdapterResponse + agate.Table - agate.Table, # previous: just table - ] - result = self.execute_macro( - FRESHNESS_MACRO_NAME, kwargs=kwargs, macro_resolver=macro_resolver - ) - if isinstance(result, agate.Table): - warn_or_error(CollectFreshnessReturnSignature()) - adapter_response = None - table = result - else: - adapter_response, table = result.response, result.table # type: ignore[attr-defined] - # now we have a 1-row table of the maximum `loaded_at_field` value and - # the current time according to the db. 
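# --- worked example of the freshness arithmetic used below: age is the gap,
# in seconds, between the newest loaded record and the warehouse clock.
from datetime import datetime, timezone

max_loaded_at = datetime(2024, 1, 23, 12, 0, tzinfo=timezone.utc)
snapshotted_at = datetime(2024, 1, 23, 12, 30, tzinfo=timezone.utc)
age = (snapshotted_at - max_loaded_at).total_seconds()
print(age)  # 1800.0 -- the source is 30 minutes stale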
- if len(table) != 1 or len(table[0]) != 2: - raise MacroResultError(FRESHNESS_MACRO_NAME, table) - if table[0][0] is None: - # no records in the table, so really the max_loaded_at was - # infinitely long ago. Just call it midnight on January 1 of year 1, UTC - max_loaded_at = datetime(1, 1, 1, 0, 0, 0, tzinfo=pytz.UTC) - else: - max_loaded_at = _utc(table[0][0], source, loaded_at_field) - - snapshotted_at = _utc(table[0][1], source, loaded_at_field) - age = (snapshotted_at - max_loaded_at).total_seconds() - freshness: FreshnessResponse = { - "max_loaded_at": max_loaded_at, - "snapshotted_at": snapshotted_at, - "age": age, - } - return adapter_response, freshness - - def calculate_freshness_from_metadata( - self, - source: BaseRelation, - macro_resolver: Optional[MacroResolverProtocol] = None, - ) -> Tuple[Optional[AdapterResponse], FreshnessResponse]: - kwargs: Dict[str, Any] = { - "information_schema": source.information_schema_only(), - "relations": [source], - } - result = self.execute_macro( - GET_RELATION_LAST_MODIFIED_MACRO_NAME, kwargs=kwargs, macro_resolver=macro_resolver - ) - adapter_response, table = result.response, result.table # type: ignore[attr-defined] - - try: - row = table[0] - last_modified_val = get_column_value_uncased("last_modified", row) - snapshotted_at_val = get_column_value_uncased("snapshotted_at", row) - except Exception: - raise MacroResultError(GET_RELATION_LAST_MODIFIED_MACRO_NAME, table) - - if last_modified_val is None: - # Interpret missing value as "infinitely long ago" - max_loaded_at = datetime(1, 1, 1, 0, 0, 0, tzinfo=pytz.UTC) - else: - max_loaded_at = _utc(last_modified_val, None, "last_modified") - - snapshotted_at = _utc(snapshotted_at_val, None, "snapshotted_at") - - age = (snapshotted_at - max_loaded_at).total_seconds() - - freshness: FreshnessResponse = { - "max_loaded_at": max_loaded_at, - "snapshotted_at": snapshotted_at, - "age": age, - } - - return adapter_response, freshness - - def pre_model_hook(self, config: Mapping[str, Any]) -> Any: - """A hook for running some operation before the model materialization - runs. The hook can assume it has a connection available. - - The only parameter is a configuration dictionary (the same one - available in the materialization context). It should be considered - read-only. - - The pre-model hook may return anything as a context, which will be - passed to the post-model hook. - """ - pass - - def post_model_hook(self, config: Mapping[str, Any], context: Any) -> None: - """A hook for running some operation after the model materialization - runs. The hook can assume it has a connection available. - - The first parameter is a configuration dictionary (the same one - available in the materialization context). It should be considered - read-only. - - The second parameter is the value returned by pre_model_hook. - """ - pass - - # Methods used in adapter tests - def update_column_sql( - self, - dst_name: str, - dst_column: str, - clause: str, - where_clause: Optional[str] = None, - ) -> str: - clause = f"update {dst_name} set {dst_column} = {clause}" - if where_clause is not None: - clause += f" where {where_clause}" - return clause - - def timestamp_add_sql(self, add_to: str, number: int = 1, interval: str = "hour") -> str: - # for backwards compatibility, we're compelled to set some sort of - # default. A lot of searching has led me to believe that the - # '+ interval' syntax used in postgres/redshift is relatively common - # and might even be the SQL standard's intention.
- return f"{add_to} + interval '{number} {interval}'" - - def string_add_sql( - self, - add_to: str, - value: str, - location="append", - ) -> str: - if location == "append": - return f"{add_to} || '{value}'" - elif location == "prepend": - return f"'{value}' || {add_to}" - else: - raise DbtRuntimeError(f'Got an unexpected location value of "{location}"') - - def get_rows_different_sql( - self, - relation_a: BaseRelation, - relation_b: BaseRelation, - column_names: Optional[List[str]] = None, - except_operator: str = "EXCEPT", - ) -> str: - """Generate SQL for a query that returns a single row with a two - columns: the number of rows that are different between the two - relations and the number of mismatched rows. - """ - # This method only really exists for test reasons. - names: List[str] - if column_names is None: - columns = self.get_columns_in_relation(relation_a) - names = sorted((self.quote(c.name) for c in columns)) - else: - names = sorted((self.quote(n) for n in column_names)) - columns_csv = ", ".join(names) - - sql = COLUMNS_EQUAL_SQL.format( - columns=columns_csv, - relation_a=str(relation_a), - relation_b=str(relation_b), - except_op=except_operator, - ) - - return sql - - @property - def python_submission_helpers(self) -> Dict[str, Type[PythonJobHelper]]: - raise NotImplementedError("python_submission_helpers is not specified") - - @property - def default_python_submission_method(self) -> str: - raise NotImplementedError("default_python_submission_method is not specified") - - @log_code_execution - def submit_python_job(self, parsed_model: dict, compiled_code: str) -> AdapterResponse: - submission_method = parsed_model["config"].get( - "submission_method", self.default_python_submission_method - ) - if submission_method not in self.python_submission_helpers: - raise NotImplementedError( - "Submission method {} is not supported for current adapter".format( - submission_method - ) - ) - job_helper = self.python_submission_helpers[submission_method]( - parsed_model, self.connections.profile.credentials - ) - submission_result = job_helper.submit(compiled_code) - # process submission result to generate adapter response - return self.generate_python_submission_response(submission_result) - - def generate_python_submission_response(self, submission_result: Any) -> AdapterResponse: - raise NotImplementedError( - "Your adapter need to implement generate_python_submission_response" - ) - - def valid_incremental_strategies(self): - """The set of standard builtin strategies which this adapter supports out-of-the-box. - Not used to validate custom strategies defined by end users. 
- """ - return ["append"] - - def builtin_incremental_strategies(self): - return ["append", "delete+insert", "merge", "insert_overwrite"] - - @available.parse_none - def get_incremental_strategy_macro(self, model_context, strategy: str): - # Construct macro_name from strategy name - if strategy is None: - strategy = "default" - - # validate strategies for this adapter - valid_strategies = self.valid_incremental_strategies() - valid_strategies.append("default") - builtin_strategies = self.builtin_incremental_strategies() - if strategy in builtin_strategies and strategy not in valid_strategies: - raise DbtRuntimeError( - f"The incremental strategy '{strategy}' is not valid for this adapter" - ) - - strategy = strategy.replace("+", "_") - macro_name = f"get_incremental_{strategy}_sql" - # The model_context should have callable objects for all macros - if macro_name not in model_context: - raise DbtRuntimeError( - 'dbt could not find an incremental strategy macro with the name "{}" in {}'.format( - macro_name, self.config.project_name - ) - ) - - # This returns a callable macro - return model_context[macro_name] - - @classmethod - def _parse_column_constraint(cls, raw_constraint: Dict[str, Any]) -> ColumnLevelConstraint: - try: - ColumnLevelConstraint.validate(raw_constraint) - return ColumnLevelConstraint.from_dict(raw_constraint) - except Exception: - raise DbtValidationError(f"Could not parse constraint: {raw_constraint}") - - @classmethod - def render_column_constraint(cls, constraint: ColumnLevelConstraint) -> Optional[str]: - """Render the given constraint as DDL text. Should be overriden by adapters which need custom constraint - rendering.""" - constraint_expression = constraint.expression or "" - - rendered_column_constraint = None - if constraint.type == ConstraintType.check and constraint_expression: - rendered_column_constraint = f"check ({constraint_expression})" - elif constraint.type == ConstraintType.not_null: - rendered_column_constraint = f"not null {constraint_expression}" - elif constraint.type == ConstraintType.unique: - rendered_column_constraint = f"unique {constraint_expression}" - elif constraint.type == ConstraintType.primary_key: - rendered_column_constraint = f"primary key {constraint_expression}" - elif constraint.type == ConstraintType.foreign_key and constraint_expression: - rendered_column_constraint = f"references {constraint_expression}" - elif constraint.type == ConstraintType.custom and constraint_expression: - rendered_column_constraint = constraint_expression - - if rendered_column_constraint: - rendered_column_constraint = rendered_column_constraint.strip() - - return rendered_column_constraint - - @available - @classmethod - def render_raw_columns_constraints(cls, raw_columns: Dict[str, Dict[str, Any]]) -> List: - rendered_column_constraints = [] - - for v in raw_columns.values(): - col_name = cls.quote(v["name"]) if v.get("quote") else v["name"] - rendered_column_constraint = [f"{col_name} {v['data_type']}"] - for con in v.get("constraints", None): - constraint = cls._parse_column_constraint(con) - c = cls.process_parsed_constraint(constraint, cls.render_column_constraint) - if c is not None: - rendered_column_constraint.append(c) - rendered_column_constraints.append(" ".join(rendered_column_constraint)) - - return rendered_column_constraints - - @classmethod - def process_parsed_constraint( - cls, parsed_constraint: Union[ColumnLevelConstraint, ModelLevelConstraint], render_func - ) -> Optional[str]: - if ( - parsed_constraint.warn_unsupported - and 
cls.CONSTRAINT_SUPPORT[parsed_constraint.type] == ConstraintSupport.NOT_SUPPORTED - ) - warn_or_error( - ConstraintNotSupported(constraint=parsed_constraint.type.value, adapter=cls.type()) - ) - if ( - parsed_constraint.warn_unenforced - and cls.CONSTRAINT_SUPPORT[parsed_constraint.type] == ConstraintSupport.NOT_ENFORCED - ): - warn_or_error( - ConstraintNotEnforced(constraint=parsed_constraint.type.value, adapter=cls.type()) - ) - if cls.CONSTRAINT_SUPPORT[parsed_constraint.type] != ConstraintSupport.NOT_SUPPORTED: - return render_func(parsed_constraint) - - return None - - @classmethod - def _parse_model_constraint(cls, raw_constraint: Dict[str, Any]) -> ModelLevelConstraint: - try: - ModelLevelConstraint.validate(raw_constraint) - c = ModelLevelConstraint.from_dict(raw_constraint) - return c - except Exception: - raise DbtValidationError(f"Could not parse constraint: {raw_constraint}") - - @available - @classmethod - def render_raw_model_constraints(cls, raw_constraints: List[Dict[str, Any]]) -> List[str]: - return [c for c in map(cls.render_raw_model_constraint, raw_constraints) if c is not None] - - @classmethod - def render_raw_model_constraint(cls, raw_constraint: Dict[str, Any]) -> Optional[str]: - constraint = cls._parse_model_constraint(raw_constraint) - return cls.process_parsed_constraint(constraint, cls.render_model_constraint) - - @classmethod - def render_model_constraint(cls, constraint: ModelLevelConstraint) -> Optional[str]: - """Render the given constraint as DDL text. Should be overridden by adapters which need custom constraint - rendering.""" - constraint_prefix = f"constraint {constraint.name} " if constraint.name else "" - column_list = ", ".join(constraint.columns) - if constraint.type == ConstraintType.check and constraint.expression: - return f"{constraint_prefix}check ({constraint.expression})" - elif constraint.type == ConstraintType.unique: - constraint_expression = f" {constraint.expression}" if constraint.expression else "" - return f"{constraint_prefix}unique{constraint_expression} ({column_list})" - elif constraint.type == ConstraintType.primary_key: - constraint_expression = f" {constraint.expression}" if constraint.expression else "" - return f"{constraint_prefix}primary key{constraint_expression} ({column_list})" - elif constraint.type == ConstraintType.foreign_key and constraint.expression: - return f"{constraint_prefix}foreign key ({column_list}) references {constraint.expression}" - elif constraint.type == ConstraintType.custom and constraint.expression: - return f"{constraint_prefix}{constraint.expression}" - else: - return None - - @classmethod - def capabilities(cls) -> CapabilityDict: - return cls._capabilities - - @classmethod - def supports(cls, capability: Capability) -> bool: - return bool(cls.capabilities()[capability]) - - -COLUMNS_EQUAL_SQL = """ -with diff_count as ( - SELECT - 1 as id, - COUNT(*) as num_missing FROM ( - (SELECT {columns} FROM {relation_a} {except_op} - SELECT {columns} FROM {relation_b}) - UNION ALL - (SELECT {columns} FROM {relation_b} {except_op} - SELECT {columns} FROM {relation_a}) - ) as a -), table_a as ( - SELECT COUNT(*) as num_rows FROM {relation_a} -), table_b as ( - SELECT COUNT(*) as num_rows FROM {relation_b} -), row_count_diff as ( - select - 1 as id, - table_a.num_rows - table_b.num_rows as difference - from table_a, table_b -) -select - row_count_diff.difference as row_count_difference, - diff_count.num_missing as num_mismatched -from row_count_diff -join diff_count using (id) -""".strip() - - -def
catch_as_completed( - futures, # typing: List[Future[agate.Table]] - ) -> Tuple[agate.Table, List[Exception]]: - # catalogs: agate.Table = agate.Table(rows=[]) - tables: List[agate.Table] = [] - exceptions: List[Exception] = [] - - for future in as_completed(futures): - exc = future.exception() - # we want to re-raise on ctrl+c and BaseException - if exc is None: - catalog = future.result() - tables.append(catalog) - elif isinstance(exc, KeyboardInterrupt) or not isinstance(exc, Exception): - raise exc - else: - warn_or_error(CatalogGenerationError(exc=str(exc))) - # exc is not None, derives from Exception, and isn't ctrl+c - exceptions.append(exc) - return merge_tables(tables), exceptions diff --git a/core/dbt/adapters/base/meta.py b/core/dbt/adapters/base/meta.py deleted file mode 100644 index bf887a407ab..00000000000 --- a/core/dbt/adapters/base/meta.py +++ /dev/null @@ -1,128 +0,0 @@ -import abc -from functools import wraps -from typing import Callable, Optional, Any, FrozenSet, Dict, Set -from dbt_common.events.functions import warn_or_error -from dbt.adapters.events.types import AdapterDeprecationWarning - -Decorator = Callable[[Any], Callable] - - -class _Available: - def __call__(self, func: Callable) -> Callable: - func._is_available_ = True # type: ignore - return func - - def parse(self, parse_replacement: Callable) -> Decorator: - """A decorator factory to indicate that a method on the adapter will be - exposed to the database wrapper, and will be stubbed out at parse time - with the given function. - - @available.parse(lambda *args, **kwargs: None) - def my_method(self, a, b): - if something: - return None - return big_expensive_db_query() - - @available.parse(lambda *args, **kwargs: {}) - def my_other_method(self, a, b): - x = {} - x.update(big_expensive_db_query()) - return x - """ - - def inner(func): - func._parse_replacement_ = parse_replacement - return self(func) - - return inner - - def deprecated( - self, supported_name: str, parse_replacement: Optional[Callable] = None - ) -> Decorator: - """A decorator that marks a function as available, but also prints a - deprecation warning. Use like - - @available.deprecated('my_new_method') - def my_old_method(self, arg): - args = compatibility_shim(arg) - return self.my_new_method(*args) - - @available.deprecated('my_new_slow_method', lambda *a, **k: (0, '')) - def my_old_slow_method(self, arg): - args = compatibility_shim(arg) - return self.my_new_slow_method(*args) - - To make `adapter.my_old_method` available but also print out a warning - on use directing users to `my_new_method`. - - The optional parse_replacement, if provided, supplies a parse-time - replacement for the actual method (see `available.parse`).
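# --- self-contained sketch of the flag-and-stub pattern above (assumed names,
# not dbt's API): the decorator marks a function as exposed and records a
# parse-time replacement that a registry or metaclass can discover later.
def parse(parse_replacement):
    def inner(func):
        func._is_available_ = True
        func._parse_replacement_ = parse_replacement
        return func
    return inner

@parse(lambda *args, **kwargs: [])
def expensive_db_query():
    return ["row1", "row2"]

print(expensive_db_query._is_available_)         # True
print(expensive_db_query._parse_replacement_())  # [] -- used at parse time
print(expensive_db_query())                      # ['row1', 'row2'] -- at execute time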
- """ - - def wrapper(func): - func_name = func.__name__ - - @wraps(func) - def inner(*args, **kwargs): - warn_or_error( - AdapterDeprecationWarning(old_name=func_name, new_name=supported_name) - ) - return func(*args, **kwargs) - - if parse_replacement: - available_function = self.parse(parse_replacement) - else: - available_function = self - return available_function(inner) - - return wrapper - - def parse_none(self, func: Callable) -> Callable: - wrapper = self.parse(lambda *a, **k: None) - return wrapper(func) - - def parse_list(self, func: Callable) -> Callable: - wrapper = self.parse(lambda *a, **k: []) - return wrapper(func) - - -available = _Available() - - -class AdapterMeta(abc.ABCMeta): - _available_: FrozenSet[str] - _parse_replacements_: Dict[str, Callable] - - def __new__(mcls, name, bases, namespace, **kwargs) -> "AdapterMeta": - # mypy does not like the `**kwargs`. But `ABCMeta` itself takes - # `**kwargs` in its argspec here (and passes them to `type.__new__`. - # I'm not sure there is any benefit to it after poking around a bit, - # but having it doesn't hurt on the python side (and omitting it could - # hurt for obscure metaclass reasons, for all I know) - cls = abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs) # type: ignore - - # this is very much inspired by ABCMeta's own implementation - - # dict mapping the method name to whether the model name should be - # injected into the arguments. All methods in here are exposed to the - # context. - available: Set[str] = set() - replacements: Dict[str, Any] = {} - - # collect base class data first - for base in bases: - available.update(getattr(base, "_available_", set())) - replacements.update(getattr(base, "_parse_replacements_", set())) - - # override with local data if it exists - for name, value in namespace.items(): - if getattr(value, "_is_available_", False): - available.add(name) - parse_replacement = getattr(value, "_parse_replacement_", None) - if parse_replacement is not None: - replacements[name] = parse_replacement - - cls._available_ = frozenset(available) - # should this be a namedtuple so it will be immutable like _available_? - cls._parse_replacements_ = replacements - return cls diff --git a/core/dbt/adapters/base/plugin.py b/core/dbt/adapters/base/plugin.py deleted file mode 100644 index dc41fb68110..00000000000 --- a/core/dbt/adapters/base/plugin.py +++ /dev/null @@ -1,33 +0,0 @@ -from typing import List, Optional, Type -from pathlib import Path - -from dbt.adapters.base import Credentials -from dbt.adapters.protocol import AdapterProtocol - - -class AdapterPlugin: - """Defines the basic requirements for a dbt adapter plugin. - - :param include_path: The path to this adapter plugin's root - :param dependencies: A list of adapter names that this adapter depends - upon. 
- """ - - def __init__( - self, - adapter: Type[AdapterProtocol], - credentials: Type[Credentials], - include_path: str, - dependencies: Optional[List[str]] = None, - project_name: Optional[str] = None, - ) -> None: - - self.adapter: Type[AdapterProtocol] = adapter - self.credentials: Type[Credentials] = credentials - self.include_path: str = include_path - self.project_name: str = project_name or f"dbt_{Path(include_path).name}" - self.dependencies: List[str] - if dependencies is None: - self.dependencies = [] - else: - self.dependencies = dependencies diff --git a/core/dbt/adapters/base/query_headers.py b/core/dbt/adapters/base/query_headers.py deleted file mode 100644 index 8ab8088aa3c..00000000000 --- a/core/dbt/adapters/base/query_headers.py +++ /dev/null @@ -1,100 +0,0 @@ -from threading import local -from typing import Optional, Callable, Dict, Any - -from dbt.adapters.clients.jinja import QueryStringGenerator -from dbt.adapters.contracts.connection import AdapterRequiredConfig, QueryComment -from dbt_common.exceptions import DbtRuntimeError - - -class QueryHeaderContextWrapper: - def __init__(self, context) -> None: - self._inner_context = context - - def __getattr__(self, name): - return getattr(self._inner_context, name, "") - - -class _QueryComment(local): - """A thread-local class storing thread-specific state information for - connection management, namely: - - the current thread's query comment. - - a source_name indicating what set the current thread's query comment - """ - - def __init__(self, initial) -> None: - self.query_comment: Optional[str] = initial - self.append: bool = False - - def add(self, sql: str) -> str: - if not self.query_comment: - return sql - - if self.append: - # replace last ';' with ';' - sql = sql.rstrip() - if sql[-1] == ";": - sql = sql[:-1] - return "{}\n/* {} */;".format(sql, self.query_comment.strip()) - - return "{}\n/* {} */".format(sql, self.query_comment.strip()) - - return "/* {} */\n{}".format(self.query_comment.strip(), sql) - - def set(self, comment: Optional[str], append: bool): - if isinstance(comment, str) and "*/" in comment: - # tell the user "no" so they don't hurt themselves by writing - # garbage - raise DbtRuntimeError(f'query comment contains illegal value "*/": {comment}') - self.query_comment = comment - self.append = append - - -QueryStringFunc = Callable[[str, Optional[QueryHeaderContextWrapper]], str] - - -class MacroQueryStringSetter: - def __init__( - self, config: AdapterRequiredConfig, query_header_context: Dict[str, Any] - ) -> None: - self.config = config - self._query_header_context = query_header_context - - comment_macro = self._get_comment_macro() - self.generator: QueryStringFunc = lambda name, model: "" - # if the comment value was None or the empty string, just skip it - if comment_macro: - assert isinstance(comment_macro, str) - macro = "\n".join( - ( - "{%- macro query_comment_macro(connection_name, node) -%}", - comment_macro, - "{% endmacro %}", - ) - ) - ctx = self._get_context() - self.generator = QueryStringGenerator(macro, ctx) - self.comment = _QueryComment(None) - self.reset() - - def _get_comment_macro(self) -> Optional[str]: - return self.config.query_comment.comment - - def _get_context(self) -> Dict[str, Any]: - return self._query_header_context - - def add(self, sql: str) -> str: - return self.comment.add(sql) - - def reset(self): - self.set("master", None) - - def set(self, name: str, query_header_context: Any): - wrapped: Optional[QueryHeaderContextWrapper] = None - if query_header_context is 
not None: - wrapped = QueryHeaderContextWrapper(query_header_context) - comment_str = self.generator(name, wrapped) - - append = False - if isinstance(self.config.query_comment, QueryComment): - append = self.config.query_comment.append - self.comment.set(comment_str, append) diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py deleted file mode 100644 index 8dce405bee9..00000000000 --- a/core/dbt/adapters/base/relation.py +++ /dev/null @@ -1,459 +0,0 @@ -from collections.abc import Hashable -from dataclasses import dataclass, field -from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set, Union, FrozenSet - -from dbt.adapters.contracts.relation import ( - RelationConfig, - RelationType, - ComponentName, - HasQuoting, - FakeAPIObject, - Policy, - Path, -) -from dbt.adapters.exceptions import MultipleDatabasesNotAllowedError, ApproximateMatchError -from dbt_common.utils import filter_null_values, deep_merge -from dbt.adapters.utils import classproperty - -import dbt_common.exceptions - - -Self = TypeVar("Self", bound="BaseRelation") -SerializableIterable = Union[Tuple, FrozenSet] - - -@dataclass(frozen=True, eq=False, repr=False) -class BaseRelation(FakeAPIObject, Hashable): - path: Path - type: Optional[RelationType] = None - quote_character: str = '"' - # Python 3.11 requires that these use default_factory instead of simple default - # ValueError: mutable default for field include_policy is not allowed: use default_factory - include_policy: Policy = field(default_factory=lambda: Policy()) - quote_policy: Policy = field(default_factory=lambda: Policy()) - dbt_created: bool = False - limit: Optional[int] = None - - # register relation types that can be renamed for the purpose of replacing relations using stages and backups - # adding a relation type here also requires defining the associated rename macro - # e.g. adding RelationType.View in dbt-postgres requires that you define: - # include/postgres/macros/relations/view/rename.sql::postgres__get_rename_view_sql() - renameable_relations: SerializableIterable = () - - # register relation types that are atomically replaceable, e.g. they have "create or replace" syntax - # adding a relation type here also requires defining the associated replace macro - # e.g. adding RelationType.View in dbt-postgres requires that you define: - # include/postgres/macros/relations/view/replace.sql::postgres__get_replace_view_sql() - replaceable_relations: SerializableIterable = () - - def _is_exactish_match(self, field: ComponentName, value: str) -> bool: - if self.dbt_created and self.quote_policy.get_part(field) is False: - return self.path.get_lowered_part(field) == value.lower() - else: - return self.path.get_part(field) == value - - @classmethod - def _get_field_named(cls, field_name): - for f, _ in cls._get_fields(): - if f.name == field_name: - return f - # this should be unreachable - raise ValueError(f"BaseRelation has no {field_name} field!") - - def __eq__(self, other): - if not isinstance(other, self.__class__): - return False - return self.to_dict(omit_none=True) == other.to_dict(omit_none=True) - - @classmethod - def get_default_quote_policy(cls) -> Policy: - return cls._get_field_named("quote_policy").default_factory() - - @classmethod - def get_default_include_policy(cls) -> Policy: - return cls._get_field_named("include_policy").default_factory() - - def get(self, key, default=None): - """Override `.get` to return a metadata object so we don't break - dbt_utils. 
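# --- worked example of the comment-placement rules in _QueryComment above:
# prepend by default, and when appending, re-attach a trailing semicolon
# after the comment block.
def add_comment(sql, comment, append=False):
    if not comment:
        return sql
    if append:
        sql = sql.rstrip()
        if sql[-1] == ";":
            return "{}\n/* {} */;".format(sql[:-1], comment.strip())
        return "{}\n/* {} */".format(sql, comment.strip())
    return "/* {} */\n{}".format(comment.strip(), sql)

print(add_comment("select 1;", "run by dbt", append=True))
# select 1
# /* run by dbt */;
print(add_comment("select 1", "run by dbt"))
# /* run by dbt */
# select 1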
- """ - if key == "metadata": - return {"type": self.__class__.__name__} - return super().get(key, default) - - def matches( - self, - database: Optional[str] = None, - schema: Optional[str] = None, - identifier: Optional[str] = None, - ) -> bool: - search = filter_null_values( - { - ComponentName.Database: database, - ComponentName.Schema: schema, - ComponentName.Identifier: identifier, - } - ) - - if not search: - # nothing was passed in - raise dbt_common.exceptions.DbtRuntimeError( - "Tried to match relation, but no search path was passed!" - ) - - exact_match = True - approximate_match = True - - for k, v in search.items(): - if not self._is_exactish_match(k, v): - exact_match = False - if str(self.path.get_lowered_part(k)).strip(self.quote_character) != v.lower().strip( - self.quote_character - ): - approximate_match = False # type: ignore[union-attr] - - if approximate_match and not exact_match: - target = self.create(database=database, schema=schema, identifier=identifier) - raise ApproximateMatchError(target, self) - - return exact_match - - def replace_path(self, **kwargs): - return self.replace(path=self.path.replace(**kwargs)) - - def quote( - self: Self, - database: Optional[bool] = None, - schema: Optional[bool] = None, - identifier: Optional[bool] = None, - ) -> Self: - policy = filter_null_values( - { - ComponentName.Database: database, - ComponentName.Schema: schema, - ComponentName.Identifier: identifier, - } - ) - - new_quote_policy = self.quote_policy.replace_dict(policy) - return self.replace(quote_policy=new_quote_policy) - - def include( - self: Self, - database: Optional[bool] = None, - schema: Optional[bool] = None, - identifier: Optional[bool] = None, - ) -> Self: - policy = filter_null_values( - { - ComponentName.Database: database, - ComponentName.Schema: schema, - ComponentName.Identifier: identifier, - } - ) - - new_include_policy = self.include_policy.replace_dict(policy) - return self.replace(include_policy=new_include_policy) - - def information_schema(self, view_name=None) -> "InformationSchema": - # some of our data comes from jinja, where things can be `Undefined`. - if not isinstance(view_name, str): - view_name = None - - # Kick the user-supplied schema out of the information schema relation - # Instead address this as .information_schema by default - info_schema = InformationSchema.from_relation(self, view_name) - return info_schema.incorporate(path={"schema": None}) - - def information_schema_only(self) -> "InformationSchema": - return self.information_schema() - - def without_identifier(self) -> "BaseRelation": - """Return a form of this relation that only has the database and schema - set to included. To get the appropriately-quoted form the schema out of - the result (for use as part of a query), use `.render()`. To get the - raw database or schema name, use `.database` or `.schema`. - - The hash of the returned object is the result of render(). - """ - return self.include(identifier=False).replace_path(identifier=None) - - def _render_iterator(self) -> Iterator[Tuple[Optional[ComponentName], Optional[str]]]: - for key in ComponentName: - path_part: Optional[str] = None - if self.include_policy.get_part(key): - path_part = self.path.get_part(key) - if path_part is not None and self.quote_policy.get_part(key): - path_part = self.quoted(path_part) - yield key, path_part - - def render(self) -> str: - # if there is nothing set, this will return the empty string. 
- return ".".join(part for _, part in self._render_iterator() if part is not None) - - def render_limited(self) -> str: - rendered = self.render() - if self.limit is None: - return rendered - elif self.limit == 0: - return f"(select * from {rendered} where false limit 0) _dbt_limit_subq" - else: - return f"(select * from {rendered} limit {self.limit}) _dbt_limit_subq" - - def quoted(self, identifier): - return "{quote_char}{identifier}{quote_char}".format( - quote_char=self.quote_character, - identifier=identifier, - ) - - @staticmethod - def add_ephemeral_prefix(name: str): - return f"__dbt__cte__{name}" - - @classmethod - def create_ephemeral_from( - cls: Type[Self], - relation_config: RelationConfig, - limit: Optional[int] = None, - ) -> Self: - # Note that ephemeral models are based on the name. - identifier = cls.add_ephemeral_prefix(relation_config.name) - return cls.create( - type=cls.CTE, - identifier=identifier, - limit=limit, - ).quote(identifier=False) - - @classmethod - def create_from( - cls: Type[Self], - quoting: HasQuoting, - relation_config: RelationConfig, - **kwargs: Any, - ) -> Self: - quote_policy = kwargs.pop("quote_policy", {}) - - config_quoting = relation_config.quoting_dict - config_quoting.pop("column", None) - # precedence: kwargs quoting > relation config quoting > base quoting > default quoting - quote_policy = deep_merge( - cls.get_default_quote_policy().to_dict(omit_none=True), - quoting.quoting, - config_quoting, - quote_policy, - ) - - return cls.create( - database=relation_config.database, - schema=relation_config.schema, - identifier=relation_config.identifier, - quote_policy=quote_policy, - **kwargs, - ) - - @classmethod - def create( - cls: Type[Self], - database: Optional[str] = None, - schema: Optional[str] = None, - identifier: Optional[str] = None, - type: Optional[RelationType] = None, - **kwargs, - ) -> Self: - kwargs.update( - { - "path": { - "database": database, - "schema": schema, - "identifier": identifier, - }, - "type": type, - } - ) - return cls.from_dict(kwargs) - - @property - def can_be_renamed(self) -> bool: - return self.type in self.renameable_relations - - @property - def can_be_replaced(self) -> bool: - return self.type in self.replaceable_relations - - def __repr__(self) -> str: - return "<{} {}>".format(self.__class__.__name__, self.render()) - - def __hash__(self) -> int: - return hash(self.render()) - - def __str__(self) -> str: - return self.render() if self.limit is None else self.render_limited() - - @property - def database(self) -> Optional[str]: - return self.path.database - - @property - def schema(self) -> Optional[str]: - return self.path.schema - - @property - def identifier(self) -> Optional[str]: - return self.path.identifier - - @property - def table(self) -> Optional[str]: - return self.path.identifier - - # Here for compatibility with old Relation interface - @property - def name(self) -> Optional[str]: - return self.identifier - - @property - def is_table(self) -> bool: - return self.type == RelationType.Table - - @property - def is_cte(self) -> bool: - return self.type == RelationType.CTE - - @property - def is_view(self) -> bool: - return self.type == RelationType.View - - @property - def is_materialized_view(self) -> bool: - return self.type == RelationType.MaterializedView - - @classproperty - def Table(cls) -> str: - return str(RelationType.Table) - - @classproperty - def CTE(cls) -> str: - return str(RelationType.CTE) - - @classproperty - def View(cls) -> str: - return str(RelationType.View) - - 
@classproperty - def External(cls) -> str: - return str(RelationType.External) - - @classproperty - def MaterializedView(cls) -> str: - return str(RelationType.MaterializedView) - - @classproperty - def get_relation_type(cls) -> Type[RelationType]: - return RelationType - - -Info = TypeVar("Info", bound="InformationSchema") - - -@dataclass(frozen=True, eq=False, repr=False) -class InformationSchema(BaseRelation): - information_schema_view: Optional[str] = None - - def __post_init__(self): - if not isinstance(self.information_schema_view, (type(None), str)): - raise dbt_common.exceptions.CompilationError( - "Got an invalid name: {}".format(self.information_schema_view) - ) - - @classmethod - def get_path(cls, relation: BaseRelation, information_schema_view: Optional[str]) -> Path: - return Path( - database=relation.database, - schema=relation.schema, - identifier="INFORMATION_SCHEMA", - ) - - @classmethod - def get_include_policy( - cls, - relation, - information_schema_view: Optional[str], - ) -> Policy: - return relation.include_policy.replace( - database=relation.database is not None, - schema=False, - identifier=True, - ) - - @classmethod - def get_quote_policy( - cls, - relation, - information_schema_view: Optional[str], - ) -> Policy: - return relation.quote_policy.replace( - identifier=False, - ) - - @classmethod - def from_relation( - cls: Type[Info], - relation: BaseRelation, - information_schema_view: Optional[str], - ) -> Info: - include_policy = cls.get_include_policy(relation, information_schema_view) - quote_policy = cls.get_quote_policy(relation, information_schema_view) - path = cls.get_path(relation, information_schema_view) - return cls( - type=RelationType.View, - path=path, - include_policy=include_policy, - quote_policy=quote_policy, - information_schema_view=information_schema_view, - ) - - def _render_iterator(self): - for k, v in super()._render_iterator(): - yield k, v - yield None, self.information_schema_view - - -class SchemaSearchMap(Dict[InformationSchema, Set[Optional[str]]]): - """A utility class to keep track of what information_schema tables to - search for what schemas. The schema values are all lowercased to avoid - duplication. 
-    """
-
-    def add(self, relation: BaseRelation):
-        key = relation.information_schema_only()
-        if key not in self:
-            self[key] = set()
-        schema: Optional[str] = None
-        if relation.schema is not None:
-            schema = relation.schema.lower()
-        self[key].add(schema)
-
-    def search(self) -> Iterator[Tuple[InformationSchema, Optional[str]]]:
-        for information_schema, schemas in self.items():
-            for schema in schemas:
-                yield information_schema, schema
-
-    def flatten(self, allow_multiple_databases: bool = False) -> "SchemaSearchMap":
-        new = self.__class__()
-
-        # make sure we don't have multiple databases if allow_multiple_databases is set to False
-        if not allow_multiple_databases:
-            seen = {r.database.lower() for r in self if r.database}
-            if len(seen) > 1:
-                raise MultipleDatabasesNotAllowedError(seen)
-
-        for information_schema_name, schema in self.search():
-            path = {"database": information_schema_name.database, "schema": schema}
-            new.add(
-                information_schema_name.incorporate(
-                    path=path,
-                    quote_policy={"database": False},
-                    include_policy={"database": False},
-                )
-            )
-
-        return new
diff --git a/core/dbt/adapters/cache.py b/core/dbt/adapters/cache.py
deleted file mode 100644
index f7c474bda34..00000000000
--- a/core/dbt/adapters/cache.py
+++ /dev/null
@@ -1,518 +0,0 @@
-import threading
-from copy import deepcopy
-from typing import Any, Dict, Iterable, List, Optional, Set, Tuple
-
-from dbt.adapters.reference_keys import (
-    _make_ref_key,
-    _make_ref_key_dict,
-    _ReferenceKey,
-)
-from dbt.adapters.exceptions.cache import (
-    NewNameAlreadyInCacheError,
-    ReferencedLinkNotCachedError,
-    DependentLinkNotCachedError,
-    TruncatedModelNameCausedCollisionError,
-    NoneRelationFoundError,
-)
-from dbt_common.events.functions import fire_event, fire_event_if
-from dbt.adapters.events.types import CacheAction, CacheDumpGraph
-from dbt_common.utils.formatting import lowercase
-
-
-def dot_separated(key: _ReferenceKey) -> str:
-    """Return the key in dot-separated string form.
-
-    :param _ReferenceKey key: The key to stringify.
-    """
-    return ".".join(map(str, key))
-
-
-class _CachedRelation:
-    """Nothing about _CachedRelation is guaranteed to be thread-safe!
-
-    :attr str schema: The schema of this relation.
-    :attr str identifier: The identifier of this relation.
-    :attr Dict[_ReferenceKey, _CachedRelation] referenced_by: The relations
-        that refer to this relation.
-    :attr BaseRelation inner: The underlying dbt relation.
-    """
-
-    def __init__(self, inner) -> None:
-        self.referenced_by: Dict[_ReferenceKey, _CachedRelation] = {}
-        self.inner = inner
-
-    def __str__(self) -> str:
-        return ("_CachedRelation(database={}, schema={}, identifier={}, inner={})").format(
-            self.database, self.schema, self.identifier, self.inner
-        )
-
-    @property
-    def database(self) -> Optional[str]:
-        return lowercase(self.inner.database)
-
-    @property
-    def schema(self) -> Optional[str]:
-        return lowercase(self.inner.schema)
-
-    @property
-    def identifier(self) -> Optional[str]:
-        return lowercase(self.inner.identifier)
-
-    def __copy__(self):
-        new = self.__class__(self.inner)
-        new.__dict__.update(self.__dict__)
-        return new
-
-    def __deepcopy__(self, memo):
-        new = self.__class__(self.inner.incorporate())
-        new.__dict__.update(self.__dict__)
-        new.referenced_by = deepcopy(self.referenced_by, memo)
-        return new  # __deepcopy__ must return the copy
-
-    def is_referenced_by(self, key):
-        return key in self.referenced_by
-
-    def key(self):
-        """Get the _ReferenceKey that represents this relation
-
-        :return _ReferenceKey: A key for this relation.
- """ - return _make_ref_key(self) - - def add_reference(self, referrer: "_CachedRelation"): - """Add a reference from referrer to self, indicating that if this node - were drop...cascaded, the referrer would be dropped as well. - - :param _CachedRelation referrer: The node that refers to this node. - """ - self.referenced_by[referrer.key()] = referrer - - def collect_consequences(self): - """Recursively collect a set of _ReferenceKeys that would - consequentially get dropped if this were dropped via - "drop ... cascade". - - :return Set[_ReferenceKey]: All the relations that would be dropped - """ - consequences = {self.key()} - for relation in self.referenced_by.values(): - consequences.update(relation.collect_consequences()) - return consequences - - def release_references(self, keys): - """Non-recursively indicate that an iterable of _ReferenceKey no longer - exist. Unknown keys are ignored. - - :param Iterable[_ReferenceKey] keys: The keys to drop. - """ - keys = set(self.referenced_by) & set(keys) - for key in keys: - self.referenced_by.pop(key) - - def rename(self, new_relation): - """Rename this cached relation to new_relation. - Note that this will change the output of key(), all refs must be - updated! - - :param _CachedRelation new_relation: The new name to apply to the - relation - """ - # Relations store this stuff inside their `path` dict. But they - # also store a table_name, and usually use it in their .render(), - # so we need to update that as well. It doesn't appear that - # table_name is ever anything but the identifier (via .create()) - self.inner = self.inner.incorporate( - path={ - "database": new_relation.inner.database, - "schema": new_relation.inner.schema, - "identifier": new_relation.inner.identifier, - }, - ) - - def rename_key(self, old_key, new_key): - """Rename a reference that may or may not exist. Only handles the - reference itself, so this is the other half of what `rename` does. - - If old_key is not in referenced_by, this is a no-op. - - :param _ReferenceKey old_key: The old key to be renamed. - :param _ReferenceKey new_key: The new key to rename to. - :raises InternalError: If the new key already exists. - """ - if new_key in self.referenced_by: - raise NewNameAlreadyInCacheError(old_key, new_key) - - if old_key not in self.referenced_by: - return - value = self.referenced_by.pop(old_key) - self.referenced_by[new_key] = value - - def dump_graph_entry(self): - """Return a key/value pair representing this key and its referents. - - return List[str]: The dot-separated form of all referent keys. - """ - return [dot_separated(r) for r in self.referenced_by] - - -class RelationsCache: - """A cache of the relations known to dbt. Keeps track of relationships - declared between tables and handles renames/drops as a real database would. - - :attr Dict[_ReferenceKey, _CachedRelation] relations: The known relations. - :attr threading.RLock lock: The lock around relations, held during updates. - The adapters also hold this lock while filling the cache. - :attr Set[str] schemas: The set of known/cached schemas, all lowercased. 
-    """
-
-    def __init__(self, log_cache_events: bool = False) -> None:
-        self.relations: Dict[_ReferenceKey, _CachedRelation] = {}
-        self.lock = threading.RLock()
-        self.schemas: Set[Tuple[Optional[str], Optional[str]]] = set()
-        self.log_cache_events = log_cache_events
-
-    def add_schema(
-        self,
-        database: Optional[str],
-        schema: Optional[str],
-    ) -> None:
-        """Add a schema to the set of known schemas (case-insensitive)
-
-        :param database: The database name to add.
-        :param schema: The schema name to add.
-        """
-        self.schemas.add((lowercase(database), lowercase(schema)))
-
-    def drop_schema(
-        self,
-        database: Optional[str],
-        schema: Optional[str],
-    ) -> None:
-        """Drop the given schema and remove it from the set of known schemas.
-
-        Then remove all its contents (and their dependents, etc) as well.
-        """
-        key = (lowercase(database), lowercase(schema))
-        if key not in self.schemas:
-            return
-
-        # avoid iterating over self.relations while removing things by
-        # collecting the list first.
-
-        with self.lock:
-            to_remove = self._list_relations_in_schema(database, schema)
-            self._remove_all(to_remove)
-            # handle a drop_schema race by using discard() over remove()
-            self.schemas.discard(key)
-
-    def update_schemas(self, schemas: Iterable[Tuple[Optional[str], str]]):
-        """Add multiple schemas to the set of known schemas (case-insensitive)
-
-        :param schemas: An iterable of the schema names to add.
-        """
-        self.schemas.update((lowercase(d), s.lower()) for (d, s) in schemas)
-
-    def __contains__(self, schema_id: Tuple[Optional[str], str]):
-        """A schema is 'in' the relations cache if it is in the set of cached
-        schemas.
-
-        :param schema_id: The db name and schema name to look up.
-        """
-        db, schema = schema_id
-        return (lowercase(db), schema.lower()) in self.schemas
-
-    def dump_graph(self):
-        """Dump a key-only representation of the schema to a dictionary. Every
-        known relation is a key with a value of a list of keys it is referenced
-        by.
-        """
-        # we have to hold the lock for the entire dump, if other threads modify
-        # self.relations or any cache entry's referenced_by during iteration
-        # it's a runtime error!
-        with self.lock:
-            return {dot_separated(k): str(v.dump_graph_entry()) for k, v in self.relations.items()}
-
-    def _setdefault(self, relation: _CachedRelation):
-        """Add a relation to the cache, or return it if it already exists.
-
-        :param _CachedRelation relation: The relation to set or get.
-        :return _CachedRelation: The relation stored under the given relation's
-            key
-        """
-        self.add_schema(relation.database, relation.schema)
-        key = relation.key()
-        return self.relations.setdefault(key, relation)
-
-    def _add_link(self, referenced_key, dependent_key):
-        """Add a link between two relations to the database. Both the
-        referenced and dependent entries must already exist in the database.
-
-        :param _ReferenceKey referenced_key: The key identifying the referenced
-            model (the one that if dropped will drop the dependent model).
-        :param _ReferenceKey dependent_key: The key identifying the dependent
-            model.
-        :raises InternalError: If either entry does not exist.
-        """
-        referenced = self.relations.get(referenced_key)
-        if referenced is None:
-            raise ReferencedLinkNotCachedError(referenced_key)
-
-        dependent = self.relations.get(dependent_key)
-        if dependent is None:
-            raise DependentLinkNotCachedError(dependent_key)
-
-        assert dependent is not None  # we just raised!
- - referenced.add_reference(dependent) - - # This is called in plugins/postgres/dbt/adapters/postgres/impl.py - def add_link(self, referenced, dependent): - """Add a link between two relations to the database. If either relation - does not exist, it will be added as an "external" relation. - - The dependent model refers _to_ the referenced model. So, given - arguments of (jake_test, bar, jake_test, foo): - both values are in the schema jake_test and foo is a view that refers - to bar, so "drop bar cascade" will drop foo and all of foo's - dependents. - - :param BaseRelation referenced: The referenced model. - :param BaseRelation dependent: The dependent model. - :raises InternalError: If either entry does not exist. - """ - ref_key = _make_ref_key(referenced) - dep_key = _make_ref_key(dependent) - if (ref_key.database, ref_key.schema) not in self: - # if we have not cached the referenced schema at all, we must be - # referring to a table outside our control. There's no need to make - # a link - we will never drop the referenced relation during a run. - fire_event( - CacheAction( - ref_key=ref_key._asdict(), - ref_key_2=dep_key._asdict(), - ) - ) - return - if ref_key not in self.relations: - # Insert a dummy "external" relation. - referenced = referenced.replace(type=referenced.External) - self.add(referenced) - if dep_key not in self.relations: - # Insert a dummy "external" relation. - dependent = dependent.replace(type=referenced.External) - self.add(dependent) - fire_event( - CacheAction( - action="add_link", - ref_key=dep_key._asdict(), - ref_key_2=ref_key._asdict(), - ) - ) - with self.lock: - self._add_link(ref_key, dep_key) - - def add(self, relation): - """Add the relation inner to the cache, under the schema schema and - identifier identifier - - :param BaseRelation relation: The underlying relation. - """ - cached = _CachedRelation(relation) - fire_event_if( - self.log_cache_events, - lambda: CacheDumpGraph(before_after="before", action="adding", dump=self.dump_graph()), - ) - fire_event(CacheAction(action="add_relation", ref_key=_make_ref_key_dict(cached))) - - with self.lock: - self._setdefault(cached) - fire_event_if( - self.log_cache_events, - lambda: CacheDumpGraph(before_after="after", action="adding", dump=self.dump_graph()), - ) - - def _remove_refs(self, keys): - """Removes all references to all entries in keys. This does not - cascade! - - :param Iterable[_ReferenceKey] keys: The keys to remove. - """ - # remove direct refs - for key in keys: - del self.relations[key] - # then remove all entries from each child - for cached in self.relations.values(): - cached.release_references(keys) - - def drop(self, relation): - """Drop the named relation and cascade it appropriately to all - dependent relations. - - Because dbt proactively does many `drop relation if exist ... cascade` - that are noops, nonexistent relation drops cause a debug log and no - other actions. - - :param str schema: The schema of the relation to drop. - :param str identifier: The identifier of the relation to drop. 
- """ - dropped_key = _make_ref_key(relation) - dropped_key_msg = _make_ref_key_dict(relation) - fire_event(CacheAction(action="drop_relation", ref_key=dropped_key_msg)) - with self.lock: - if dropped_key not in self.relations: - fire_event(CacheAction(action="drop_missing_relation", ref_key=dropped_key_msg)) - return - consequences = self.relations[dropped_key].collect_consequences() - # convert from a list of _ReferenceKeys to a list of ReferenceKeyMsgs - consequence_msgs = [key._asdict() for key in consequences] - fire_event( - CacheAction( - action="drop_cascade", ref_key=dropped_key_msg, ref_list=consequence_msgs - ) - ) - self._remove_refs(consequences) - - def _rename_relation(self, old_key, new_relation): - """Rename a relation named old_key to new_key, updating references. - Return whether or not there was a key to rename. - - :param _ReferenceKey old_key: The existing key, to rename from. - :param _CachedRelation new_key: The new relation, to rename to. - """ - # On the database level, a rename updates all values that were - # previously referenced by old_name to be referenced by new_name. - # basically, the name changes but some underlying ID moves. Kind of - # like an object reference! - relation = self.relations.pop(old_key) - new_key = new_relation.key() - - # relation has to rename its innards, so it needs the _CachedRelation. - relation.rename(new_relation) - # update all the relations that refer to it - for cached in self.relations.values(): - if cached.is_referenced_by(old_key): - fire_event( - CacheAction( - action="update_reference", - ref_key=_make_ref_key_dict(old_key), - ref_key_2=_make_ref_key_dict(new_key), - ref_key_3=_make_ref_key_dict(cached.key()), - ) - ) - - cached.rename_key(old_key, new_key) - - self.relations[new_key] = relation - # also fixup the schemas! - self.add_schema(new_key.database, new_key.schema) - - return True - - def _check_rename_constraints(self, old_key, new_key): - """Check the rename constraints, and return whether or not the rename - can proceed. - - If the new key is already present, that is an error. - If the old key is absent, we debug log and return False, assuming it's - a temp table being renamed. - - :param _ReferenceKey old_key: The existing key, to rename from. - :param _ReferenceKey new_key: The new key, to rename to. - :return bool: If the old relation exists for renaming. - :raises InternalError: If the new key is already present. - """ - if new_key in self.relations: - # Tell user when collision caused by model names truncated during - # materialization. - raise TruncatedModelNameCausedCollisionError(new_key, self.relations) - - if old_key not in self.relations: - fire_event(CacheAction(action="temporary_relation", ref_key=old_key._asdict())) - return False - return True - - def rename(self, old, new): - """Rename the old schema/identifier to the new schema/identifier and - update references. - - If the new schema/identifier is already present, that is an error. - If the schema/identifier key is absent, we only debug log and return, - assuming it's a temp table being renamed. - - :param BaseRelation old: The existing relation name information. - :param BaseRelation new: The new relation name information. - :raises InternalError: If the new key is already present. 
- """ - old_key = _make_ref_key(old) - new_key = _make_ref_key(new) - fire_event( - CacheAction( - action="rename_relation", - ref_key=old_key._asdict(), - ref_key_2=new_key._asdict(), - ) - ) - fire_event_if( - self.log_cache_events, - lambda: CacheDumpGraph(before_after="before", action="rename", dump=self.dump_graph()), - ) - - with self.lock: - if self._check_rename_constraints(old_key, new_key): - self._rename_relation(old_key, _CachedRelation(new)) - else: - self._setdefault(_CachedRelation(new)) - - fire_event_if( - self.log_cache_events, - lambda: CacheDumpGraph(before_after="after", action="rename", dump=self.dump_graph()), - ) - - def get_relations(self, database: Optional[str], schema: Optional[str]) -> List[Any]: - """Case-insensitively yield all relations matching the given schema. - - :param str schema: The case-insensitive schema name to list from. - :return List[BaseRelation]: The list of relations with the given - schema - """ - database = lowercase(database) - schema = lowercase(schema) - with self.lock: - results = [ - r.inner - for r in self.relations.values() - if (lowercase(r.schema) == schema and lowercase(r.database) == database) - ] - - if None in results: - raise NoneRelationFoundError() - return results - - def clear(self): - """Clear the cache""" - with self.lock: - self.relations.clear() - self.schemas.clear() - - def _list_relations_in_schema( - self, database: Optional[str], schema: Optional[str] - ) -> List[_CachedRelation]: - """Get the relations in a schema. Callers should hold the lock.""" - key = (lowercase(database), lowercase(schema)) - - to_remove: List[_CachedRelation] = [] - for cachekey, relation in self.relations.items(): - if (cachekey.database, cachekey.schema) == key: - to_remove.append(relation) - return to_remove - - def _remove_all(self, to_remove: List[_CachedRelation]): - """Remove all the listed relations. Ignore relations that have been - cascaded out. 
- """ - for relation in to_remove: - # it may have been cascaded out already - drop_key = _make_ref_key(relation) - if drop_key in self.relations: - self.drop(drop_key) diff --git a/core/dbt/adapters/capability.py b/core/dbt/adapters/capability.py deleted file mode 100644 index 745cb27a648..00000000000 --- a/core/dbt/adapters/capability.py +++ /dev/null @@ -1,52 +0,0 @@ -from dataclasses import dataclass -from enum import Enum -from typing import Optional, DefaultDict, Mapping - - -class Capability(str, Enum): - """Enumeration of optional adapter features which can be probed using BaseAdapter.capabilities()""" - - SchemaMetadataByRelations = "SchemaMetadataByRelations" - """Indicates efficient support for retrieving schema metadata for a list of relations, rather than always retrieving - all the relations in a schema.""" - - TableLastModifiedMetadata = "TableLastModifiedMetadata" - """Indicates support for determining the time of the last table modification by querying database metadata.""" - - -class Support(str, Enum): - Unknown = "Unknown" - """The adapter has not declared whether this capability is a feature of the underlying DBMS.""" - - Unsupported = "Unsupported" - """This capability is not possible with the underlying DBMS, so the adapter does not implement related macros.""" - - NotImplemented = "NotImplemented" - """This capability is available in the underlying DBMS, but support has not yet been implemented in the adapter.""" - - Versioned = "Versioned" - """Some versions of the DBMS supported by the adapter support this capability and the adapter has implemented any - macros needed to use it.""" - - Full = "Full" - """All versions of the DBMS supported by the adapter support this capability and the adapter has implemented any - macros needed to use it.""" - - -@dataclass -class CapabilitySupport: - support: Support - first_version: Optional[str] = None - - def __bool__(self): - return self.support == Support.Versioned or self.support == Support.Full - - -class CapabilityDict(DefaultDict[Capability, CapabilitySupport]): - def __init__(self, vals: Mapping[Capability, CapabilitySupport]): - super().__init__(self._default) - self.update(vals) - - @staticmethod - def _default(): - return CapabilitySupport(support=Support.Unknown) diff --git a/core/dbt/adapters/clients/__init__.py b/core/dbt/adapters/clients/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/core/dbt/adapters/clients/jinja.py b/core/dbt/adapters/clients/jinja.py deleted file mode 100644 index c2b6edbbfa7..00000000000 --- a/core/dbt/adapters/clients/jinja.py +++ /dev/null @@ -1,23 +0,0 @@ -from typing import Dict, Any -from dbt_common.clients.jinja import BaseMacroGenerator, get_environment - - -class QueryStringGenerator(BaseMacroGenerator): - def __init__(self, template_str: str, context: Dict[str, Any]) -> None: - super().__init__(context) - self.template_str: str = template_str - env = get_environment() - self.template = env.from_string( - self.template_str, - globals=self.context, - ) - - def get_name(self) -> str: - return "query_comment_macro" - - def get_template(self): - """Don't use the template cache, we don't have a node""" - return self.template - - def __call__(self, connection_name: str, node) -> str: - return str(self.call_macro(connection_name, node)) diff --git a/core/dbt/adapters/contracts/__init__.py b/core/dbt/adapters/contracts/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/core/dbt/adapters/contracts/connection.py 
b/core/dbt/adapters/contracts/connection.py deleted file mode 100644 index 14e9f07e71d..00000000000 --- a/core/dbt/adapters/contracts/connection.py +++ /dev/null @@ -1,229 +0,0 @@ -import abc -import itertools -from dataclasses import dataclass, field -from typing import ( - Any, - ClassVar, - Dict, - Tuple, - Iterable, - Optional, - List, - Callable, -) -from typing_extensions import Protocol, Annotated - -from mashumaro.jsonschema.annotations import Pattern - -from dbt.adapters.utils import translate_aliases -from dbt_common.exceptions import DbtInternalError -from dbt_common.dataclass_schema import ( - dbtClassMixin, - StrEnum, - ExtensibleDbtClassMixin, - ValidatedStringMixin, -) -from dbt_common.contracts.util import Replaceable -from dbt_common.utils import md5 - -from dbt_common.events.functions import fire_event -from dbt.adapters.events.types import NewConnectionOpening - -# TODO: this is a very bad dependency - shared global state -from dbt_common.events.contextvars import get_node_info - - -class Identifier(ValidatedStringMixin): - ValidationRegex = r"^[A-Za-z_][A-Za-z0-9_]+$" - - -@dataclass -class AdapterResponse(dbtClassMixin): - _message: str - code: Optional[str] = None - rows_affected: Optional[int] = None - - def __str__(self): - return self._message - - -class ConnectionState(StrEnum): - INIT = "init" - OPEN = "open" - CLOSED = "closed" - FAIL = "fail" - - -@dataclass(init=False) -class Connection(ExtensibleDbtClassMixin, Replaceable): - # Annotated is used by mashumaro for jsonschema generation - type: Annotated[Identifier, Pattern(r"^[A-Za-z_][A-Za-z0-9_]+$")] - name: Optional[str] = None - state: ConnectionState = ConnectionState.INIT - transaction_open: bool = False - _handle: Optional[Any] = None - _credentials: Optional[Any] = None - - def __init__( - self, - type: Identifier, - name: Optional[str], - credentials: dbtClassMixin, - state: ConnectionState = ConnectionState.INIT, - transaction_open: bool = False, - handle: Optional[Any] = None, - ) -> None: - self.type = type - self.name = name - self.state = state - self.credentials = credentials - self.transaction_open = transaction_open - self.handle = handle - - @property - def credentials(self): - return self._credentials - - @credentials.setter - def credentials(self, value): - self._credentials = value - - @property - def handle(self): - if isinstance(self._handle, LazyHandle): - try: - # this will actually change 'self._handle'. - self._handle.resolve(self) - except RecursionError as exc: - raise DbtInternalError( - "A connection's open() method attempted to read the handle value" - ) from exc - return self._handle - - @handle.setter - def handle(self, value): - self._handle = value - - -class LazyHandle: - """The opener must be a callable that takes a Connection object and opens the - connection, updating the handle on the Connection. - """ - - def __init__(self, opener: Callable[[Connection], Connection]) -> None: - self.opener = opener - - def resolve(self, connection: Connection) -> Connection: - fire_event( - NewConnectionOpening(connection_state=connection.state, node_info=get_node_info()) - ) - return self.opener(connection) - - -# see https://github.com/python/mypy/issues/4717#issuecomment-373932080 -# and https://github.com/python/mypy/issues/5374 -# for why we have type: ignore. Maybe someday dataclasses + abstract classes -# will work. 
-@dataclass # type: ignore -class Credentials(ExtensibleDbtClassMixin, Replaceable, metaclass=abc.ABCMeta): - database: str - schema: str - _ALIASES: ClassVar[Dict[str, str]] = field(default={}, init=False) - - @abc.abstractproperty - def type(self) -> str: - raise NotImplementedError("type not implemented for base credentials class") - - @property - def unique_field(self) -> str: - """Hashed and included in anonymous telemetry to track adapter adoption. - Return the field from Credentials that can uniquely identify - one team/organization using this adapter - """ - raise NotImplementedError("unique_field not implemented for base credentials class") - - def hashed_unique_field(self) -> str: - return md5(self.unique_field) - - def connection_info(self, *, with_aliases: bool = False) -> Iterable[Tuple[str, Any]]: - """Return an ordered iterator of key/value pairs for pretty-printing.""" - as_dict = self.to_dict(omit_none=False) - connection_keys = set(self._connection_keys()) - aliases: List[str] = [] - if with_aliases: - aliases = [k for k, v in self._ALIASES.items() if v in connection_keys] - for key in itertools.chain(self._connection_keys(), aliases): - if key in as_dict: - yield key, as_dict[key] - - @abc.abstractmethod - def _connection_keys(self) -> Tuple[str, ...]: - raise NotImplementedError - - @classmethod - def __pre_deserialize__(cls, data): - data = super().__pre_deserialize__(data) - # Need to fixup dbname => database, pass => password - data = cls.translate_aliases(data) - return data - - @classmethod - def translate_aliases(cls, kwargs: Dict[str, Any], recurse: bool = False) -> Dict[str, Any]: - return translate_aliases(kwargs, cls._ALIASES, recurse) - - def __post_serialize__(self, dct): - # no super() -- do we need it? - if self._ALIASES: - dct.update( - { - new_name: dct[canonical_name] - for new_name, canonical_name in self._ALIASES.items() - if canonical_name in dct - } - ) - return dct - - -class HasCredentials(Protocol): - credentials: Credentials - profile_name: str - target_name: str - threads: int - - def to_target_dict(self): - raise NotImplementedError("to_target_dict not implemented") - - -DEFAULT_QUERY_COMMENT = """ -{%- set comment_dict = {} -%} -{%- do comment_dict.update( - app='dbt', - dbt_version=dbt_version, - profile_name=target.get('profile_name'), - target_name=target.get('target_name'), -) -%} -{%- if node is not none -%} - {%- do comment_dict.update( - node_id=node.unique_id, - ) -%} -{% else %} - {# in the node context, the connection name is the node_id #} - {%- do comment_dict.update(connection_name=connection_name) -%} -{%- endif -%} -{{ return(tojson(comment_dict)) }} -""" - - -@dataclass -class QueryComment(dbtClassMixin): - comment: str = DEFAULT_QUERY_COMMENT - append: bool = False - job_label: bool = field(default=False, metadata={"alias": "job-label"}) - - -class AdapterRequiredConfig(HasCredentials, Protocol): - project_name: str - query_comment: QueryComment - cli_vars: Dict[str, Any] - target_path: str - log_cache_events: bool diff --git a/core/dbt/adapters/contracts/macros.py b/core/dbt/adapters/contracts/macros.py deleted file mode 100644 index 6ffe58c1be2..00000000000 --- a/core/dbt/adapters/contracts/macros.py +++ /dev/null @@ -1,11 +0,0 @@ -from typing import Optional -from typing_extensions import Protocol - -from dbt_common.clients.jinja import MacroProtocol - - -class MacroResolverProtocol(Protocol): - def find_macro_by_name( - self, name: str, root_project_name: str, package: Optional[str] - ) -> Optional[MacroProtocol]: - raise 
NotImplementedError("find_macro_by_name not implemented")
diff --git a/core/dbt/adapters/contracts/relation.py b/core/dbt/adapters/contracts/relation.py
deleted file mode 100644
index 8fcb8b43edc..00000000000
--- a/core/dbt/adapters/contracts/relation.py
+++ /dev/null
@@ -1,129 +0,0 @@
-from collections.abc import Mapping
-from dataclasses import dataclass
-from typing import (
-    Optional,
-    Dict,
-)
-from typing_extensions import Protocol
-
-from dbt_common.dataclass_schema import dbtClassMixin, StrEnum
-
-from dbt_common.contracts.util import Replaceable
-from dbt_common.exceptions import CompilationError, DataclassNotDictError
-from dbt_common.utils import deep_merge
-
-
-class RelationType(StrEnum):
-    Table = "table"
-    View = "view"
-    CTE = "cte"
-    MaterializedView = "materialized_view"
-    External = "external"
-    Ephemeral = "ephemeral"
-
-
-class RelationConfig(Protocol):
-    name: str
-    database: str
-    schema: str
-    identifier: str
-    quoting_dict: Dict[str, bool]
-    config: Dict[str, str]
-
-
-class ComponentName(StrEnum):
-    Database = "database"
-    Schema = "schema"
-    Identifier = "identifier"
-
-
-class HasQuoting(Protocol):
-    quoting: Dict[str, bool]
-
-
-class FakeAPIObject(dbtClassMixin, Replaceable, Mapping):
-    # override the mapping truthiness, len is always >1
-    def __bool__(self):
-        return True
-
-    def __getitem__(self, key):
-        try:
-            return getattr(self, key)
-        except AttributeError:
-            raise KeyError(key) from None
-
-    def __iter__(self):
-        raise DataclassNotDictError(self)
-
-    def __len__(self):
-        raise DataclassNotDictError(self)
-
-    def incorporate(self, **kwargs):
-        value = self.to_dict(omit_none=True)
-        value = deep_merge(value, kwargs)
-        return self.from_dict(value)
-
-
-@dataclass
-class Policy(FakeAPIObject):
-    database: bool = True
-    schema: bool = True
-    identifier: bool = True
-
-    def get_part(self, key: ComponentName) -> bool:
-        if key == ComponentName.Database:
-            return self.database
-        elif key == ComponentName.Schema:
-            return self.schema
-        elif key == ComponentName.Identifier:
-            return self.identifier
-        else:
-            raise ValueError(
-                "Got a key of {}, expected one of {}".format(key, list(ComponentName))
-            )
-
-    def replace_dict(self, dct: Dict[ComponentName, bool]):
-        kwargs: Dict[str, bool] = {}
-        for k, v in dct.items():
-            kwargs[str(k)] = v
-        return self.replace(**kwargs)
-
-
-@dataclass
-class Path(FakeAPIObject):
-    database: Optional[str] = None
-    schema: Optional[str] = None
-    identifier: Optional[str] = None
-
-    def __post_init__(self):
-        # handle pesky jinja2.Undefined sneaking in here and messing up render
-        if not isinstance(self.database, (type(None), str)):
-            raise CompilationError("Got an invalid path database: {}".format(self.database))
-        if not isinstance(self.schema, (type(None), str)):
-            raise CompilationError("Got an invalid path schema: {}".format(self.schema))
-        if not isinstance(self.identifier, (type(None), str)):
-            raise CompilationError("Got an invalid path identifier: {}".format(self.identifier))
-
-    def get_lowered_part(self, key: ComponentName) -> Optional[str]:
-        part = self.get_part(key)
-        if part is not None:
-            part = part.lower()
-        return part
-
-    def get_part(self, key: ComponentName) -> Optional[str]:
-        if key == ComponentName.Database:
-            return self.database
-        elif key == ComponentName.Schema:
-            return self.schema
-        elif key == ComponentName.Identifier:
-            return self.identifier
-        else:
-            raise ValueError(
-                "Got a key of {}, expected one of {}".format(key, list(ComponentName))
-            )
-
-    def replace_dict(self, dct: Dict[ComponentName, str]):
-        kwargs: Dict[str, str] = {}
-        for k, v in dct.items():
-            kwargs[str(k)] = v
-        return self.replace(**kwargs)
diff --git a/core/dbt/adapters/events/README.md b/core/dbt/adapters/events/README.md
deleted file mode 100644
index cdb7852aed6..00000000000
--- a/core/dbt/adapters/events/README.md
+++ /dev/null
@@ -1,57 +0,0 @@
-# Events Module
-The Events module is responsible for communicating internal dbt structures into a consumable interface. Because the "event" classes are based entirely on protobuf definitions, the interface is clearly defined whether or not protobufs are used to consume it. The protobuf message definitions are compiled into Python classes with google protobuf (we previously used Betterproto; see the note on the switch below).
-
-# Using the Events Module
-The event module provides types that represent what is happening in dbt in `events.types`. These types are intended to represent an exhaustive list of all things happening within dbt that will need to be logged, streamed, or printed. To fire an event, `common.events.functions::fire_event` is the entry point to the module from everywhere in dbt.
-
-# Logging
-When events are processed via `fire_event`, nearly everything is logged. Even when the user has not enabled the debug flag, all debug messages are still logged to the file. However, some events are particularly time consuming to construct because they return a huge amount of data. Today, the only messages in this category are cache events, and they are only logged if the `--log-cache-events` flag is on. This is important because these messages should not be created unless they are going to be logged, since constructing them causes a noticeable performance degradation. These events use the `fire_event_if` function.
-
-# Adding a New Event
-* Add a new message in adapter_types.proto, and a second message with the same name + "Msg". The "Msg" message should have two fields, an "info" field of AdapterCommonEventInfo, and a "data" field referring to the message name without "Msg"
-* Run the protoc compiler to update adapter_types_pb2.py: make adapter_proto_types
-* Add a wrapping class in core/dbt/adapters/events/types.py with a Level superclass plus `code` and `message` methods
-
-We have switched from using betterproto to using google protobuf, because of a lack of support for Struct fields in betterproto.
-
-The google protobuf interface is janky and very much non-Pythonic. The "generated" classes in adapter_types_pb2.py do not resemble regular Python classes. They do not have normal constructors; they can only be constructed empty. They can be "filled" by setting fields individually or using a json_format method like ParseDict. We have wrapped the logging events with a class (in types.py) which allows using a constructor -- keyword arguments only, no positional parameters.
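As a concrete sketch of the firing pattern described above, using the same calls that appear in core/dbt/adapters/cache.py later in this patch; `relation`, `log_cache_events`, and `cache` are stand-ins for the surrounding state at a real call site:

```python
# Sketch of the fire_event / fire_event_if pattern from this README.
from dbt.adapters.reference_keys import _make_ref_key_dict
from dbt_common.events.functions import fire_event, fire_event_if
from dbt.adapters.events.types import CacheAction, CacheDumpGraph

# cheap message: constructed unconditionally
fire_event(CacheAction(action="add_relation", ref_key=_make_ref_key_dict(relation)))

# expensive message: the lambda defers building the graph dump until we
# know --log-cache-events is enabled
fire_event_if(
    log_cache_events,
    lambda: CacheDumpGraph(before_after="before", action="adding", dump=cache.dump_graph()),
)
```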
-
-## Required for Every Event
-
-- a `code` method that returns a code unique across events
-- assign a log level by using the Level mixin: `DebugLevel`, `InfoLevel`, `WarnLevel`, or `ErrorLevel`
-- a `message()` method
-
-Example
-```
-class PartialParsingDeletedExposure(DebugLevel):
-    def code(self):
-        return "I049"
-
-    def message(self) -> str:
-        return f"Partial parsing: deleted exposure {self.unique_id}"
-
-```
-
-
-# Adapter Maintainers
-To integrate existing log messages from adapters, you likely have a line of code like this in your adapter already:
-```python
-from dbt.logger import GLOBAL_LOGGER as logger
-```
-
-Simply change it to these two lines with your adapter's database name, and all your existing call sites will now use the new system for v1.0:
-
-```python
-
-from dbt.adapters.events.logging import AdapterLogger
-
-logger = AdapterLogger("<database name>")
-# e.g. AdapterLogger("Snowflake")
-```
-
-## Compiling adapter_types.proto
-
-After adding a new message in `adapter_types.proto`, either:
-- In the repository root directory: `make adapter_proto_types`
-- In the `core/dbt/adapters/events` directory: `protoc -I=. --python_out=. adapter_types.proto`
diff --git a/core/dbt/adapters/events/__init__.py b/core/dbt/adapters/events/__init__.py
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/core/dbt/adapters/events/adapter_types.proto b/core/dbt/adapters/events/adapter_types.proto
deleted file mode 100644
index aa0b507c41e..00000000000
--- a/core/dbt/adapters/events/adapter_types.proto
+++ /dev/null
@@ -1,517 +0,0 @@
-syntax = "proto3";
-
-package proto_types;
-
-import "google/protobuf/timestamp.proto";
-import "google/protobuf/struct.proto";
-
-// Common event info
-message AdapterCommonEventInfo {
-    string name = 1;
-    string code = 2;
-    string msg = 3;
-    string level = 4;
-    string invocation_id = 5;
-    int32 pid = 6;
-    string thread = 7;
-    google.protobuf.Timestamp ts = 8;
-    map<string, string> extra = 9;
-    string category = 10;
-}
-
-// AdapterNodeRelation
-message AdapterNodeRelation {
-    string database = 10;
-    string schema = 11;
-    string alias = 12;
-    string relation_name = 13;
-}
-
-// NodeInfo
-message AdapterNodeInfo {
-    string node_path = 1;
-    string node_name = 2;
-    string unique_id = 3;
-    string resource_type = 4;
-    string materialized = 5;
-    string node_status = 6;
-    string node_started_at = 7;
-    string node_finished_at = 8;
-    google.protobuf.Struct meta = 9;
-    AdapterNodeRelation node_relation = 10;
-}
-
-// ReferenceKey
-message ReferenceKeyMsg {
-    string database = 1;
-    string schema = 2;
-    string identifier = 3;
-}
-
-// D - Deprecations
-
-// D005
-message AdapterDeprecationWarning {
-    string old_name = 1;
-    string new_name = 2;
-}
-
-message AdapterDeprecationWarningMsg {
-    AdapterCommonEventInfo info = 1;
-    AdapterDeprecationWarning data = 2;
-}
-
-// D012
-message CollectFreshnessReturnSignature {
-}
-
-message CollectFreshnessReturnSignatureMsg {
-    AdapterCommonEventInfo info = 1;
-    CollectFreshnessReturnSignature data = 2;
-}
-
-// E - DB Adapter
-
-// E001
-message AdapterEventDebug {
-    AdapterNodeInfo node_info = 1;
-    string name = 2;
-    string base_msg = 3;
-    google.protobuf.ListValue args = 4;
-}
-
-message AdapterEventDebugMsg {
-    AdapterCommonEventInfo info = 1;
-    AdapterEventDebug data = 2;
-}
-
-// E002
-message AdapterEventInfo {
-    AdapterNodeInfo node_info = 1;
-    string name = 2;
-    string base_msg = 3;
-    google.protobuf.ListValue args = 4;
-}
-
-message AdapterEventInfoMsg {
-    AdapterCommonEventInfo info = 1;
-    AdapterEventInfo data = 2;
-}
-
-// E003
-message AdapterEventWarning {
- 
AdapterNodeInfo node_info = 1; - string name = 2; - string base_msg = 3; - google.protobuf.ListValue args = 4; -} - -message AdapterEventWarningMsg { - AdapterCommonEventInfo info = 1; - AdapterEventWarning data = 2; -} - -// E004 -message AdapterEventError { - AdapterNodeInfo node_info = 1; - string name = 2; - string base_msg = 3; - google.protobuf.ListValue args = 4; - string exc_info = 5; -} - -message AdapterEventErrorMsg { - AdapterCommonEventInfo info = 1; - AdapterEventError data = 2; -} - -// E005 -message NewConnection { - AdapterNodeInfo node_info = 1; - string conn_type = 2; - string conn_name = 3; -} - -message NewConnectionMsg { - AdapterCommonEventInfo info = 1; - NewConnection data = 2; -} - -// E006 -message ConnectionReused { - string conn_name = 1; - string orig_conn_name = 2; -} - -message ConnectionReusedMsg { - AdapterCommonEventInfo info = 1; - ConnectionReused data = 2; -} - -// E007 -message ConnectionLeftOpenInCleanup { - string conn_name = 1; -} - -message ConnectionLeftOpenInCleanupMsg { - AdapterCommonEventInfo info = 1; - ConnectionLeftOpenInCleanup data = 2; -} - -// E008 -message ConnectionClosedInCleanup { - string conn_name = 1; -} - -message ConnectionClosedInCleanupMsg { - AdapterCommonEventInfo info = 1; - ConnectionClosedInCleanup data = 2; -} - -// E009 -message RollbackFailed { - AdapterNodeInfo node_info = 1; - string conn_name = 2; - string exc_info = 3; -} - -message RollbackFailedMsg { - AdapterCommonEventInfo info = 1; - RollbackFailed data = 2; -} - -// E010 -message ConnectionClosed { - AdapterNodeInfo node_info = 1; - string conn_name = 2; -} - -message ConnectionClosedMsg { - AdapterCommonEventInfo info = 1; - ConnectionClosed data = 2; -} - -// E011 -message ConnectionLeftOpen { - AdapterNodeInfo node_info = 1; - string conn_name = 2; -} - -message ConnectionLeftOpenMsg { - AdapterCommonEventInfo info = 1; - ConnectionLeftOpen data = 2; -} - -// E012 -message Rollback { - AdapterNodeInfo node_info = 1; - string conn_name = 2; -} - -message RollbackMsg { - AdapterCommonEventInfo info = 1; - Rollback data = 2; -} - -// E013 -message CacheMiss { - string conn_name = 1; - string database = 2; - string schema = 3; -} - -message CacheMissMsg { - AdapterCommonEventInfo info = 1; - CacheMiss data = 2; -} - -// E014 -message ListRelations { - string database = 1; - string schema = 2; - repeated ReferenceKeyMsg relations = 3; -} - -message ListRelationsMsg { - AdapterCommonEventInfo info = 1; - ListRelations data = 2; -} - -// E015 -message ConnectionUsed { - AdapterNodeInfo node_info = 1; - string conn_type = 2; - string conn_name = 3; -} - -message ConnectionUsedMsg { - AdapterCommonEventInfo info = 1; - ConnectionUsed data = 2; -} - -// E016 -message SQLQuery { - AdapterNodeInfo node_info = 1; - string conn_name = 2; - string sql = 3; -} - -message SQLQueryMsg { - AdapterCommonEventInfo info = 1; - SQLQuery data = 2; -} - -// E017 -message SQLQueryStatus { - AdapterNodeInfo node_info = 1; - string status = 2; - float elapsed = 3; -} - -message SQLQueryStatusMsg { - AdapterCommonEventInfo info = 1; - SQLQueryStatus data = 2; -} - -// E018 -message SQLCommit { - AdapterNodeInfo node_info = 1; - string conn_name = 2; -} - -message SQLCommitMsg { - AdapterCommonEventInfo info = 1; - SQLCommit data = 2; -} - -// E019 -message ColTypeChange { - string orig_type = 1; - string new_type = 2; - ReferenceKeyMsg table = 3; -} - -message ColTypeChangeMsg { - AdapterCommonEventInfo info = 1; - ColTypeChange data = 2; -} - -// E020 -message SchemaCreation { - 
ReferenceKeyMsg relation = 1; -} - -message SchemaCreationMsg { - AdapterCommonEventInfo info = 1; - SchemaCreation data = 2; -} - -// E021 -message SchemaDrop { - ReferenceKeyMsg relation = 1; -} - -message SchemaDropMsg { - AdapterCommonEventInfo info = 1; - SchemaDrop data = 2; -} - -// E022 -message CacheAction { - string action = 1; - ReferenceKeyMsg ref_key = 2; - ReferenceKeyMsg ref_key_2 = 3; - ReferenceKeyMsg ref_key_3 = 4; - repeated ReferenceKeyMsg ref_list = 5; -} - -message CacheActionMsg { - AdapterCommonEventInfo info = 1; - CacheAction data = 2; -} - -// Skipping E023, E024, E025, E026, E027, E028, E029, E0230 - -// E031 -message CacheDumpGraph { - map dump = 1; - string before_after = 2; - string action = 3; -} - -message CacheDumpGraphMsg { - AdapterCommonEventInfo info = 1; - CacheDumpGraph data = 2; -} - - -// Skipping E032, E033, E034 - - - -// E034 -message AdapterRegistered { - string adapter_name = 1; - string adapter_version = 2; -} - -message AdapterRegisteredMsg { - AdapterCommonEventInfo info = 1; - AdapterRegistered data = 2; -} - -// E035 -message AdapterImportError { - string exc = 1; -} - -message AdapterImportErrorMsg { - AdapterCommonEventInfo info = 1; - AdapterImportError data = 2; -} - -// E036 -message PluginLoadError { - string exc_info = 1; -} - -message PluginLoadErrorMsg { - AdapterCommonEventInfo info = 1; - PluginLoadError data = 2; -} - -// E037 -message NewConnectionOpening { - AdapterNodeInfo node_info = 1; - string connection_state = 2; -} - -message NewConnectionOpeningMsg { - AdapterCommonEventInfo info = 1; - NewConnectionOpening data = 2; -} - -// E038 -message CodeExecution { - string conn_name = 1; - string code_content = 2; -} - -message CodeExecutionMsg { - AdapterCommonEventInfo info = 1; - CodeExecution data = 2; -} - -// E039 -message CodeExecutionStatus { - string status = 1; - float elapsed = 2; -} - -message CodeExecutionStatusMsg { - AdapterCommonEventInfo info = 1; - CodeExecutionStatus data = 2; -} - -// E040 -message CatalogGenerationError { - string exc = 1; -} - -message CatalogGenerationErrorMsg { - AdapterCommonEventInfo info = 1; - CatalogGenerationError data = 2; -} - -// E041 -message WriteCatalogFailure { - int32 num_exceptions = 1; -} - -message WriteCatalogFailureMsg { - AdapterCommonEventInfo info = 1; - WriteCatalogFailure data = 2; -} - -// E042 -message CatalogWritten { - string path = 1; -} - -message CatalogWrittenMsg { - AdapterCommonEventInfo info = 1; - CatalogWritten data = 2; -} - -// E043 -message CannotGenerateDocs { -} - -message CannotGenerateDocsMsg { - AdapterCommonEventInfo info = 1; - CannotGenerateDocs data = 2; -} - -// E044 -message BuildingCatalog { -} - -message BuildingCatalogMsg { - AdapterCommonEventInfo info = 1; - BuildingCatalog data = 2; -} - -// E045 -message DatabaseErrorRunningHook { - string hook_type = 1; -} - -message DatabaseErrorRunningHookMsg { - AdapterCommonEventInfo info = 1; - DatabaseErrorRunningHook data = 2; -} - -// E046 -message HooksRunning { - int32 num_hooks = 1; - string hook_type = 2; -} - -message HooksRunningMsg { - AdapterCommonEventInfo info = 1; - HooksRunning data = 2; -} - -// E047 -message FinishedRunningStats { - string stat_line = 1; - string execution = 2; - float execution_time = 3; -} - -message FinishedRunningStatsMsg { - AdapterCommonEventInfo info = 1; - FinishedRunningStats data = 2; -} - -// E048 -message ConstraintNotEnforced { - string constraint = 1; - string adapter = 2; -} - -message ConstraintNotEnforcedMsg { - AdapterCommonEventInfo info 
= 1; - ConstraintNotEnforced data = 2; -} - -// E049 -message ConstraintNotSupported { - string constraint = 1; - string adapter = 2; -} - -message ConstraintNotSupportedMsg { - AdapterCommonEventInfo info = 1; - ConstraintNotSupported data = 2; -} diff --git a/core/dbt/adapters/events/adapter_types_pb2.py b/core/dbt/adapters/events/adapter_types_pb2.py deleted file mode 100644 index 59d665dbc1d..00000000000 --- a/core/dbt/adapters/events/adapter_types_pb2.py +++ /dev/null @@ -1,205 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: adapter_types.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x61\x64\x61pter_types.proto\x12\x0bproto_types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xab\x02\n\x16\x41\x64\x61pterCommonEventInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\x0b\n\x03msg\x18\x03 \x01(\t\x12\r\n\x05level\x18\x04 \x01(\t\x12\x15\n\rinvocation_id\x18\x05 \x01(\t\x12\x0b\n\x03pid\x18\x06 \x01(\x05\x12\x0e\n\x06thread\x18\x07 \x01(\t\x12&\n\x02ts\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12=\n\x05\x65xtra\x18\t \x03(\x0b\x32..proto_types.AdapterCommonEventInfo.ExtraEntry\x12\x10\n\x08\x63\x61tegory\x18\n \x01(\t\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"]\n\x13\x41\x64\x61pterNodeRelation\x12\x10\n\x08\x64\x61tabase\x18\n \x01(\t\x12\x0e\n\x06schema\x18\x0b \x01(\t\x12\r\n\x05\x61lias\x18\x0c \x01(\t\x12\x15\n\rrelation_name\x18\r \x01(\t\"\x9f\x02\n\x0f\x41\x64\x61pterNodeInfo\x12\x11\n\tnode_path\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x11\n\tunique_id\x18\x03 \x01(\t\x12\x15\n\rresource_type\x18\x04 \x01(\t\x12\x14\n\x0cmaterialized\x18\x05 \x01(\t\x12\x13\n\x0bnode_status\x18\x06 \x01(\t\x12\x17\n\x0fnode_started_at\x18\x07 \x01(\t\x12\x18\n\x10node_finished_at\x18\x08 \x01(\t\x12%\n\x04meta\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x37\n\rnode_relation\x18\n \x01(\x0b\x32 .proto_types.AdapterNodeRelation\"G\n\x0fReferenceKeyMsg\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12\x12\n\nidentifier\x18\x03 \x01(\t\"?\n\x19\x41\x64\x61pterDeprecationWarning\x12\x10\n\x08old_name\x18\x01 \x01(\t\x12\x10\n\x08new_name\x18\x02 \x01(\t\"\x87\x01\n\x1c\x41\x64\x61pterDeprecationWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.AdapterDeprecationWarning\"!\n\x1f\x43ollectFreshnessReturnSignature\"\x93\x01\n\"CollectFreshnessReturnSignatureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.CollectFreshnessReturnSignature\"\x8e\x01\n\x11\x41\x64\x61pterEventDebug\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 
\x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"w\n\x14\x41\x64\x61pterEventDebugMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventDebug\"\x8d\x01\n\x10\x41\x64\x61pterEventInfo\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"u\n\x13\x41\x64\x61pterEventInfoMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.AdapterEventInfo\"\x90\x01\n\x13\x41\x64\x61pterEventWarning\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"{\n\x16\x41\x64\x61pterEventWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.AdapterEventWarning\"\xa0\x01\n\x11\x41\x64\x61pterEventError\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x10\n\x08\x65xc_info\x18\x05 \x01(\t\"w\n\x14\x41\x64\x61pterEventErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventError\"f\n\rNewConnection\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"o\n\x10NewConnectionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.NewConnection\"=\n\x10\x43onnectionReused\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x16\n\x0eorig_conn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionReusedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionReused\"0\n\x1b\x43onnectionLeftOpenInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x8b\x01\n\x1e\x43onnectionLeftOpenInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.ConnectionLeftOpenInCleanup\".\n\x19\x43onnectionClosedInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x87\x01\n\x1c\x43onnectionClosedInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.ConnectionClosedInCleanup\"f\n\x0eRollbackFailed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"q\n\x11RollbackFailedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.RollbackFailed\"V\n\x10\x43onnectionClosed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionClosedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\x1d.proto_types.ConnectionClosed\"X\n\x12\x43onnectionLeftOpen\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"y\n\x15\x43onnectionLeftOpenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.ConnectionLeftOpen\"N\n\x08Rollback\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"e\n\x0bRollbackMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.Rollback\"@\n\tCacheMiss\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\"g\n\x0c\x43\x61\x63heMissMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.CacheMiss\"b\n\rListRelations\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12/\n\trelations\x18\x03 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10ListRelationsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ListRelations\"g\n\x0e\x43onnectionUsed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"q\n\x11\x43onnectionUsedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.ConnectionUsed\"[\n\x08SQLQuery\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x0b\n\x03sql\x18\x03 \x01(\t\"e\n\x0bSQLQueryMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.SQLQuery\"b\n\x0eSQLQueryStatus\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0e\n\x06status\x18\x02 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x03 \x01(\x02\"q\n\x11SQLQueryStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SQLQueryStatus\"O\n\tSQLCommit\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"g\n\x0cSQLCommitMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.SQLCommit\"a\n\rColTypeChange\x12\x11\n\torig_type\x18\x01 \x01(\t\x12\x10\n\x08new_type\x18\x02 \x01(\t\x12+\n\x05table\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10\x43olTypeChangeMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ColTypeChange\"@\n\x0eSchemaCreation\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"q\n\x11SchemaCreationMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SchemaCreation\"<\n\nSchemaDrop\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"i\n\rSchemaDropMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12%\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\x17.proto_types.SchemaDrop\"\xde\x01\n\x0b\x43\x61\x63heAction\x12\x0e\n\x06\x61\x63tion\x18\x01 \x01(\t\x12-\n\x07ref_key\x18\x02 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_2\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_3\x18\x04 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12.\n\x08ref_list\x18\x05 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"k\n\x0e\x43\x61\x63heActionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.CacheAction\"\x98\x01\n\x0e\x43\x61\x63heDumpGraph\x12\x33\n\x04\x64ump\x18\x01 \x03(\x0b\x32%.proto_types.CacheDumpGraph.DumpEntry\x12\x14\n\x0c\x62\x65\x66ore_after\x18\x02 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\x1a+\n\tDumpEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"q\n\x11\x43\x61\x63heDumpGraphMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CacheDumpGraph\"B\n\x11\x41\x64\x61pterRegistered\x12\x14\n\x0c\x61\x64\x61pter_name\x18\x01 \x01(\t\x12\x17\n\x0f\x61\x64\x61pter_version\x18\x02 \x01(\t\"w\n\x14\x41\x64\x61pterRegisteredMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterRegistered\"!\n\x12\x41\x64\x61pterImportError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"y\n\x15\x41\x64\x61pterImportErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.AdapterImportError\"#\n\x0fPluginLoadError\x12\x10\n\x08\x65xc_info\x18\x01 \x01(\t\"s\n\x12PluginLoadErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.PluginLoadError\"a\n\x14NewConnectionOpening\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x18\n\x10\x63onnection_state\x18\x02 \x01(\t\"}\n\x17NewConnectionOpeningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.NewConnectionOpening\"8\n\rCodeExecution\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x14\n\x0c\x63ode_content\x18\x02 \x01(\t\"o\n\x10\x43odeExecutionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.CodeExecution\"6\n\x13\x43odeExecutionStatus\x12\x0e\n\x06status\x18\x01 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x02 \x01(\x02\"{\n\x16\x43odeExecutionStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.CodeExecutionStatus\"%\n\x16\x43\x61talogGenerationError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"\x81\x01\n\x19\x43\x61talogGenerationErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.CatalogGenerationError\"-\n\x13WriteCatalogFailure\x12\x16\n\x0enum_exceptions\x18\x01 \x01(\x05\"{\n\x16WriteCatalogFailureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.WriteCatalogFailure\"\x1e\n\x0e\x43\x61talogWritten\x12\x0c\n\x04path\x18\x01 \x01(\t\"q\n\x11\x43\x61talogWrittenMsg\x12\x31\n\x04info\x18\x01 
\x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CatalogWritten\"\x14\n\x12\x43\x61nnotGenerateDocs\"y\n\x15\x43\x61nnotGenerateDocsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.CannotGenerateDocs\"\x11\n\x0f\x42uildingCatalog\"s\n\x12\x42uildingCatalogMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.BuildingCatalog\"-\n\x18\x44\x61tabaseErrorRunningHook\x12\x11\n\thook_type\x18\x01 \x01(\t\"\x85\x01\n\x1b\x44\x61tabaseErrorRunningHookMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.DatabaseErrorRunningHook\"4\n\x0cHooksRunning\x12\x11\n\tnum_hooks\x18\x01 \x01(\x05\x12\x11\n\thook_type\x18\x02 \x01(\t\"m\n\x0fHooksRunningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.HooksRunning\"T\n\x14\x46inishedRunningStats\x12\x11\n\tstat_line\x18\x01 \x01(\t\x12\x11\n\texecution\x18\x02 \x01(\t\x12\x16\n\x0e\x65xecution_time\x18\x03 \x01(\x02\"}\n\x17\x46inishedRunningStatsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.FinishedRunningStats\"<\n\x15\x43onstraintNotEnforced\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x7f\n\x18\x43onstraintNotEnforcedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.ConstraintNotEnforced\"=\n\x16\x43onstraintNotSupported\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x81\x01\n\x19\x43onstraintNotSupportedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.ConstraintNotSupportedb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'adapter_types_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - _ADAPTERCOMMONEVENTINFO_EXTRAENTRY._options = None - _ADAPTERCOMMONEVENTINFO_EXTRAENTRY._serialized_options = b'8\001' - _CACHEDUMPGRAPH_DUMPENTRY._options = None - _CACHEDUMPGRAPH_DUMPENTRY._serialized_options = b'8\001' - _ADAPTERCOMMONEVENTINFO._serialized_start=100 - _ADAPTERCOMMONEVENTINFO._serialized_end=399 - _ADAPTERCOMMONEVENTINFO_EXTRAENTRY._serialized_start=355 - _ADAPTERCOMMONEVENTINFO_EXTRAENTRY._serialized_end=399 - _ADAPTERNODERELATION._serialized_start=401 - _ADAPTERNODERELATION._serialized_end=494 - _ADAPTERNODEINFO._serialized_start=497 - _ADAPTERNODEINFO._serialized_end=784 - _REFERENCEKEYMSG._serialized_start=786 - _REFERENCEKEYMSG._serialized_end=857 - _ADAPTERDEPRECATIONWARNING._serialized_start=859 - _ADAPTERDEPRECATIONWARNING._serialized_end=922 - _ADAPTERDEPRECATIONWARNINGMSG._serialized_start=925 - _ADAPTERDEPRECATIONWARNINGMSG._serialized_end=1060 - _COLLECTFRESHNESSRETURNSIGNATURE._serialized_start=1062 - _COLLECTFRESHNESSRETURNSIGNATURE._serialized_end=1095 - _COLLECTFRESHNESSRETURNSIGNATUREMSG._serialized_start=1098 - _COLLECTFRESHNESSRETURNSIGNATUREMSG._serialized_end=1245 - _ADAPTEREVENTDEBUG._serialized_start=1248 - _ADAPTEREVENTDEBUG._serialized_end=1390 - 
_ADAPTEREVENTDEBUGMSG._serialized_start=1392 - _ADAPTEREVENTDEBUGMSG._serialized_end=1511 - _ADAPTEREVENTINFO._serialized_start=1514 - _ADAPTEREVENTINFO._serialized_end=1655 - _ADAPTEREVENTINFOMSG._serialized_start=1657 - _ADAPTEREVENTINFOMSG._serialized_end=1774 - _ADAPTEREVENTWARNING._serialized_start=1777 - _ADAPTEREVENTWARNING._serialized_end=1921 - _ADAPTEREVENTWARNINGMSG._serialized_start=1923 - _ADAPTEREVENTWARNINGMSG._serialized_end=2046 - _ADAPTEREVENTERROR._serialized_start=2049 - _ADAPTEREVENTERROR._serialized_end=2209 - _ADAPTEREVENTERRORMSG._serialized_start=2211 - _ADAPTEREVENTERRORMSG._serialized_end=2330 - _NEWCONNECTION._serialized_start=2332 - _NEWCONNECTION._serialized_end=2434 - _NEWCONNECTIONMSG._serialized_start=2436 - _NEWCONNECTIONMSG._serialized_end=2547 - _CONNECTIONREUSED._serialized_start=2549 - _CONNECTIONREUSED._serialized_end=2610 - _CONNECTIONREUSEDMSG._serialized_start=2612 - _CONNECTIONREUSEDMSG._serialized_end=2729 - _CONNECTIONLEFTOPENINCLEANUP._serialized_start=2731 - _CONNECTIONLEFTOPENINCLEANUP._serialized_end=2779 - _CONNECTIONLEFTOPENINCLEANUPMSG._serialized_start=2782 - _CONNECTIONLEFTOPENINCLEANUPMSG._serialized_end=2921 - _CONNECTIONCLOSEDINCLEANUP._serialized_start=2923 - _CONNECTIONCLOSEDINCLEANUP._serialized_end=2969 - _CONNECTIONCLOSEDINCLEANUPMSG._serialized_start=2972 - _CONNECTIONCLOSEDINCLEANUPMSG._serialized_end=3107 - _ROLLBACKFAILED._serialized_start=3109 - _ROLLBACKFAILED._serialized_end=3211 - _ROLLBACKFAILEDMSG._serialized_start=3213 - _ROLLBACKFAILEDMSG._serialized_end=3326 - _CONNECTIONCLOSED._serialized_start=3328 - _CONNECTIONCLOSED._serialized_end=3414 - _CONNECTIONCLOSEDMSG._serialized_start=3416 - _CONNECTIONCLOSEDMSG._serialized_end=3533 - _CONNECTIONLEFTOPEN._serialized_start=3535 - _CONNECTIONLEFTOPEN._serialized_end=3623 - _CONNECTIONLEFTOPENMSG._serialized_start=3625 - _CONNECTIONLEFTOPENMSG._serialized_end=3746 - _ROLLBACK._serialized_start=3748 - _ROLLBACK._serialized_end=3826 - _ROLLBACKMSG._serialized_start=3828 - _ROLLBACKMSG._serialized_end=3929 - _CACHEMISS._serialized_start=3931 - _CACHEMISS._serialized_end=3995 - _CACHEMISSMSG._serialized_start=3997 - _CACHEMISSMSG._serialized_end=4100 - _LISTRELATIONS._serialized_start=4102 - _LISTRELATIONS._serialized_end=4200 - _LISTRELATIONSMSG._serialized_start=4202 - _LISTRELATIONSMSG._serialized_end=4313 - _CONNECTIONUSED._serialized_start=4315 - _CONNECTIONUSED._serialized_end=4418 - _CONNECTIONUSEDMSG._serialized_start=4420 - _CONNECTIONUSEDMSG._serialized_end=4533 - _SQLQUERY._serialized_start=4535 - _SQLQUERY._serialized_end=4626 - _SQLQUERYMSG._serialized_start=4628 - _SQLQUERYMSG._serialized_end=4729 - _SQLQUERYSTATUS._serialized_start=4731 - _SQLQUERYSTATUS._serialized_end=4829 - _SQLQUERYSTATUSMSG._serialized_start=4831 - _SQLQUERYSTATUSMSG._serialized_end=4944 - _SQLCOMMIT._serialized_start=4946 - _SQLCOMMIT._serialized_end=5025 - _SQLCOMMITMSG._serialized_start=5027 - _SQLCOMMITMSG._serialized_end=5130 - _COLTYPECHANGE._serialized_start=5132 - _COLTYPECHANGE._serialized_end=5229 - _COLTYPECHANGEMSG._serialized_start=5231 - _COLTYPECHANGEMSG._serialized_end=5342 - _SCHEMACREATION._serialized_start=5344 - _SCHEMACREATION._serialized_end=5408 - _SCHEMACREATIONMSG._serialized_start=5410 - _SCHEMACREATIONMSG._serialized_end=5523 - _SCHEMADROP._serialized_start=5525 - _SCHEMADROP._serialized_end=5585 - _SCHEMADROPMSG._serialized_start=5587 - _SCHEMADROPMSG._serialized_end=5692 - _CACHEACTION._serialized_start=5695 - _CACHEACTION._serialized_end=5917 - 
_CACHEACTIONMSG._serialized_start=5919 - _CACHEACTIONMSG._serialized_end=6026 - _CACHEDUMPGRAPH._serialized_start=6029 - _CACHEDUMPGRAPH._serialized_end=6181 - _CACHEDUMPGRAPH_DUMPENTRY._serialized_start=6138 - _CACHEDUMPGRAPH_DUMPENTRY._serialized_end=6181 - _CACHEDUMPGRAPHMSG._serialized_start=6183 - _CACHEDUMPGRAPHMSG._serialized_end=6296 - _ADAPTERREGISTERED._serialized_start=6298 - _ADAPTERREGISTERED._serialized_end=6364 - _ADAPTERREGISTEREDMSG._serialized_start=6366 - _ADAPTERREGISTEREDMSG._serialized_end=6485 - _ADAPTERIMPORTERROR._serialized_start=6487 - _ADAPTERIMPORTERROR._serialized_end=6520 - _ADAPTERIMPORTERRORMSG._serialized_start=6522 - _ADAPTERIMPORTERRORMSG._serialized_end=6643 - _PLUGINLOADERROR._serialized_start=6645 - _PLUGINLOADERROR._serialized_end=6680 - _PLUGINLOADERRORMSG._serialized_start=6682 - _PLUGINLOADERRORMSG._serialized_end=6797 - _NEWCONNECTIONOPENING._serialized_start=6799 - _NEWCONNECTIONOPENING._serialized_end=6896 - _NEWCONNECTIONOPENINGMSG._serialized_start=6898 - _NEWCONNECTIONOPENINGMSG._serialized_end=7023 - _CODEEXECUTION._serialized_start=7025 - _CODEEXECUTION._serialized_end=7081 - _CODEEXECUTIONMSG._serialized_start=7083 - _CODEEXECUTIONMSG._serialized_end=7194 - _CODEEXECUTIONSTATUS._serialized_start=7196 - _CODEEXECUTIONSTATUS._serialized_end=7250 - _CODEEXECUTIONSTATUSMSG._serialized_start=7252 - _CODEEXECUTIONSTATUSMSG._serialized_end=7375 - _CATALOGGENERATIONERROR._serialized_start=7377 - _CATALOGGENERATIONERROR._serialized_end=7414 - _CATALOGGENERATIONERRORMSG._serialized_start=7417 - _CATALOGGENERATIONERRORMSG._serialized_end=7546 - _WRITECATALOGFAILURE._serialized_start=7548 - _WRITECATALOGFAILURE._serialized_end=7593 - _WRITECATALOGFAILUREMSG._serialized_start=7595 - _WRITECATALOGFAILUREMSG._serialized_end=7718 - _CATALOGWRITTEN._serialized_start=7720 - _CATALOGWRITTEN._serialized_end=7750 - _CATALOGWRITTENMSG._serialized_start=7752 - _CATALOGWRITTENMSG._serialized_end=7865 - _CANNOTGENERATEDOCS._serialized_start=7867 - _CANNOTGENERATEDOCS._serialized_end=7887 - _CANNOTGENERATEDOCSMSG._serialized_start=7889 - _CANNOTGENERATEDOCSMSG._serialized_end=8010 - _BUILDINGCATALOG._serialized_start=8012 - _BUILDINGCATALOG._serialized_end=8029 - _BUILDINGCATALOGMSG._serialized_start=8031 - _BUILDINGCATALOGMSG._serialized_end=8146 - _DATABASEERRORRUNNINGHOOK._serialized_start=8148 - _DATABASEERRORRUNNINGHOOK._serialized_end=8193 - _DATABASEERRORRUNNINGHOOKMSG._serialized_start=8196 - _DATABASEERRORRUNNINGHOOKMSG._serialized_end=8329 - _HOOKSRUNNING._serialized_start=8331 - _HOOKSRUNNING._serialized_end=8383 - _HOOKSRUNNINGMSG._serialized_start=8385 - _HOOKSRUNNINGMSG._serialized_end=8494 - _FINISHEDRUNNINGSTATS._serialized_start=8496 - _FINISHEDRUNNINGSTATS._serialized_end=8580 - _FINISHEDRUNNINGSTATSMSG._serialized_start=8582 - _FINISHEDRUNNINGSTATSMSG._serialized_end=8707 - _CONSTRAINTNOTENFORCED._serialized_start=8709 - _CONSTRAINTNOTENFORCED._serialized_end=8769 - _CONSTRAINTNOTENFORCEDMSG._serialized_start=8771 - _CONSTRAINTNOTENFORCEDMSG._serialized_end=8898 - _CONSTRAINTNOTSUPPORTED._serialized_start=8900 - _CONSTRAINTNOTSUPPORTED._serialized_end=8961 - _CONSTRAINTNOTSUPPORTEDMSG._serialized_start=8964 - _CONSTRAINTNOTSUPPORTEDMSG._serialized_end=9093 -# @@protoc_insertion_point(module_scope) diff --git a/core/dbt/adapters/events/base_types.py b/core/dbt/adapters/events/base_types.py deleted file mode 100644 index 23de6ab2b73..00000000000 --- a/core/dbt/adapters/events/base_types.py +++ /dev/null @@ -1,39 +0,0 @@ -# Aliasing common Level 
classes in order to make custom, but not overly-verbose versions that have PROTO_TYPES_MODULE set to the adapter-specific generated types_pb2 module -from dbt_common.events.base_types import ( - BaseEvent, - DynamicLevel as CommonDynamicLevel, - TestLevel as CommonTestLevel, - DebugLevel as CommonDebugLevel, - InfoLevel as CommonInfoLevel, - WarnLevel as CommonWarnLevel, - ErrorLevel as CommonErrorLevel, -) -from dbt.adapters.events import adapter_types_pb2 - - -class AdapterBaseEvent(BaseEvent): - PROTO_TYPES_MODULE = adapter_types_pb2 - - -class DynamicLevel(CommonDynamicLevel, AdapterBaseEvent): - pass - - -class TestLevel(CommonTestLevel, AdapterBaseEvent): - pass - - -class DebugLevel(CommonDebugLevel, AdapterBaseEvent): - pass - - -class InfoLevel(CommonInfoLevel, AdapterBaseEvent): - pass - - -class WarnLevel(CommonWarnLevel, AdapterBaseEvent): - pass - - -class ErrorLevel(CommonErrorLevel, AdapterBaseEvent): - pass diff --git a/core/dbt/adapters/events/logging.py b/core/dbt/adapters/events/logging.py deleted file mode 100644 index 93f9d15fce1..00000000000 --- a/core/dbt/adapters/events/logging.py +++ /dev/null @@ -1,67 +0,0 @@ -import traceback -from dataclasses import dataclass - -from dbt.adapters.events.types import ( - AdapterEventDebug, - AdapterEventInfo, - AdapterEventWarning, - AdapterEventError, -) -from dbt_common.events import get_event_manager -from dbt_common.events.contextvars import get_node_info -from dbt_common.events.event_handler import set_package_logging -from dbt_common.events.functions import fire_event - - -@dataclass -class AdapterLogger: - name: str - - def debug(self, msg, *args) -> None: - event = AdapterEventDebug( - name=self.name, base_msg=str(msg), args=list(args), node_info=get_node_info() - ) - fire_event(event) - - def info(self, msg, *args) -> None: - event = AdapterEventInfo( - name=self.name, base_msg=str(msg), args=list(args), node_info=get_node_info() - ) - fire_event(event) - - def warning(self, msg, *args) -> None: - event = AdapterEventWarning( - name=self.name, base_msg=str(msg), args=list(args), node_info=get_node_info() - ) - fire_event(event) - - def error(self, msg, *args) -> None: - event = AdapterEventError( - name=self.name, base_msg=str(msg), args=list(args), node_info=get_node_info() - ) - fire_event(event) - - # The default exc_info=True is what makes this method different - def exception(self, msg, *args) -> None: - exc_info = str(traceback.format_exc()) - event = AdapterEventError( - name=self.name, - base_msg=str(msg), - args=list(args), - node_info=get_node_info(), - exc_info=exc_info, - ) - fire_event(event) - - def critical(self, msg, *args) -> None: - event = AdapterEventError( - name=self.name, base_msg=str(msg), args=list(args), node_info=get_node_info() - ) - fire_event(event) - - @staticmethod - def set_adapter_dependency_log_level(package_name, level): - """By default, dbt suppresses non-dbt package logs. This method allows - you to set the log level for a specific package. - """ - set_package_logging(package_name, level, get_event_manager()) diff --git a/core/dbt/adapters/events/types.py b/core/dbt/adapters/events/types.py deleted file mode 100644 index 99fe1c1bf36..00000000000 --- a/core/dbt/adapters/events/types.py +++ /dev/null @@ -1,417 +0,0 @@ -from dbt.adapters.events.base_types import WarnLevel, InfoLevel, ErrorLevel, DebugLevel -from dbt_common.ui import line_wrap_message, warning_tag - - -def format_adapter_message(name, base_msg, args) -> str: - # only apply formatting if there are arguments to format.
- # avoids issues like "dict: {k: v}".format() which results in `KeyError 'k'` - msg = base_msg if len(args) == 0 else base_msg.format(*args) - return f"{name} adapter: {msg}" - - -# ======================================================= -# D - Deprecations -# ======================================================= - - -class CollectFreshnessReturnSignature(WarnLevel): - def code(self) -> str: - return "D012" - - def message(self) -> str: - description = ( - "The 'collect_freshness' macro signature has changed to return the full " - "query result, rather than just a table of values. See the v1.5 migration guide " - "for details on how to update your custom macro: https://docs.getdbt.com/guides/migration/versions/upgrading-to-v1.5" - ) - return line_wrap_message(warning_tag(f"Deprecated functionality\n\n{description}")) - - -class AdapterDeprecationWarning(WarnLevel): - def code(self) -> str: - return "D005" - - def message(self) -> str: - description = ( - f"The adapter function `adapter.{self.old_name}` is deprecated and will be removed in " - f"a future release of dbt. Please use `adapter.{self.new_name}` instead. " - f"\n\nDocumentation for {self.new_name} can be found here:" - f"\n\nhttps://docs.getdbt.com/docs/adapter" - ) - return line_wrap_message(warning_tag(f"Deprecated functionality\n\n{description}")) - - -# ======================================================= -# E - DB Adapter -# ======================================================= - - -class AdapterEventDebug(DebugLevel): - def code(self) -> str: - return "E001" - - def message(self) -> str: - return format_adapter_message(self.name, self.base_msg, self.args) - - -class AdapterEventInfo(InfoLevel): - def code(self) -> str: - return "E002" - - def message(self) -> str: - return format_adapter_message(self.name, self.base_msg, self.args) - - -class AdapterEventWarning(WarnLevel): - def code(self) -> str: - return "E003" - - def message(self) -> str: - return format_adapter_message(self.name, self.base_msg, self.args) - - -class AdapterEventError(ErrorLevel): - def code(self) -> str: - return "E004" - - def message(self) -> str: - return format_adapter_message(self.name, self.base_msg, self.args) - - -class NewConnection(DebugLevel): - def code(self) -> str: - return "E005" - - def message(self) -> str: - return f"Acquiring new {self.conn_type} connection '{self.conn_name}'" - - -class ConnectionReused(DebugLevel): - def code(self) -> str: - return "E006" - - def message(self) -> str: - return f"Re-using an available connection from the pool (formerly {self.orig_conn_name}, now {self.conn_name})" - - -class ConnectionLeftOpenInCleanup(DebugLevel): - def code(self) -> str: - return "E007" - - def message(self) -> str: - return f"Connection '{self.conn_name}' was left open." - - -class ConnectionClosedInCleanup(DebugLevel): - def code(self) -> str: - return "E008" - - def message(self) -> str: - return f"Connection '{self.conn_name}' was properly closed." 
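For reference, adapter plugins consumed the AdapterLogger removed above along these lines (a minimal sketch, not part of this patch; the "Postgres" logger name and the messages are illustrative):

from dbt.adapters.events.logging import AdapterLogger

logger = AdapterLogger("Postgres")  # `name` prefixes every message: "Postgres adapter: ..."

# Positional args are interpolated with str.format() by format_adapter_message,
# so formatting is skipped entirely when no args are passed.
logger.debug("Opening connection to {}", "analytics")

try:
    raise TimeoutError("handshake timed out")
except TimeoutError:
    # Unlike error(), exception() also attaches the formatted traceback as exc_info.
    logger.exception("Connection failed")
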
- - -class RollbackFailed(DebugLevel): - def code(self) -> str: - return "E009" - - def message(self) -> str: - return f"Failed to rollback '{self.conn_name}'" - - -class ConnectionClosed(DebugLevel): - def code(self) -> str: - return "E010" - - def message(self) -> str: - return f"On {self.conn_name}: Close" - - -class ConnectionLeftOpen(DebugLevel): - def code(self) -> str: - return "E011" - - def message(self) -> str: - return f"On {self.conn_name}: No close available on handle" - - -class Rollback(DebugLevel): - def code(self) -> str: - return "E012" - - def message(self) -> str: - return f"On {self.conn_name}: ROLLBACK" - - -class CacheMiss(DebugLevel): - def code(self) -> str: - return "E013" - - def message(self) -> str: - return ( - f'On "{self.conn_name}": cache miss for schema ' - f'"{self.database}.{self.schema}", this is inefficient' - ) - - -class ListRelations(DebugLevel): - def code(self) -> str: - return "E014" - - def message(self) -> str: - identifiers_str = ", ".join(r.identifier for r in self.relations) - return f"While listing relations in database={self.database}, schema={self.schema}, found: {identifiers_str}" - - -class ConnectionUsed(DebugLevel): - def code(self) -> str: - return "E015" - - def message(self) -> str: - return f'Using {self.conn_type} connection "{self.conn_name}"' - - -class SQLQuery(DebugLevel): - def code(self) -> str: - return "E016" - - def message(self) -> str: - return f"On {self.conn_name}: {self.sql}" - - -class SQLQueryStatus(DebugLevel): - def code(self) -> str: - return "E017" - - def message(self) -> str: - return f"SQL status: {self.status} in {self.elapsed} seconds" - - -class SQLCommit(DebugLevel): - def code(self) -> str: - return "E018" - - def message(self) -> str: - return f"On {self.conn_name}: COMMIT" - - -class ColTypeChange(DebugLevel): - def code(self) -> str: - return "E019" - - def message(self) -> str: - return f"Changing col type from {self.orig_type} to {self.new_type} in table {self.table}" - - -class SchemaCreation(DebugLevel): - def code(self) -> str: - return "E020" - - def message(self) -> str: - return f'Creating schema "{self.relation}"' - - -class SchemaDrop(DebugLevel): - def code(self) -> str: - return "E021" - - def message(self) -> str: - return f'Dropping schema "{self.relation}".' 
- - -class CacheAction(DebugLevel): - def code(self) -> str: - return "E022" - - def format_ref_key(self, ref_key) -> str: - return f"(database={ref_key.database}, schema={ref_key.schema}, identifier={ref_key.identifier})" - - def message(self) -> str: - ref_key = self.format_ref_key(self.ref_key) - ref_key_2 = self.format_ref_key(self.ref_key_2) - ref_key_3 = self.format_ref_key(self.ref_key_3) - ref_list = [] - for rfk in self.ref_list: - ref_list.append(self.format_ref_key(rfk)) - if self.action == "add_link": - return f"adding link, {ref_key} references {ref_key_2}" - elif self.action == "add_relation": - return f"adding relation: {ref_key}" - elif self.action == "drop_missing_relation": - return f"dropped a nonexistent relationship: {ref_key}" - elif self.action == "drop_cascade": - return f"drop {ref_key} is cascading to {ref_list}" - elif self.action == "drop_relation": - return f"Dropping relation: {ref_key}" - elif self.action == "update_reference": - return ( - f"updated reference from {ref_key} -> {ref_key_3} to " - f"{ref_key_2} -> {ref_key_3}" - ) - elif self.action == "temporary_relation": - return f"old key {ref_key} not found in self.relations, assuming temporary" - elif self.action == "rename_relation": - return f"Renaming relation {ref_key} to {ref_key_2}" - elif self.action == "uncached_relation": - return ( - f"{ref_key_2} references {ref_key} " - f"but {self.ref_key.database}.{self.ref_key.schema} " - "is not in the cache, skipping assumed external relation" - ) - else: - return ref_key - - -# Skipping E023, E024, E025, E026, E027, E028, E029, E030 - - -class CacheDumpGraph(DebugLevel): - def code(self) -> str: - return "E031" - - def message(self) -> str: - return f"dump {self.before_after} {self.action} : {self.dump}" - - -# Skipping E032, E033 - - -class AdapterRegistered(InfoLevel): - def code(self) -> str: - return "E034" - - def message(self) -> str: - return f"Registered adapter: {self.adapter_name}{self.adapter_version}" - - -class AdapterImportError(InfoLevel): - def code(self) -> str: - return "E035" - - def message(self) -> str: - return f"Error importing adapter: {self.exc}" - - -class PluginLoadError(DebugLevel): - def code(self) -> str: - return "E036" - - def message(self) -> str: - return f"{self.exc_info}" - - -class NewConnectionOpening(DebugLevel): - def code(self) -> str: - return "E037" - - def message(self) -> str: - return f"Opening a new connection, currently in state {self.connection_state}" - - -class CodeExecution(DebugLevel): - def code(self) -> str: - return "E038" - - def message(self) -> str: - return f"On {self.conn_name}: {self.code_content}" - - -class CodeExecutionStatus(DebugLevel): - def code(self) -> str: - return "E039" - - def message(self) -> str: - return f"Execution status: {self.status} in {self.elapsed} seconds" - - -class CatalogGenerationError(WarnLevel): - def code(self) -> str: - return "E040" - - def message(self) -> str: - return f"Encountered an error while generating catalog: {self.exc}" - - -class WriteCatalogFailure(ErrorLevel): - def code(self) -> str: - return "E041" - - def message(self) -> str: - return ( - f"dbt encountered {self.num_exceptions} failure{(self.num_exceptions != 1) * 's'} " - "while writing the catalog" - ) - - -class CatalogWritten(InfoLevel): - def code(self) -> str: - return "E042" - - def message(self) -> str: - return f"Catalog written to {self.path}" - - -class CannotGenerateDocs(InfoLevel): - def code(self) -> str: - return "E043" - - def message(self) -> str: - return "compile
failed, cannot generate docs" - - -class BuildingCatalog(InfoLevel): - def code(self) -> str: - return "E044" - - def message(self) -> str: - return "Building catalog" - - -class DatabaseErrorRunningHook(InfoLevel): - def code(self) -> str: - return "E045" - - def message(self) -> str: - return f"Database error while running {self.hook_type}" - - -class HooksRunning(InfoLevel): - def code(self) -> str: - return "E046" - - def message(self) -> str: - plural = "hook" if self.num_hooks == 1 else "hooks" - return f"Running {self.num_hooks} {self.hook_type} {plural}" - - -class FinishedRunningStats(InfoLevel): - def code(self) -> str: - return "E047" - - def message(self) -> str: - return f"Finished running {self.stat_line}{self.execution} ({self.execution_time:0.2f}s)." - - -class ConstraintNotEnforced(WarnLevel): - def code(self) -> str: - return "E048" - - def message(self) -> str: - msg = ( - f"The constraint type {self.constraint} is not enforced by {self.adapter}. " - "The constraint will be included in this model's DDL statement, but it will not " - "guarantee anything about the underlying data. Set 'warn_unenforced: false' on " - "this constraint to ignore this warning." - ) - return line_wrap_message(warning_tag(msg)) - - -class ConstraintNotSupported(WarnLevel): - def code(self) -> str: - return "E049" - - def message(self) -> str: - msg = ( - f"The constraint type {self.constraint} is not supported by {self.adapter}, and will " - "be ignored. Set 'warn_unsupported: false' on this constraint to ignore this warning." - ) - return line_wrap_message(warning_tag(msg)) diff --git a/core/dbt/adapters/exceptions/__init__.py b/core/dbt/adapters/exceptions/__init__.py deleted file mode 100644 index 393d277d6cf..00000000000 --- a/core/dbt/adapters/exceptions/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from dbt.adapters.exceptions.compilation import * # noqa -from dbt.adapters.exceptions.alias import * # noqa -from dbt.adapters.exceptions.database import * # noqa -from dbt.adapters.exceptions.connection import * # noqa -from dbt.adapters.exceptions.cache import * # noqa diff --git a/core/dbt/adapters/exceptions/alias.py b/core/dbt/adapters/exceptions/alias.py deleted file mode 100644 index 60426a4cb5d..00000000000 --- a/core/dbt/adapters/exceptions/alias.py +++ /dev/null @@ -1,24 +0,0 @@ -from typing import Mapping, Any - -from dbt_common.exceptions import DbtValidationError - - -class AliasError(DbtValidationError): - pass - - -# core level exceptions -class DuplicateAliasError(AliasError): - def __init__(self, kwargs: Mapping[str, Any], aliases: Mapping[str, str], canonical_key: str): - self.kwargs = kwargs - self.aliases = aliases - self.canonical_key = canonical_key - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - # dupe found: go through the dict so we can have a nice-ish error - key_names = ", ".join( - "{}".format(k) for k in self.kwargs if self.aliases.get(k) == self.canonical_key - ) - msg = f'Got duplicate keys: ({key_names}) all map to "{self.canonical_key}"' - return msg diff --git a/core/dbt/adapters/exceptions/cache.py b/core/dbt/adapters/exceptions/cache.py deleted file mode 100644 index 9b51f26f9fb..00000000000 --- a/core/dbt/adapters/exceptions/cache.py +++ /dev/null @@ -1,68 +0,0 @@ -import re -from typing import Dict - -from dbt_common.exceptions import DbtInternalError - - -class CacheInconsistencyError(DbtInternalError): - def __init__(self, msg: str): - self.msg = msg - formatted_msg = f"Cache inconsistency detected: {self.msg}" - 
super().__init__(msg=formatted_msg) - - -class NewNameAlreadyInCacheError(CacheInconsistencyError): - def __init__(self, old_key: str, new_key: str): - self.old_key = old_key - self.new_key = new_key - msg = ( - f'in rename of "{self.old_key}" -> "{self.new_key}", new name is in the cache already' - ) - super().__init__(msg) - - -class ReferencedLinkNotCachedError(CacheInconsistencyError): - def __init__(self, referenced_key: str): - self.referenced_key = referenced_key - msg = f"in add_link, referenced link key {self.referenced_key} not in cache!" - super().__init__(msg) - - -class DependentLinkNotCachedError(CacheInconsistencyError): - def __init__(self, dependent_key: str): - self.dependent_key = dependent_key - msg = f"in add_link, dependent link key {self.dependent_key} not in cache!" - super().__init__(msg) - - -class TruncatedModelNameCausedCollisionError(CacheInconsistencyError): - def __init__(self, new_key, relations: Dict): - self.new_key = new_key - self.relations = relations - super().__init__(self.get_message()) - - def get_message(self) -> str: - # Tell user when collision caused by model names truncated during - # materialization. - match = re.search("__dbt_backup|__dbt_tmp$", self.new_key.identifier) - if match: - truncated_model_name_prefix = self.new_key.identifier[: match.start()] - message_addendum = ( - "\n\nName collisions can occur when the length of two " - "models' names approach your database's builtin limit. " - "Try restructuring your project such that no two models " - f"share the prefix '{truncated_model_name_prefix}'. " - "Then, clean your warehouse of any removed models." - ) - else: - message_addendum = "" - - msg = f"in rename, new key {self.new_key} already in cache: {list(self.relations.keys())}{message_addendum}" - - return msg - - -class NoneRelationFoundError(CacheInconsistencyError): - def __init__(self): - msg = "in get_relations, a None relation was found in the cache!" - super().__init__(msg) diff --git a/core/dbt/adapters/exceptions/compilation.py b/core/dbt/adapters/exceptions/compilation.py deleted file mode 100644 index a66610d54d5..00000000000 --- a/core/dbt/adapters/exceptions/compilation.py +++ /dev/null @@ -1,255 +0,0 @@ -from typing import List, Mapping, Any - -from dbt_common.exceptions import CompilationError, DbtDatabaseError -from dbt_common.ui import line_wrap_message - - -class MissingConfigError(CompilationError): - def __init__(self, unique_id: str, name: str): - self.unique_id = unique_id - self.name = name - msg = ( - f"Model '{self.unique_id}' does not define a required config parameter '{self.name}'." - ) - super().__init__(msg=msg) - - -class MultipleDatabasesNotAllowedError(CompilationError): - def __init__(self, databases): - self.databases = databases - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = str(self.databases) - return msg - - -class ApproximateMatchError(CompilationError): - def __init__(self, target, relation): - self.target = target - self.relation = relation - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = ( - "When searching for a relation, dbt found an approximate match. " - "Instead of guessing \nwhich relation to use, dbt will move on. " - f"Please delete {self.relation}, or rename it to be less ambiguous." 
- f"\nSearched for: {self.target}\nFound: {self.relation}" - ) - - return msg - - -class SnapshotTargetIncompleteError(CompilationError): - def __init__(self, extra: List, missing: List): - self.extra = extra - self.missing = missing - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = ( - 'Snapshot target has ("{}") but not ("{}") - is it an ' - "unmigrated previous version archive?".format( - '", "'.join(self.extra), '", "'.join(self.missing) - ) - ) - return msg - - -class DuplicateMacroInPackageError(CompilationError): - def __init__(self, macro, macro_mapping: Mapping): - self.macro = macro - self.macro_mapping = macro_mapping - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - other_path = self.macro_mapping[self.macro.unique_id].original_file_path - # subtract 2 for the "Compilation Error" indent - # note that the line wrap eats newlines, so if you want newlines, - # this is the result :( - msg = line_wrap_message( - f"""\ - dbt found two macros named "{self.macro.name}" in the project - "{self.macro.package_name}". - - - To fix this error, rename or remove one of the following - macros: - - - {self.macro.original_file_path} - - - {other_path} - """, - subtract=2, - ) - return msg - - -class DuplicateMaterializationNameError(CompilationError): - def __init__(self, macro, other_macro): - self.macro = macro - self.other_macro = other_macro - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - macro_name = self.macro.name - macro_package_name = self.macro.package_name - other_package_name = self.other_macro.macro.package_name - - msg = ( - f"Found two materializations with the name {macro_name} (packages " - f"{macro_package_name} and {other_package_name}). dbt cannot resolve " - "this ambiguity" - ) - return msg - - -class ColumnTypeMissingError(CompilationError): - def __init__(self, column_names: List): - self.column_names = column_names - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = ( - "Contracted models require data_type to be defined for each column. " - "Please ensure that the column name and data_type are defined within " - f"the YAML configuration for the {self.column_names} column(s)." - ) - return msg - - -class MacroNotFoundError(CompilationError): - def __init__(self, node, target_macro_id: str): - self.node = node - self.target_macro_id = target_macro_id - msg = f"'{self.node.unique_id}' references macro '{self.target_macro_id}' which is not defined!" - - super().__init__(msg=msg) - - -class MissingMaterializationError(CompilationError): - def __init__(self, materialization, adapter_type): - self.materialization = materialization - self.adapter_type = adapter_type - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - valid_types = "'default'" - - if self.adapter_type != "default": - valid_types = f"'default' and '{self.adapter_type}'" - - msg = f"No materialization '{self.materialization}' was found for adapter {self.adapter_type}! 
(searched types {valid_types})" - return msg - - -class SnapshotTargetNotSnapshotTableError(CompilationError): - def __init__(self, missing: List): - self.missing = missing - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = 'Snapshot target is not a snapshot table (missing "{}")'.format( - '", "'.join(self.missing) - ) - return msg - - -class NullRelationDropAttemptedError(CompilationError): - def __init__(self, name: str): - self.name = name - self.msg = f"Attempted to drop a null relation for {self.name}" - super().__init__(msg=self.msg) - - -class NullRelationCacheAttemptedError(CompilationError): - def __init__(self, name: str): - self.name = name - self.msg = f"Attempted to cache a null relation for {self.name}" - super().__init__(msg=self.msg) - - -class RelationTypeNullError(CompilationError): - def __init__(self, relation): - self.relation = relation - self.msg = f"Tried to drop relation {self.relation}, but its type is null." - super().__init__(msg=self.msg) - - -class MaterializationNotAvailableError(CompilationError): - def __init__(self, materialization, adapter_type: str): - self.materialization = materialization - self.adapter_type = adapter_type - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = f"Materialization '{self.materialization}' is not available for {self.adapter_type}!" - return msg - - -class RelationReturnedMultipleResultsError(CompilationError): - def __init__(self, kwargs: Mapping[str, Any], matches: List): - self.kwargs = kwargs - self.matches = matches - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = ( - "get_relation returned more than one relation with the given args. " - "Please specify a database or schema to narrow down the result set." - f"\n{self.kwargs}\n\n{self.matches}" - ) - return msg - - -class UnexpectedNonTimestampError(DbtDatabaseError): - def __init__(self, field_name: str, source, dt: Any): - self.field_name = field_name - self.source = source - self.type_name = type(dt).__name__ - msg = ( - f"Expected a timestamp value when querying field '{self.field_name}' of table " - f"{self.source} but received value of type '{self.type_name}' instead" - ) - super().__init__(msg) - - -class RenameToNoneAttemptedError(CompilationError): - def __init__(self, src_name: str, dst_name: str, name: str): - self.src_name = src_name - self.dst_name = dst_name - self.name = name - self.msg = f"Attempted to rename {self.src_name} to {self.dst_name} for {self.name}" - super().__init__(msg=self.msg) - - -class QuoteConfigTypeError(CompilationError): - def __init__(self, quote_config: Any): - self.quote_config = quote_config - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = ( - 'The seed configuration value of "quote_columns" has an ' - f"invalid type {type(self.quote_config)}" - ) - return msg - - -class RelationWrongTypeError(CompilationError): - def __init__(self, relation, expected_type, model=None): - self.relation = relation - self.expected_type = expected_type - self.model = model - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = ( - f"Trying to create {self.expected_type} {self.relation}, " - f"but it currently exists as a {self.relation.type}. Either " - f"drop {self.relation} manually, or run dbt with " - "`--full-refresh` and dbt will drop it for you." 
- ) - - return msg diff --git a/core/dbt/adapters/exceptions/connection.py b/core/dbt/adapters/exceptions/connection.py deleted file mode 100644 index 870794fbe8d..00000000000 --- a/core/dbt/adapters/exceptions/connection.py +++ /dev/null @@ -1,16 +0,0 @@ -from typing import List - -from dbt_common.exceptions import DbtRuntimeError, DbtDatabaseError - - -class InvalidConnectionError(DbtRuntimeError): - def __init__(self, thread_id, known: List) -> None: - self.thread_id = thread_id - self.known = known - super().__init__( - msg=f"connection never acquired for thread {self.thread_id}, have {self.known}" - ) - - -class FailedToConnectError(DbtDatabaseError): - pass diff --git a/core/dbt/adapters/exceptions/database.py b/core/dbt/adapters/exceptions/database.py deleted file mode 100644 index 066016636d6..00000000000 --- a/core/dbt/adapters/exceptions/database.py +++ /dev/null @@ -1,51 +0,0 @@ -from typing import Any - -from dbt_common.exceptions import NotImplementedError, CompilationError - - -class UnexpectedDbReferenceError(NotImplementedError): - def __init__(self, adapter, database, expected): - self.adapter = adapter - self.database = database - self.expected = expected - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = f"Cross-db references not allowed in {self.adapter} ({self.database} vs {self.expected})" - return msg - - -class CrossDbReferenceProhibitedError(CompilationError): - def __init__(self, adapter, exc_msg: str): - self.adapter = adapter - self.exc_msg = exc_msg - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = f"Cross-db references not allowed in adapter {self.adapter}: Got {self.exc_msg}" - return msg - - -class IndexConfigNotDictError(CompilationError): - def __init__(self, raw_index: Any): - self.raw_index = raw_index - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = ( - f"Invalid index config:\n" - f" Got: {self.raw_index}\n" - f' Expected a dictionary with at minimum a "columns" key' - ) - return msg - - -class IndexConfigError(CompilationError): - def __init__(self, exc: TypeError): - self.exc = exc - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - validator_msg = self.validator_error_message(self.exc) - msg = f"Could not parse index config: {validator_msg}" - return msg diff --git a/core/dbt/adapters/factory.py b/core/dbt/adapters/factory.py deleted file mode 100644 index 8124c5047d8..00000000000 --- a/core/dbt/adapters/factory.py +++ /dev/null @@ -1,239 +0,0 @@ -from multiprocessing.context import SpawnContext - -import threading -import traceback -from contextlib import contextmanager -from importlib import import_module -from pathlib import Path -from typing import Any, Dict, List, Optional, Set, Type - -from dbt.adapters.base.plugin import AdapterPlugin -from dbt.adapters.protocol import AdapterConfig, AdapterProtocol, RelationProtocol -from dbt.adapters.contracts.connection import AdapterRequiredConfig, Credentials -from dbt_common.events.functions import fire_event -from dbt.adapters.events.types import AdapterImportError, PluginLoadError, AdapterRegistered -from dbt_common.exceptions import DbtInternalError, DbtRuntimeError -from dbt.adapters.include.global_project import PACKAGE_PATH as GLOBAL_PROJECT_PATH -from dbt.adapters.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME -from dbt_common.semver import VersionSpecifier - -Adapter = AdapterProtocol - - -class AdapterContainer: - def __init__(self) -> None: - 
self.lock = threading.Lock() - self.adapters: Dict[str, Adapter] = {} - self.plugins: Dict[str, AdapterPlugin] = {} - # map package names to their include paths - self.packages: Dict[str, Path] = { - GLOBAL_PROJECT_NAME: Path(GLOBAL_PROJECT_PATH), - } - - def get_plugin_by_name(self, name: str) -> AdapterPlugin: - with self.lock: - if name in self.plugins: - return self.plugins[name] - names = ", ".join(self.plugins.keys()) - - message = f"Invalid adapter type {name}! Must be one of {names}" - raise DbtRuntimeError(message) - - def get_adapter_class_by_name(self, name: str) -> Type[Adapter]: - plugin = self.get_plugin_by_name(name) - return plugin.adapter - - def get_relation_class_by_name(self, name: str) -> Type[RelationProtocol]: - adapter = self.get_adapter_class_by_name(name) - return adapter.Relation - - def get_config_class_by_name(self, name: str) -> Type[AdapterConfig]: - adapter = self.get_adapter_class_by_name(name) - return adapter.AdapterSpecificConfigs - - def load_plugin(self, name: str) -> Type[Credentials]: - # this doesn't need a lock: in the worst case we'll overwrite packages - # and adapter_type entries with the same value, as they're all - # singletons - try: - # mypy doesn't think modules have any attributes. - mod: Any = import_module("." + name, "dbt.adapters") - except ModuleNotFoundError as exc: - # if we failed to import the target module in particular, inform - # the user about it via a runtime error - if exc.name == "dbt.adapters." + name: - fire_event(AdapterImportError(exc=str(exc))) - raise DbtRuntimeError(f"Could not find adapter type {name}!") - # otherwise, the error had to have come from some underlying - # library. Log the stack trace. - - fire_event(PluginLoadError(exc_info=traceback.format_exc())) - raise - plugin: AdapterPlugin = mod.Plugin - plugin_type = plugin.adapter.type() - - if plugin_type != name: - raise DbtRuntimeError( - f"Expected to find adapter with type named {name}, got " - f"adapter with type {plugin_type}" - ) - - with self.lock: - # things do hold the lock to iterate over it so we need it to add - self.plugins[name] = plugin - - self.packages[plugin.project_name] = Path(plugin.include_path) - - for dep in plugin.dependencies: - self.load_plugin(dep) - - return plugin.credentials - - def register_adapter(self, config: AdapterRequiredConfig, mp_context: SpawnContext) -> None: - adapter_name = config.credentials.type - adapter_type = self.get_adapter_class_by_name(adapter_name) - adapter_version = import_module(f".{adapter_name}.__version__", "dbt.adapters").version - adapter_version_specifier = VersionSpecifier.from_version_string( - adapter_version - ).to_version_string() - fire_event( - AdapterRegistered(adapter_name=adapter_name, adapter_version=adapter_version_specifier) - ) - with self.lock: - if adapter_name in self.adapters: - # this shouldn't really happen... - return - - adapter: Adapter = adapter_type(config, mp_context) # type: ignore - self.adapters[adapter_name] = adapter - - def lookup_adapter(self, adapter_name: str) -> Adapter: - return self.adapters[adapter_name] - - def reset_adapters(self): - """Clear the adapters. This is useful for tests, which change configs.""" - with self.lock: - for adapter in self.adapters.values(): - adapter.cleanup_connections() - self.adapters.clear() - - def cleanup_connections(self): - """Only clean up the adapter connections list without resetting the - actual adapters. 
- """ - with self.lock: - for adapter in self.adapters.values(): - adapter.cleanup_connections() - - def get_adapter_plugins(self, name: Optional[str]) -> List[AdapterPlugin]: - """Iterate over the known adapter plugins. If a name is provided, - iterate in dependency order over the named plugin and its dependencies. - """ - if name is None: - return list(self.plugins.values()) - - plugins: List[AdapterPlugin] = [] - seen: Set[str] = set() - plugin_names: List[str] = [name] - while plugin_names: - plugin_name = plugin_names[0] - plugin_names = plugin_names[1:] - try: - plugin = self.plugins[plugin_name] - except KeyError: - raise DbtInternalError(f"No plugin found for {plugin_name}") from None - plugins.append(plugin) - seen.add(plugin_name) - for dep in plugin.dependencies: - if dep not in seen: - plugin_names.append(dep) - return plugins - - def get_adapter_package_names(self, name: Optional[str]) -> List[str]: - package_names: List[str] = [p.project_name for p in self.get_adapter_plugins(name)] - package_names.append(GLOBAL_PROJECT_NAME) - return package_names - - def get_include_paths(self, name: Optional[str]) -> List[Path]: - paths = [] - for package_name in self.get_adapter_package_names(name): - try: - path = self.packages[package_name] - except KeyError: - raise DbtInternalError(f"No internal package listing found for {package_name}") - paths.append(path) - return paths - - def get_adapter_type_names(self, name: Optional[str]) -> List[str]: - return [p.adapter.type() for p in self.get_adapter_plugins(name)] - - def get_adapter_constraint_support(self, name: Optional[str]) -> List[str]: - return self.lookup_adapter(name).CONSTRAINT_SUPPORT # type: ignore - - -FACTORY: AdapterContainer = AdapterContainer() - - -def register_adapter(config: AdapterRequiredConfig, mp_context: SpawnContext) -> None: - FACTORY.register_adapter(config, mp_context) - - -def get_adapter(config: AdapterRequiredConfig): - return FACTORY.lookup_adapter(config.credentials.type) - - -def get_adapter_by_type(adapter_type): - return FACTORY.lookup_adapter(adapter_type) - - -def reset_adapters(): - """Clear the adapters. This is useful for tests, which change configs.""" - FACTORY.reset_adapters() - - -def cleanup_connections(): - """Only clean up the adapter connections list without resetting the actual - adapters. 
- """ - FACTORY.cleanup_connections() - - -def get_adapter_class_by_name(name: str) -> Type[AdapterProtocol]: - return FACTORY.get_adapter_class_by_name(name) - - -def get_config_class_by_name(name: str) -> Type[AdapterConfig]: - return FACTORY.get_config_class_by_name(name) - - -def get_relation_class_by_name(name: str) -> Type[RelationProtocol]: - return FACTORY.get_relation_class_by_name(name) - - -def load_plugin(name: str) -> Type[Credentials]: - return FACTORY.load_plugin(name) - - -def get_include_paths(name: Optional[str]) -> List[Path]: - return FACTORY.get_include_paths(name) - - -def get_adapter_package_names(name: Optional[str]) -> List[str]: - return FACTORY.get_adapter_package_names(name) - - -def get_adapter_type_names(name: Optional[str]) -> List[str]: - return FACTORY.get_adapter_type_names(name) - - -def get_adapter_constraint_support(name: Optional[str]) -> List[str]: - return FACTORY.get_adapter_constraint_support(name) - - -@contextmanager -def adapter_management(): - reset_adapters() - try: - yield - finally: - cleanup_connections() diff --git a/core/dbt/adapters/include/global_project/__init__.py b/core/dbt/adapters/include/global_project/__init__.py deleted file mode 100644 index 4043ffebb6e..00000000000 --- a/core/dbt/adapters/include/global_project/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -import os - -PACKAGE_PATH = os.path.dirname(__file__) -PROJECT_NAME = "dbt" diff --git a/core/dbt/adapters/include/global_project/dbt_project.yml b/core/dbt/adapters/include/global_project/dbt_project.yml deleted file mode 100644 index fe15d183c58..00000000000 --- a/core/dbt/adapters/include/global_project/dbt_project.yml +++ /dev/null @@ -1,7 +0,0 @@ -config-version: 2 -name: dbt -version: 1.0 - -docs-paths: ["docs"] -macro-paths: ["macros"] -test-paths: ["tests"] diff --git a/core/dbt/adapters/include/global_project/docs/overview.md b/core/dbt/adapters/include/global_project/docs/overview.md deleted file mode 100644 index 36a4c0aa199..00000000000 --- a/core/dbt/adapters/include/global_project/docs/overview.md +++ /dev/null @@ -1,43 +0,0 @@ - -{% docs __overview__ %} - -### Welcome! - -Welcome to the auto-generated documentation for your dbt project! - -### Navigation - -You can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models -in your project. - -#### Project Tab -The `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the -models defined in your dbt project, as well as models imported from dbt packages. - -#### Database Tab -The `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view -shows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown -in this interface, as they do not exist in the database. - -### Graph Exploration -You can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models. - -On model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand` -button at the top-right of this lineage pane, you'll be able to see all of the models that are used to build, -or are built from, the model you're exploring. - -Once expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the -models in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax). 
- -Note that you can also right-click on models to interactively filter and explore the graph. - ---- - -### More information - -- [What is dbt](https://docs.getdbt.com/docs/introduction)? -- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint) -- [Installation](https://docs.getdbt.com/docs/installation) -- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion - -{% enddocs %} diff --git a/core/dbt/adapters/include/global_project/macros/adapters/apply_grants.sql b/core/dbt/adapters/include/global_project/macros/adapters/apply_grants.sql deleted file mode 100644 index 10906e7ffa7..00000000000 --- a/core/dbt/adapters/include/global_project/macros/adapters/apply_grants.sql +++ /dev/null @@ -1,167 +0,0 @@ -{# ------- BOOLEAN MACROS --------- #} - -{# - -- COPY GRANTS - -- When a relational object (view or table) is replaced in this database, - -- do previous grants carry over to the new object? This may depend on: - -- whether we use alter-rename-swap versus CREATE OR REPLACE - -- user-supplied configuration (e.g. copy_grants on Snowflake) - -- By default, play it safe, assume TRUE: that grants ARE copied over. - -- This means dbt will first "show" current grants and then calculate diffs. - -- It may require an additional query beyond what is strictly necessary, - -- but better safe than sorry. -#} - -{% macro copy_grants() %} - {{ return(adapter.dispatch('copy_grants', 'dbt')()) }} -{% endmacro %} - -{% macro default__copy_grants() %} - {{ return(True) }} -{% endmacro %} - - -{# - -- SUPPORT MULTIPLE GRANTEES PER DCL STATEMENT - -- Does this database support 'grant {privilege} to {grantee_1}, {grantee_2}, ...' - -- Or must these be separate statements: - -- `grant {privilege} to {grantee_1}`; - -- `grant {privilege} to {grantee_2}`; - -- By default, pick the former, because it's what we prefer when available.
-#} - -{% macro support_multiple_grantees_per_dcl_statement() %} - {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }} -{% endmacro %} - -{%- macro default__support_multiple_grantees_per_dcl_statement() -%} - {{ return(True) }} -{%- endmacro -%} - - -{% macro should_revoke(existing_relation, full_refresh_mode=True) %} - - {% if not existing_relation %} - {#-- The table doesn't already exist, so no grants to copy over --#} - {{ return(False) }} - {% elif full_refresh_mode %} - {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#} - {{ return(copy_grants()) }} - {% else %} - {#-- The table is being merged/upserted/inserted -- grants will be carried over --#} - {{ return(True) }} - {% endif %} - -{% endmacro %} - -{# ------- DCL STATEMENT TEMPLATES --------- #} - -{% macro get_show_grant_sql(relation) %} - {{ return(adapter.dispatch("get_show_grant_sql", "dbt")(relation)) }} -{% endmacro %} - -{% macro default__get_show_grant_sql(relation) %} - show grants on {{ relation }} -{% endmacro %} - - -{% macro get_grant_sql(relation, privilege, grantees) %} - {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }} -{% endmacro %} - -{%- macro default__get_grant_sql(relation, privilege, grantees) -%} - grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }} -{%- endmacro -%} - - -{% macro get_revoke_sql(relation, privilege, grantees) %} - {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }} -{% endmacro %} - -{%- macro default__get_revoke_sql(relation, privilege, grantees) -%} - revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }} -{%- endmacro -%} - - -{# ------- RUNTIME APPLICATION --------- #} - -{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %} - {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }} -{% endmacro %} - -{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%} - {# - -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked. - -- Depending on whether this database supports multiple grantees per statement, pass in the list of - -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair. - -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql` - #} - {%- set dcl_statements = [] -%} - {%- for privilege, grantees in grant_config.items() %} - {%- if support_multiple_grantees_per_dcl_statement() and grantees -%} - {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%} - {%- do dcl_statements.append(dcl) -%} - {%- else -%} - {%- for grantee in grantees -%} - {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %} - {%- do dcl_statements.append(dcl) -%} - {% endfor -%} - {%- endif -%} - {%- endfor -%} - {{ return(dcl_statements) }} -{%- endmacro %} - - -{% macro call_dcl_statements(dcl_statement_list) %} - {{ return(adapter.dispatch("call_dcl_statements", "dbt")(dcl_statement_list)) }} -{% endmacro %} - -{% macro default__call_dcl_statements(dcl_statement_list) %} - {# - -- By default, supply all grant + revoke statements in a single semicolon-separated block, - -- so that they're all processed together. - - -- Some databases do not support this. Those adapters will need to override this macro - -- to run each statement individually. 
- #} - {% call statement('grants') %} - {% for dcl_statement in dcl_statement_list %} - {{ dcl_statement }}; - {% endfor %} - {% endcall %} -{% endmacro %} - - -{% macro apply_grants(relation, grant_config, should_revoke) %} - {{ return(adapter.dispatch("apply_grants", "dbt")(relation, grant_config, should_revoke)) }} -{% endmacro %} - -{% macro default__apply_grants(relation, grant_config, should_revoke=True) %} - {#-- If grant_config is {} or None, this is a no-op --#} - {% if grant_config %} - {% if should_revoke %} - {#-- We think previous grants may have carried over --#} - {#-- Show current grants and calculate diffs --#} - {% set current_grants_table = run_query(get_show_grant_sql(relation)) %} - {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %} - {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %} - {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %} - {% if not (needs_granting or needs_revoking) %} - {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}} - {% endif %} - {% else %} - {#-- We don't think there's any chance of previous grants having carried over. --#} - {#-- Jump straight to granting what the user has configured. --#} - {% set needs_revoking = {} %} - {% set needs_granting = grant_config %} - {% endif %} - {% if needs_granting or needs_revoking %} - {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %} - {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %} - {% set dcl_statement_list = revoke_statement_list + grant_statement_list %} - {% if dcl_statement_list %} - {{ call_dcl_statements(dcl_statement_list) }} - {% endif %} - {% endif %} - {% endif %} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/adapters/columns.sql b/core/dbt/adapters/include/global_project/macros/adapters/columns.sql deleted file mode 100644 index e1099649cf0..00000000000 --- a/core/dbt/adapters/include/global_project/macros/adapters/columns.sql +++ /dev/null @@ -1,137 +0,0 @@ -{% macro get_columns_in_relation(relation) -%} - {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }} -{% endmacro %} - -{% macro default__get_columns_in_relation(relation) -%} - {{ exceptions.raise_not_implemented( - 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }} -{% endmacro %} - -{# helper for adapter-specific implementations of get_columns_in_relation #} -{% macro sql_convert_columns_in_relation(table) -%} - {% set columns = [] %} - {% for row in table %} - {% do columns.append(api.Column(*row)) %} - {% endfor %} - {{ return(columns) }} -{% endmacro %} - - -{% macro get_empty_subquery_sql(select_sql, select_sql_header=none) -%} - {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql, select_sql_header)) }} -{% endmacro %} - -{# - Builds a query that results in the same schema as the given select_sql statement, without necessitating a data scan. - Useful for running a query in a 'pre-flight' context, such as model contract enforcement (assert_columns_equivalent macro). 
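-  As an illustration, given `select id, name from my_table`, the default
-  implementation below renders roughly:
-
-      select * from (
-          select id, name from my_table
-      ) as __dbt_sbq
-      where false
-      limit 0
-
-  which scans no data but still lets the adapter resolve the column names and
-  types of the query.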
-#} -{% macro default__get_empty_subquery_sql(select_sql, select_sql_header=none) %} - {%- if select_sql_header is not none -%} - {{ select_sql_header }} - {%- endif -%} - select * from ( - {{ select_sql }} - ) as __dbt_sbq - where false - limit 0 -{% endmacro %} - -{% macro get_empty_schema_sql(columns) -%} - {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }} -{% endmacro %} - -{% macro default__get_empty_schema_sql(columns) %} - {%- set col_err = [] -%} - {%- set col_naked_numeric = [] -%} - select - {% for i in columns %} - {%- set col = columns[i] -%} - {%- if col['data_type'] is not defined -%} - {%- do col_err.append(col['name']) -%} - {#-- If this column's type is just 'numeric' then it is missing precision/scale, raise a warning --#} - {%- elif col['data_type'].strip().lower() in ('numeric', 'decimal', 'number') -%} - {%- do col_naked_numeric.append(col['name']) -%} - {%- endif -%} - {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %} - cast(null as {{ col['data_type'] }}) as {{ col_name }}{{ ", " if not loop.last }} - {%- endfor -%} - {%- if (col_err | length) > 0 -%} - {{ exceptions.column_type_missing(column_names=col_err) }} - {%- elif (col_naked_numeric | length) > 0 -%} - {{ exceptions.warn("Detected columns with numeric type and unspecified precision/scale, this can lead to unintended rounding: " ~ col_naked_numeric ~ "`") }} - {%- endif -%} -{% endmacro %} - -{% macro get_column_schema_from_query(select_sql, select_sql_header=none) -%} - {% set columns = [] %} - {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#} - {% set sql = get_empty_subquery_sql(select_sql, select_sql_header) %} - {% set column_schema = adapter.get_column_schema_from_query(sql) %} - {{ return(column_schema) }} -{% endmacro %} - --- here for back compat -{% macro get_columns_in_query(select_sql) -%} - {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }} -{% endmacro %} - -{% macro default__get_columns_in_query(select_sql) %} - {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%} - {{ get_empty_subquery_sql(select_sql) }} - {% endcall %} - {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }} -{% endmacro %} - -{% macro alter_column_type(relation, column_name, new_column_type) -%} - {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }} -{% endmacro %} - -{% macro default__alter_column_type(relation, column_name, new_column_type) -%} - {# - 1. Create a new column (w/ temp name and correct type) - 2. Copy data over to it - 3. Drop the existing column (cascade!) - 4. 
Rename the new column to existing column - #} - {%- set tmp_column = column_name + "__dbt_alter" -%} - - {% call statement('alter_column_type') %} - alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }}; - update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }}; - alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade; - alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }} - {% endcall %} - -{% endmacro %} - - -{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%} - {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }} -{% endmacro %} - -{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %} - - {% if add_columns is none %} - {% set add_columns = [] %} - {% endif %} - {% if remove_columns is none %} - {% set remove_columns = [] %} - {% endif %} - - {% set sql -%} - - alter {{ relation.type }} {{ relation }} - - {% for column in add_columns %} - add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }} - {% endfor %}{{ ',' if add_columns and remove_columns }} - - {% for column in remove_columns %} - drop column {{ column.name }}{{ ',' if not loop.last }} - {% endfor %} - - {%- endset -%} - - {% do run_query(sql) %} - -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/adapters/freshness.sql b/core/dbt/adapters/include/global_project/macros/adapters/freshness.sql deleted file mode 100644 index f18499a2391..00000000000 --- a/core/dbt/adapters/include/global_project/macros/adapters/freshness.sql +++ /dev/null @@ -1,16 +0,0 @@ -{% macro collect_freshness(source, loaded_at_field, filter) %} - {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}} -{% endmacro %} - -{% macro default__collect_freshness(source, loaded_at_field, filter) %} - {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%} - select - max({{ loaded_at_field }}) as max_loaded_at, - {{ current_timestamp() }} as snapshotted_at - from {{ source }} - {% if filter %} - where {{ filter }} - {% endif %} - {% endcall %} - {{ return(load_result('collect_freshness')) }} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/adapters/indexes.sql b/core/dbt/adapters/include/global_project/macros/adapters/indexes.sql deleted file mode 100644 index b8663a7f971..00000000000 --- a/core/dbt/adapters/include/global_project/macros/adapters/indexes.sql +++ /dev/null @@ -1,41 +0,0 @@ -{% macro get_create_index_sql(relation, index_dict) -%} - {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }} -{% endmacro %} - -{% macro default__get_create_index_sql(relation, index_dict) -%} - {% do return(None) %} -{% endmacro %} - - -{% macro create_indexes(relation) -%} - {{ adapter.dispatch('create_indexes', 'dbt')(relation) }} -{%- endmacro %} - -{% macro default__create_indexes(relation) -%} - {%- set _indexes = config.get('indexes', default=[]) -%} - - {% for _index_dict in _indexes %} - {% set create_index_sql = get_create_index_sql(relation, _index_dict) %} - {% if create_index_sql %} - {% do run_query(create_index_sql) %} - {% endif %} - {% endfor %} -{% endmacro %} - - -{% macro get_drop_index_sql(relation, index_name) -%} - {{ adapter.dispatch('get_drop_index_sql', 'dbt')(relation, index_name) }} -{%- 
endmacro %} - -{% macro default__get_drop_index_sql(relation, index_name) -%} - {{ exceptions.raise_compiler_error("`get_drop_index_sql has not been implemented for this adapter.") }} -{%- endmacro %} - - -{% macro get_show_indexes_sql(relation) -%} - {{ adapter.dispatch('get_show_indexes_sql', 'dbt')(relation) }} -{%- endmacro %} - -{% macro default__get_show_indexes_sql(relation) -%} - {{ exceptions.raise_compiler_error("`get_show_indexes_sql has not been implemented for this adapter.") }} -{%- endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/adapters/metadata.sql b/core/dbt/adapters/include/global_project/macros/adapters/metadata.sql deleted file mode 100644 index c8e8a4140de..00000000000 --- a/core/dbt/adapters/include/global_project/macros/adapters/metadata.sql +++ /dev/null @@ -1,96 +0,0 @@ -{% macro get_catalog_relations(information_schema, relations) -%} - {{ return(adapter.dispatch('get_catalog_relations', 'dbt')(information_schema, relations)) }} -{%- endmacro %} - -{% macro default__get_catalog_relations(information_schema, relations) -%} - {% set typename = adapter.type() %} - {% set msg -%} - get_catalog_relations not implemented for {{ typename }} - {%- endset %} - - {{ exceptions.raise_compiler_error(msg) }} -{%- endmacro %} - -{% macro get_catalog(information_schema, schemas) -%} - {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }} -{%- endmacro %} - -{% macro default__get_catalog(information_schema, schemas) -%} - - {% set typename = adapter.type() %} - {% set msg -%} - get_catalog not implemented for {{ typename }} - {%- endset %} - - {{ exceptions.raise_compiler_error(msg) }} -{% endmacro %} - - -{% macro information_schema_name(database) %} - {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }} -{% endmacro %} - -{% macro default__information_schema_name(database) -%} - {%- if database -%} - {{ database }}.INFORMATION_SCHEMA - {%- else -%} - INFORMATION_SCHEMA - {%- endif -%} -{%- endmacro %} - - -{% macro list_schemas(database) -%} - {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }} -{% endmacro %} - -{% macro default__list_schemas(database) -%} - {% set sql %} - select distinct schema_name - from {{ information_schema_name(database) }}.SCHEMATA - where catalog_name ilike '{{ database }}' - {% endset %} - {{ return(run_query(sql)) }} -{% endmacro %} - - -{% macro check_schema_exists(information_schema, schema) -%} - {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }} -{% endmacro %} - -{% macro default__check_schema_exists(information_schema, schema) -%} - {% set sql -%} - select count(*) - from {{ information_schema.replace(information_schema_view='SCHEMATA') }} - where catalog_name='{{ information_schema.database }}' - and schema_name='{{ schema }}' - {%- endset %} - {{ return(run_query(sql)) }} -{% endmacro %} - - -{% macro list_relations_without_caching(schema_relation) %} - {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }} -{% endmacro %} - -{% macro default__list_relations_without_caching(schema_relation) %} - {{ exceptions.raise_not_implemented( - 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }} -{% endmacro %} - -{% macro get_relations() %} - {{ return(adapter.dispatch('get_relations', 'dbt')()) }} -{% endmacro %} - -{% macro default__get_relations() %} - {{ exceptions.raise_not_implemented( - 'get_relations macro not implemented for adapter '+adapter.type()) 
}} -{% endmacro %} - -{% macro get_relation_last_modified(information_schema, relations) %} - {{ return(adapter.dispatch('get_relation_last_modified', 'dbt')(information_schema, relations)) }} -{% endmacro %} - -{% macro default__get_relation_last_modified(information_schema, relations) %} - {{ exceptions.raise_not_implemented( - 'get_relation_last_modified macro not implemented for adapter ' + adapter.type()) }} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/adapters/persist_docs.sql b/core/dbt/adapters/include/global_project/macros/adapters/persist_docs.sql deleted file mode 100644 index 8749e59f606..00000000000 --- a/core/dbt/adapters/include/global_project/macros/adapters/persist_docs.sql +++ /dev/null @@ -1,33 +0,0 @@ -{% macro alter_column_comment(relation, column_dict) -%} - {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }} -{% endmacro %} - -{% macro default__alter_column_comment(relation, column_dict) -%} - {{ exceptions.raise_not_implemented( - 'alter_column_comment macro not implemented for adapter '+adapter.type()) }} -{% endmacro %} - - -{% macro alter_relation_comment(relation, relation_comment) -%} - {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }} -{% endmacro %} - -{% macro default__alter_relation_comment(relation, relation_comment) -%} - {{ exceptions.raise_not_implemented( - 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }} -{% endmacro %} - - -{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%} - {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }} -{% endmacro %} - -{% macro default__persist_docs(relation, model, for_relation, for_columns) -%} - {% if for_relation and config.persist_relation_docs() and model.description %} - {% do run_query(alter_relation_comment(relation, model.description)) %} - {% endif %} - - {% if for_columns and config.persist_column_docs() and model.columns %} - {% do run_query(alter_column_comment(relation, model.columns)) %} - {% endif %} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/adapters/relation.sql b/core/dbt/adapters/include/global_project/macros/adapters/relation.sql deleted file mode 100644 index 1c2bd880079..00000000000 --- a/core/dbt/adapters/include/global_project/macros/adapters/relation.sql +++ /dev/null @@ -1,79 +0,0 @@ -{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %} - {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }} -{% endmacro %} - -{% macro default__make_intermediate_relation(base_relation, suffix) %} - {{ return(default__make_temp_relation(base_relation, suffix)) }} -{% endmacro %} - -{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %} - {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }} -{% endmacro %} - -{% macro default__make_temp_relation(base_relation, suffix) %} - {%- set temp_identifier = base_relation.identifier ~ suffix -%} - {%- set temp_relation = base_relation.incorporate( - path={"identifier": temp_identifier}) -%} - - {{ return(temp_relation) }} -{% endmacro %} - -{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %} - {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }} -{% endmacro %} - -{% macro default__make_backup_relation(base_relation, backup_relation_type, 
suffix) %} - {%- set backup_identifier = base_relation.identifier ~ suffix -%} - {%- set backup_relation = base_relation.incorporate( - path={"identifier": backup_identifier}, - type=backup_relation_type - ) -%} - {{ return(backup_relation) }} -{% endmacro %} - - -{% macro truncate_relation(relation) -%} - {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }} -{% endmacro %} - -{% macro default__truncate_relation(relation) -%} - {% call statement('truncate_relation') -%} - truncate table {{ relation }} - {%- endcall %} -{% endmacro %} - - -{% macro get_or_create_relation(database, schema, identifier, type) -%} - {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }} -{% endmacro %} - -{% macro default__get_or_create_relation(database, schema, identifier, type) %} - {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %} - - {% if target_relation %} - {% do return([true, target_relation]) %} - {% endif %} - - {%- set new_relation = api.Relation.create( - database=database, - schema=schema, - identifier=identifier, - type=type - ) -%} - {% do return([false, new_relation]) %} -{% endmacro %} - - --- a user-friendly interface into adapter.get_relation -{% macro load_cached_relation(relation) %} - {% do return(adapter.get_relation( - database=relation.database, - schema=relation.schema, - identifier=relation.identifier - )) -%} -{% endmacro %} - --- old name for backwards compatibility -{% macro load_relation(relation) %} - {{ return(load_cached_relation(relation)) }} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/adapters/schema.sql b/core/dbt/adapters/include/global_project/macros/adapters/schema.sql deleted file mode 100644 index 9e0c7559286..00000000000 --- a/core/dbt/adapters/include/global_project/macros/adapters/schema.sql +++ /dev/null @@ -1,20 +0,0 @@ -{% macro create_schema(relation) -%} - {{ adapter.dispatch('create_schema', 'dbt')(relation) }} -{% endmacro %} - -{% macro default__create_schema(relation) -%} - {%- call statement('create_schema') -%} - create schema if not exists {{ relation.without_identifier() }} - {% endcall %} -{% endmacro %} - - -{% macro drop_schema(relation) -%} - {{ adapter.dispatch('drop_schema', 'dbt')(relation) }} -{% endmacro %} - -{% macro default__drop_schema(relation) -%} - {%- call statement('drop_schema') -%} - drop schema if exists {{ relation.without_identifier() }} cascade - {% endcall %} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/adapters/show.sql b/core/dbt/adapters/include/global_project/macros/adapters/show.sql deleted file mode 100644 index 33a93f3db9d..00000000000 --- a/core/dbt/adapters/include/global_project/macros/adapters/show.sql +++ /dev/null @@ -1,22 +0,0 @@ -{% macro get_show_sql(compiled_code, sql_header, limit) -%} - {%- if sql_header -%} - {{ sql_header }} - {%- endif -%} - {%- if limit is not none -%} - {{ get_limit_subquery_sql(compiled_code, limit) }} - {%- else -%} - {{ compiled_code }} - {%- endif -%} -{% endmacro %} - -{% macro get_limit_subquery_sql(sql, limit) %} - {{ adapter.dispatch('get_limit_subquery_sql', 'dbt')(sql, limit) }} -{% endmacro %} - -{% macro default__get_limit_subquery_sql(sql, limit) %} - select * - from ( - {{ sql }} - ) as model_limit_subq - limit {{ limit }} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/adapters/timestamps.sql b/core/dbt/adapters/include/global_project/macros/adapters/timestamps.sql 
deleted file mode 100644 index 64b5fd3ddda..00000000000 --- a/core/dbt/adapters/include/global_project/macros/adapters/timestamps.sql +++ /dev/null @@ -1,44 +0,0 @@ -{%- macro current_timestamp() -%} - {{ adapter.dispatch('current_timestamp', 'dbt')() }} -{%- endmacro -%} - -{% macro default__current_timestamp() -%} - {{ exceptions.raise_not_implemented( - 'current_timestamp macro not implemented for adapter ' + adapter.type()) }} -{%- endmacro %} - -{%- macro snapshot_get_time() -%} - {{ adapter.dispatch('snapshot_get_time', 'dbt')() }} -{%- endmacro -%} - -{% macro default__snapshot_get_time() %} - {{ current_timestamp() }} -{% endmacro %} - ---------------------------------------------- - -/* {# - DEPRECATED: DO NOT USE IN NEW PROJECTS - - This is ONLY to handle the fact that Snowflake + Postgres had functionally - different implementations of {{ dbt.current_timestamp }} + {{ dbt_utils.current_timestamp }} - - If you had a project or package that called {{ dbt_utils.current_timestamp() }}, you should - continue to use this macro to guarantee identical behavior on those two databases. -#} */ - -{% macro current_timestamp_backcompat() %} - {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }} -{% endmacro %} - -{% macro default__current_timestamp_backcompat() %} - current_timestamp::timestamp -{% endmacro %} - -{% macro current_timestamp_in_utc_backcompat() %} - {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }} -{% endmacro %} - -{% macro default__current_timestamp_in_utc_backcompat() %} - {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/adapters/validate_sql.sql b/core/dbt/adapters/include/global_project/macros/adapters/validate_sql.sql deleted file mode 100644 index ba01117ecae..00000000000 --- a/core/dbt/adapters/include/global_project/macros/adapters/validate_sql.sql +++ /dev/null @@ -1,10 +0,0 @@ -{% macro validate_sql(sql) -%} - {{ return(adapter.dispatch('validate_sql', 'dbt')(sql)) }} -{% endmacro %} - -{% macro default__validate_sql(sql) -%} - {% call statement('validate_sql') -%} - explain {{ sql }} - {% endcall %} - {{ return(load_result('validate_sql')) }} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/etc/datetime.sql b/core/dbt/adapters/include/global_project/macros/etc/datetime.sql deleted file mode 100644 index 33c55549794..00000000000 --- a/core/dbt/adapters/include/global_project/macros/etc/datetime.sql +++ /dev/null @@ -1,62 +0,0 @@ -{% macro convert_datetime(date_str, date_fmt) %} - - {% set error_msg -%} - The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}' - {%- endset %} - - {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %} - {{ return(res) }} - -{% endmacro %} - - -{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt="%Y%m%d", out_fmt="%Y%m%d") %} - {% set end_date_str = start_date_str if end_date_str is none else end_date_str %} - - {% set start_date = convert_datetime(start_date_str, in_fmt) %} - {% set end_date = convert_datetime(end_date_str, in_fmt) %} - - {% set day_count = (end_date - start_date).days %} - {% if day_count < 0 %} - {% set msg -%} - Partition start date is after the end date ({{ start_date }}, {{ end_date }}) - {%- endset %} - - {{ exceptions.raise_compiler_error(msg, model) }} - {% endif %} - - {% set date_list = [] %} - {% for i in range(0, 
day_count + 1) %} - {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %} - {% if not out_fmt %} - {% set _ = date_list.append(the_date) %} - {% else %} - {% set _ = date_list.append(the_date.strftime(out_fmt)) %} - {% endif %} - {% endfor %} - - {{ return(date_list) }} -{% endmacro %} - - -{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %} - {% set partition_range = (raw_partition_date | string).split(",") %} - - {% if (partition_range | length) == 1 %} - {% set start_date = partition_range[0] %} - {% set end_date = none %} - {% elif (partition_range | length) == 2 %} - {% set start_date = partition_range[0] %} - {% set end_date = partition_range[1] %} - {% else %} - {{ exceptions.raise_compiler_error("Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: " ~ raw_partition_date) }} - {% endif %} - - {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }} -{% endmacro %} - - -{% macro py_current_timestring() %} - {% set dt = modules.datetime.datetime.now() %} - {% do return(dt.strftime("%Y%m%d%H%M%S%f")) %} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/etc/statement.sql b/core/dbt/adapters/include/global_project/macros/etc/statement.sql deleted file mode 100644 index 8fb98f8c811..00000000000 --- a/core/dbt/adapters/include/global_project/macros/etc/statement.sql +++ /dev/null @@ -1,52 +0,0 @@ -{#-- -The macro override naming method (spark__statement) only works for macros which are called with adapter.dispatch. For macros called directly, you can just redefine them. ---#} -{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%} - {%- if execute: -%} - {%- set compiled_code = caller() -%} - - {%- if name == 'main' -%} - {{ log('Writing runtime {} for node "{}"'.format(language, model['unique_id'])) }} - {{ write(compiled_code) }} - {%- endif -%} - {%- if language == 'sql'-%} - {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%} - {%- elif language == 'python' -%} - {%- set res = submit_python_job(model, compiled_code) -%} - {#-- TODO: What should table be for python models? 
--#} - {%- set table = None -%} - {%- else -%} - {% do exceptions.raise_compiler_error("statement macro didn't get supported language") %} - {%- endif -%} - - {%- if name is not none -%} - {{ store_result(name, response=res, agate_table=table) }} - {%- endif -%} - - {%- endif -%} -{%- endmacro %} - - -{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%} - {%- set sql = caller() -%} - - {%- if name == 'main' -%} - {{ log('Writing runtime SQL for node "{}"'.format(model['unique_id'])) }} - {{ write(sql) }} - {%- endif -%} - - {%- if name is not none -%} - {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }} - {%- endif -%} - -{%- endmacro %} - - -{# a user-friendly interface into statements #} -{% macro run_query(sql) %} - {% call statement("run_query_statement", fetch_result=true, auto_begin=false) %} - {{ sql }} - {% endcall %} - - {% do return(load_result("run_query_statement").table) %} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/generic_test_sql/accepted_values.sql b/core/dbt/adapters/include/global_project/macros/generic_test_sql/accepted_values.sql deleted file mode 100644 index 9b5a0b0e4dd..00000000000 --- a/core/dbt/adapters/include/global_project/macros/generic_test_sql/accepted_values.sql +++ /dev/null @@ -1,27 +0,0 @@ -{% macro default__test_accepted_values(model, column_name, values, quote=True) %} - -with all_values as ( - - select - {{ column_name }} as value_field, - count(*) as n_records - - from {{ model }} - group by {{ column_name }} - -) - -select * -from all_values -where value_field not in ( - {% for value in values -%} - {% if quote -%} - '{{ value }}' - {%- else -%} - {{ value }} - {%- endif -%} - {%- if not loop.last -%},{%- endif %} - {%- endfor %} -) - -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/generic_test_sql/not_null.sql b/core/dbt/adapters/include/global_project/macros/generic_test_sql/not_null.sql deleted file mode 100644 index 73e3401f930..00000000000 --- a/core/dbt/adapters/include/global_project/macros/generic_test_sql/not_null.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% macro default__test_not_null(model, column_name) %} - -{% set column_list = '*' if should_store_failures() else column_name %} - -select {{ column_list }} -from {{ model }} -where {{ column_name }} is null - -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/generic_test_sql/relationships.sql b/core/dbt/adapters/include/global_project/macros/generic_test_sql/relationships.sql deleted file mode 100644 index db779a43e7d..00000000000 --- a/core/dbt/adapters/include/global_project/macros/generic_test_sql/relationships.sql +++ /dev/null @@ -1,23 +0,0 @@ -{% macro default__test_relationships(model, column_name, to, field) %} - -with child as ( - select {{ column_name }} as from_field - from {{ model }} - where {{ column_name }} is not null -), - -parent as ( - select {{ field }} as to_field - from {{ to }} -) - -select - from_field - -from child -left join parent - on child.from_field = parent.to_field - -where parent.to_field is null - -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/generic_test_sql/unique.sql b/core/dbt/adapters/include/global_project/macros/generic_test_sql/unique.sql deleted file mode 100644 index ed18c5c93a3..00000000000 --- a/core/dbt/adapters/include/global_project/macros/generic_test_sql/unique.sql +++ /dev/null @@ -1,12 +0,0 @@ -{% macro default__test_unique(model, 
column_name) %}
-
-select
-    {{ column_name }} as unique_field,
-    count(*) as n_records
-
-from {{ model }}
-where {{ column_name }} is not null
-group by {{ column_name }}
-having count(*) > 1
-
-{% endmacro %}
diff --git a/core/dbt/adapters/include/global_project/macros/get_custom_name/get_custom_alias.sql b/core/dbt/adapters/include/global_project/macros/get_custom_name/get_custom_alias.sql
deleted file mode 100644
index 187d3970d52..00000000000
--- a/core/dbt/adapters/include/global_project/macros/get_custom_name/get_custom_alias.sql
+++ /dev/null
@@ -1,36 +0,0 @@
-
-{#
-    Renders an alias name given a custom alias name. If the custom
-    alias name is none, then the resulting alias is just the filename of the
-    model. If an alias override is specified, then that is used.
-
-    This macro can be overridden in projects to define different semantics
-    for rendering an alias name.
-
-    Arguments:
-    custom_alias_name: The custom alias name specified for a model, or none
-    node: The available node that an alias is being generated for, or none
-
-#}
-
-{% macro generate_alias_name(custom_alias_name=none, node=none) -%}
-    {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}
-{%- endmacro %}
-
-{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}
-
-    {%- if custom_alias_name -%}
-
-        {{ custom_alias_name | trim }}
-
-    {%- elif node.version -%}
-
-        {{ return(node.name ~ "_v" ~ (node.version | replace(".", "_"))) }}
-
-    {%- else -%}
-
-        {{ node.name }}
-
-    {%- endif -%}
-
-{%- endmacro %}
diff --git a/core/dbt/adapters/include/global_project/macros/get_custom_name/get_custom_database.sql b/core/dbt/adapters/include/global_project/macros/get_custom_name/get_custom_database.sql
deleted file mode 100644
index 108dc494404..00000000000
--- a/core/dbt/adapters/include/global_project/macros/get_custom_name/get_custom_database.sql
+++ /dev/null
@@ -1,32 +0,0 @@
-{#
-    Renders a database name given a custom database name. If the custom
-    database name is none, then the resulting database is just the "database"
-    value in the specified target. If a database override is specified, then
-    the custom database name is used instead of the default "database" value.
-
-    This macro can be overridden in projects to define different semantics
-    for rendering a database name.
-
-    Arguments:
-    custom_database_name: The custom database name specified for a model, or none
-    node: The node the database is being generated for
-
-#}
-
-{% macro generate_database_name(custom_database_name=none, node=none) -%}
-    {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}
-{%- endmacro %}
-
-{% macro default__generate_database_name(custom_database_name=none, node=none) -%}
-    {%- set default_database = target.database -%}
-    {%- if custom_database_name is none -%}

-        {{ default_database }}
-
-    {%- else -%}
-
-        {{ custom_database_name }}
-
-    {%- endif -%}
-
-{%- endmacro %}
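Because dbt resolves these naming entry points from the root project before falling back to the global implementations, a project can swap in its own scheme simply by defining a macro of the same name. A minimal sketch, assuming a hypothetical scratch database named dev_scratch for every non-production target:

    {% macro generate_database_name(custom_database_name=none, node=none) -%}
        {#- dev_scratch is a made-up database name; in prod, fall back to the
            shipped default behavior defined above. -#}
        {%- if target.name == 'prod' -%}
            {{ default__generate_database_name(custom_database_name, node) }}
        {%- else -%}
            dev_scratch
        {%- endif -%}
    {%- endmacro %}

The same shadowing pattern applies to generate_alias_name above and generate_schema_name below.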
diff --git a/core/dbt/adapters/include/global_project/macros/get_custom_name/get_custom_schema.sql b/core/dbt/adapters/include/global_project/macros/get_custom_name/get_custom_schema.sql
deleted file mode 100644
index 20348ec3e7b..00000000000
--- a/core/dbt/adapters/include/global_project/macros/get_custom_name/get_custom_schema.sql
+++ /dev/null
@@ -1,60 +0,0 @@
-
-{#
-    Renders a schema name given a custom schema name. If the custom
-    schema name is none, then the resulting schema is just the "schema"
-    value in the specified target. If a schema override is specified, then
-    the resulting schema is the default schema concatenated with the
-    custom schema.
-
-    This macro can be overridden in projects to define different semantics
-    for rendering a schema name.
-
-    Arguments:
-    custom_schema_name: The custom schema name specified for a model, or none
-    node: The node the schema is being generated for
-
-#}
-{% macro generate_schema_name(custom_schema_name=none, node=none) -%}
-    {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}
-{% endmacro %}
-
-{% macro default__generate_schema_name(custom_schema_name, node) -%}
-
-    {%- set default_schema = target.schema -%}
-    {%- if custom_schema_name is none -%}
-
-        {{ default_schema }}
-
-    {%- else -%}
-
-        {{ default_schema }}_{{ custom_schema_name | trim }}
-
-    {%- endif -%}
-
-{%- endmacro %}
-
-
-{#
-    Renders a schema name given a custom schema name. In production, this macro
-    will render out the overridden schema name for a model. Otherwise, the default
-    schema specified in the active target is used.
-
-    Arguments:
-    custom_schema_name: The custom schema name specified for a model, or none
-    node: The node the schema is being generated for
-
-#}
-{% macro generate_schema_name_for_env(custom_schema_name, node) -%}
-
-    {%- set default_schema = target.schema -%}
-    {%- if target.name == 'prod' and custom_schema_name is not none -%}
-
-        {{ custom_schema_name | trim }}
-
-    {%- else -%}
-
-        {{ default_schema }}
-
-    {%- endif -%}
-
-{%- endmacro %}
diff --git a/core/dbt/adapters/include/global_project/macros/materializations/configs.sql b/core/dbt/adapters/include/global_project/macros/materializations/configs.sql
deleted file mode 100644
index d15ccb8e603..00000000000
--- a/core/dbt/adapters/include/global_project/macros/materializations/configs.sql
+++ /dev/null
@@ -1,21 +0,0 @@
-{% macro set_sql_header(config) -%}
-  {{ config.set('sql_header', caller()) }}
-{%- endmacro %}
-
-
-{% macro should_full_refresh() %}
-  {% set config_full_refresh = config.get('full_refresh') %}
-  {% if config_full_refresh is none %}
-    {% set config_full_refresh = flags.FULL_REFRESH %}
-  {% endif %}
-  {% do return(config_full_refresh) %}
-{% endmacro %}
-
-
-{% macro should_store_failures() %}
-  {% set config_store_failures = config.get('store_failures') %}
-  {% if config_store_failures is none %}
-    {% set config_store_failures = flags.STORE_FAILURES %}
-  {% endif %}
-  {% do return(config_store_failures) %}
-{% endmacro %}
diff --git a/core/dbt/adapters/include/global_project/macros/materializations/hooks.sql b/core/dbt/adapters/include/global_project/macros/materializations/hooks.sql
deleted file mode 100644
index 2e198196c4f..00000000000
--- a/core/dbt/adapters/include/global_project/macros/materializations/hooks.sql
+++ /dev/null
@@ -1,35 +0,0 @@
-{% macro run_hooks(hooks, inside_transaction=True) %}
-  {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}
-    {% if not inside_transaction and loop.first %}
-      {% call statement(auto_begin=inside_transaction) %}
-        commit;
-      {% endcall %}
-    {% endif %}
-    {% set rendered = render(hook.get('sql')) | trim %}
-    {% if (rendered | length) > 0 %}
-      {% call statement(auto_begin=inside_transaction) %}
-        {{ rendered }}
-      {% endcall %}
-    {% endif %}
-  {% endfor %}
-{% endmacro %}
-
-
-{% macro make_hook_config(sql, inside_transaction) %}
-  {{ tojson({"sql": sql, "transaction": inside_transaction}) }}
-{% endmacro %}
-
-
-{% macro before_begin(sql) %}
-  {{ make_hook_config(sql, inside_transaction=False) }}
-{% endmacro %} - - -{% macro in_transaction(sql) %} - {{ make_hook_config(sql, inside_transaction=True) }} -{% endmacro %} - - -{% macro after_commit(sql) %} - {{ make_hook_config(sql, inside_transaction=False) }} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/materializations/models/clone/can_clone_table.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/clone/can_clone_table.sql deleted file mode 100644 index 89628bfab35..00000000000 --- a/core/dbt/adapters/include/global_project/macros/materializations/models/clone/can_clone_table.sql +++ /dev/null @@ -1,7 +0,0 @@ -{% macro can_clone_table() %} - {{ return(adapter.dispatch('can_clone_table', 'dbt')()) }} -{% endmacro %} - -{% macro default__can_clone_table() %} - {{ return(False) }} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/materializations/models/clone/clone.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/clone/clone.sql deleted file mode 100644 index 01c8c3930df..00000000000 --- a/core/dbt/adapters/include/global_project/macros/materializations/models/clone/clone.sql +++ /dev/null @@ -1,67 +0,0 @@ -{%- materialization clone, default -%} - - {%- set relations = {'relations': []} -%} - - {%- if not defer_relation -%} - -- nothing to do - {{ log("No relation found in state manifest for " ~ model.unique_id, info=True) }} - {{ return(relations) }} - {%- endif -%} - - {%- set existing_relation = load_cached_relation(this) -%} - - {%- if existing_relation and not flags.FULL_REFRESH -%} - -- noop! - {{ log("Relation " ~ existing_relation ~ " already exists", info=True) }} - {{ return(relations) }} - {%- endif -%} - - {%- set other_existing_relation = load_cached_relation(defer_relation) -%} - - -- If this is a database that can do zero-copy cloning of tables, and the other relation is a table, then this will be a table - -- Otherwise, this will be a view - - {% set can_clone_table = can_clone_table() %} - - {%- if other_existing_relation and other_existing_relation.type == 'table' and can_clone_table -%} - - {%- set target_relation = this.incorporate(type='table') -%} - {% if existing_relation is not none and not existing_relation.is_table %} - {{ log("Dropping relation " ~ existing_relation ~ " because it is of type " ~ existing_relation.type) }} - {{ drop_relation_if_exists(existing_relation) }} - {% endif %} - - -- as a general rule, data platforms that can clone tables can also do atomic 'create or replace' - {% call statement('main') %} - {% if target_relation and defer_relation and target_relation == defer_relation %} - {{ log("Target relation and defer relation are the same, skipping clone for relation: " ~ target_relation) }} - {% else %} - {{ create_or_replace_clone(target_relation, defer_relation) }} - {% endif %} - - {% endcall %} - - {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %} - {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} - {% do persist_docs(target_relation, model) %} - - {{ return({'relations': [target_relation]}) }} - - {%- else -%} - - {%- set target_relation = this.incorporate(type='view') -%} - - -- reuse the view materialization - -- TODO: support actual dispatch for materialization macros - -- Tracking ticket: https://github.com/dbt-labs/dbt-core/issues/7799 - {% set search_name = "materialization_view_" ~ adapter.type() %} - {% if not search_name in context %} - {% set search_name = "materialization_view_default" %} - {% 
endif %} - {% set materialization_macro = context[search_name] %} - {% set relations = materialization_macro() %} - {{ return(relations) }} - - {%- endif -%} - -{%- endmaterialization -%} diff --git a/core/dbt/adapters/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql deleted file mode 100644 index 204e9e874e4..00000000000 --- a/core/dbt/adapters/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql +++ /dev/null @@ -1,7 +0,0 @@ -{% macro create_or_replace_clone(this_relation, defer_relation) %} - {{ return(adapter.dispatch('create_or_replace_clone', 'dbt')(this_relation, defer_relation)) }} -{% endmacro %} - -{% macro default__create_or_replace_clone(this_relation, defer_relation) %} - create or replace table {{ this_relation }} clone {{ defer_relation }} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/column_helpers.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/column_helpers.sql deleted file mode 100644 index 03f9b406a3c..00000000000 --- a/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/column_helpers.sql +++ /dev/null @@ -1,80 +0,0 @@ -/* {# - Helper macros for internal use with incremental materializations. - Use with care if calling elsewhere. -#} */ - - -{% macro get_quoted_csv(column_names) %} - - {% set quoted = [] %} - {% for col in column_names -%} - {%- do quoted.append(adapter.quote(col)) -%} - {%- endfor %} - - {%- set dest_cols_csv = quoted | join(', ') -%} - {{ return(dest_cols_csv) }} - -{% endmacro %} - - -{% macro diff_columns(source_columns, target_columns) %} - - {% set result = [] %} - {% set source_names = source_columns | map(attribute = 'column') | list %} - {% set target_names = target_columns | map(attribute = 'column') | list %} - - {# --check whether the name attribute exists in the target - this does not perform a data type check #} - {% for sc in source_columns %} - {% if sc.name not in target_names %} - {{ result.append(sc) }} - {% endif %} - {% endfor %} - - {{ return(result) }} - -{% endmacro %} - - -{% macro diff_column_data_types(source_columns, target_columns) %} - - {% set result = [] %} - {% for sc in source_columns %} - {% set tc = target_columns | selectattr("name", "equalto", sc.name) | list | first %} - {% if tc %} - {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %} - {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }} - {% endif %} - {% endif %} - {% endfor %} - - {{ return(result) }} - -{% endmacro %} - -{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %} - {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }} -{% endmacro %} - -{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %} - {%- set default_cols = dest_columns | map(attribute="quoted") | list -%} - - {%- if merge_update_columns and merge_exclude_columns -%} - {{ exceptions.raise_compiler_error( - 'Model cannot specify merge_update_columns and merge_exclude_columns. 
Please update model to use only one config'
-        )}}
-    {%- elif merge_update_columns -%}
-        {%- set update_columns = merge_update_columns -%}
-    {%- elif merge_exclude_columns -%}
-        {%- set update_columns = [] -%}
-        {%- for column in dest_columns -%}
-            {% if column.column | lower not in merge_exclude_columns | map("lower") | list %}
-                {%- do update_columns.append(column.quoted) -%}
-            {% endif %}
-        {%- endfor -%}
-    {%- else -%}
-        {%- set update_columns = default_cols -%}
-    {%- endif -%}
-
-    {{ return(update_columns) }}
-
-{% endmacro %}
diff --git a/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/incremental.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/incremental.sql
deleted file mode 100644
index e8ff5c1ea4f..00000000000
--- a/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/incremental.sql
+++ /dev/null
@@ -1,92 +0,0 @@
-
-{% materialization incremental, default -%}
-
-  -- relations
-  {%- set existing_relation = load_cached_relation(this) -%}
-  {%- set target_relation = this.incorporate(type='table') -%}
-  {%- set temp_relation = make_temp_relation(target_relation)-%}
-  {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}
-  {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}
-  {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}
-
-  -- configs
-  {%- set unique_key = config.get('unique_key') -%}
-  {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}
-  {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}
-
-  -- the temp_ and backup_ relations should not already exist in the database; get_relation
-  -- will return None in that case. Otherwise, we get a relation that we can drop
-  -- later, before we try to use this name for the current operation. This has to happen before
-  -- BEGIN, in a separate transaction
-  {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}
-  {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}
-   -- grab the current table's grants config for comparison later on
-  {% set grant_config = config.get('grants') %}
-  {{ drop_relation_if_exists(preexisting_intermediate_relation) }}
-  {{ drop_relation_if_exists(preexisting_backup_relation) }}
-
-  {{ run_hooks(pre_hooks, inside_transaction=False) }}
-
-  -- `BEGIN` happens here:
-  {{ run_hooks(pre_hooks, inside_transaction=True) }}
-
-  {% set to_drop = [] %}
-
-  {% if existing_relation is none %}
-      {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}
-  {% elif full_refresh_mode %}
-      {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}
-      {% set need_swap = true %}
-  {% else %}
-      {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}
-      {% do adapter.expand_target_column_types(
-             from_relation=temp_relation,
-             to_relation=target_relation) %}
-      {#-- Process schema changes. Returns dict of changes if successful.
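-           (For example, with on_schema_change='append_new_columns', columns that
-           are new in the source are first added to the target relation via
-           alter_relation_add_remove_columns; see on_schema_change.sql below.)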
Use source columns for upserting/merging --#} - {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %} - {% if not dest_columns %} - {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %} - {% endif %} - - {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#} - {% set incremental_strategy = config.get('incremental_strategy') or 'default' %} - {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %} - {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %} - {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %} - {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %} - - {% endif %} - - {% call statement("main") %} - {{ build_sql }} - {% endcall %} - - {% if need_swap %} - {% do adapter.rename_relation(target_relation, backup_relation) %} - {% do adapter.rename_relation(intermediate_relation, target_relation) %} - {% do to_drop.append(backup_relation) %} - {% endif %} - - {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %} - {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} - - {% do persist_docs(target_relation, model) %} - - {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %} - {% do create_indexes(target_relation) %} - {% endif %} - - {{ run_hooks(post_hooks, inside_transaction=True) }} - - -- `COMMIT` happens here - {% do adapter.commit() %} - - {% for rel in to_drop %} - {% do adapter.drop_relation(rel) %} - {% endfor %} - - {{ run_hooks(post_hooks, inside_transaction=False) }} - - {{ return({'relations': [target_relation]}) }} - -{%- endmaterialization %} diff --git a/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/is_incremental.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/is_incremental.sql deleted file mode 100644 index 10f45e0238a..00000000000 --- a/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/is_incremental.sql +++ /dev/null @@ -1,13 +0,0 @@ - -{% macro is_incremental() %} - {#-- do not run introspective queries in parsing #} - {% if not execute %} - {{ return(False) }} - {% else %} - {% set relation = adapter.get_relation(this.database, this.schema, this.table) %} - {{ return(relation is not none - and relation.type == 'table' - and model.config.materialized == 'incremental' - and not should_full_refresh()) }} - {% endif %} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/merge.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/merge.sql deleted file mode 100644 index ca972c9f258..00000000000 --- a/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/merge.sql +++ /dev/null @@ -1,131 +0,0 @@ -{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%} - -- back compat for old kwarg name - {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %} - {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }} -{%- endmacro %} - -{% macro 
default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%} - {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%} - {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute="name")) -%} - {%- set merge_update_columns = config.get('merge_update_columns') -%} - {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%} - {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%} - {%- set sql_header = config.get('sql_header', none) -%} - - {% if unique_key %} - {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %} - {% for key in unique_key %} - {% set this_key_match %} - DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }} - {% endset %} - {% do predicates.append(this_key_match) %} - {% endfor %} - {% else %} - {% set unique_key_match %} - DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }} - {% endset %} - {% do predicates.append(unique_key_match) %} - {% endif %} - {% else %} - {% do predicates.append('FALSE') %} - {% endif %} - - {{ sql_header if sql_header is not none }} - - merge into {{ target }} as DBT_INTERNAL_DEST - using {{ source }} as DBT_INTERNAL_SOURCE - on {{"(" ~ predicates | join(") and (") ~ ")"}} - - {% if unique_key %} - when matched then update set - {% for column_name in update_columns -%} - {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }} - {%- if not loop.last %}, {%- endif %} - {%- endfor %} - {% endif %} - - when not matched then insert - ({{ dest_cols_csv }}) - values - ({{ dest_cols_csv }}) - -{% endmacro %} - - -{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%} - {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }} -{%- endmacro %} - -{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%} - - {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute="name")) -%} - - {% if unique_key %} - {% if unique_key is sequence and unique_key is not string %} - delete from {{target }} - using {{ source }} - where ( - {% for key in unique_key %} - {{ source }}.{{ key }} = {{ target }}.{{ key }} - {{ "and " if not loop.last}} - {% endfor %} - {% if incremental_predicates %} - {% for predicate in incremental_predicates %} - and {{ predicate }} - {% endfor %} - {% endif %} - ); - {% else %} - delete from {{ target }} - where ( - {{ unique_key }}) in ( - select ({{ unique_key }}) - from {{ source }} - ) - {%- if incremental_predicates %} - {% for predicate in incremental_predicates %} - and {{ predicate }} - {% endfor %} - {%- endif -%}; - - {% endif %} - {% endif %} - - insert into {{ target }} ({{ dest_cols_csv }}) - ( - select {{ dest_cols_csv }} - from {{ source }} - ) - -{%- endmacro %} - - -{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%} - {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }} -{%- endmacro %} - -{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%} - {#-- The only time include_sql_header is True: --#} - {#-- BigQuery + insert_overwrite strategy + "static" partitions config --#} - {#-- We should consider including the sql header at the 
materialization level instead --#} - - {%- set predicates = [] if predicates is none else [] + predicates -%} - {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute="name")) -%} - {%- set sql_header = config.get('sql_header', none) -%} - - {{ sql_header if sql_header is not none and include_sql_header }} - - merge into {{ target }} as DBT_INTERNAL_DEST - using {{ source }} as DBT_INTERNAL_SOURCE - on FALSE - - when not matched by source - {% if predicates %} and {{ predicates | join(' and ') }} {% endif %} - then delete - - when not matched then insert - ({{ dest_cols_csv }}) - values - ({{ dest_cols_csv }}) - -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/on_schema_change.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/on_schema_change.sql deleted file mode 100644 index 76fe372f41b..00000000000 --- a/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/on_schema_change.sql +++ /dev/null @@ -1,144 +0,0 @@ -{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %} - - {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %} - - {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %} - {% do log(log_message) %} - - {{ return(default) }} - - {% else %} - - {{ return(on_schema_change) }} - - {% endif %} - -{% endmacro %} - - -{% macro check_for_schema_changes(source_relation, target_relation) %} - - {% set schema_changed = False %} - - {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%} - {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%} - {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%} - {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%} - - {% set new_target_types = diff_column_data_types(source_columns, target_columns) %} - - {% if source_not_in_target != [] %} - {% set schema_changed = True %} - {% elif target_not_in_source != [] or new_target_types != [] %} - {% set schema_changed = True %} - {% elif new_target_types != [] %} - {% set schema_changed = True %} - {% endif %} - - {% set changes_dict = { - 'schema_changed': schema_changed, - 'source_not_in_target': source_not_in_target, - 'target_not_in_source': target_not_in_source, - 'source_columns': source_columns, - 'target_columns': target_columns, - 'new_target_types': new_target_types - } %} - - {% set msg %} - In {{ target_relation }}: - Schema changed: {{ schema_changed }} - Source columns not in target: {{ source_not_in_target }} - Target columns not in source: {{ target_not_in_source }} - New column types: {{ new_target_types }} - {% endset %} - - {% do log(msg) %} - - {{ return(changes_dict) }} - -{% endmacro %} - - -{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %} - - {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%} - - {%- if on_schema_change == 'append_new_columns'-%} - {%- if add_to_target_arr | length > 0 -%} - {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%} - {%- endif -%} - - {% elif on_schema_change == 'sync_all_columns' %} - {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%} - {%- set new_target_types = schema_changes_dict['new_target_types'] -%} - - {% if add_to_target_arr | length 
> 0 or remove_from_target_arr | length > 0 %} - {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%} - {% endif %} - - {% if new_target_types != [] %} - {% for ntt in new_target_types %} - {% set column_name = ntt['column_name'] %} - {% set new_type = ntt['new_type'] %} - {% do alter_column_type(target_relation, column_name, new_type) %} - {% endfor %} - {% endif %} - - {% endif %} - - {% set schema_change_message %} - In {{ target_relation }}: - Schema change approach: {{ on_schema_change }} - Columns added: {{ add_to_target_arr }} - Columns removed: {{ remove_from_target_arr }} - Data types changed: {{ new_target_types }} - {% endset %} - - {% do log(schema_change_message) %} - -{% endmacro %} - - -{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %} - - {% if on_schema_change == 'ignore' %} - - {{ return({}) }} - - {% else %} - - {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %} - - {% if schema_changes_dict['schema_changed'] %} - - {% if on_schema_change == 'fail' %} - - {% set fail_msg %} - The source and target schemas on this incremental model are out of sync! - They can be reconciled in several ways: - - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation. - - Re-run the incremental model with `full_refresh: True` to update the target schema. - - update the schema manually and re-run the process. - - Additional troubleshooting context: - Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }} - Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }} - New column types: {{ schema_changes_dict['new_target_types'] }} - {% endset %} - - {% do exceptions.raise_compiler_error(fail_msg) %} - - {# -- unless we ignore, run the sync operation per the config #} - {% else %} - - {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %} - - {% endif %} - - {% endif %} - - {{ return(schema_changes_dict['source_columns']) }} - - {% endif %} - -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/strategies.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/strategies.sql deleted file mode 100644 index 72082ccad32..00000000000 --- a/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/strategies.sql +++ /dev/null @@ -1,79 +0,0 @@ -{% macro get_incremental_append_sql(arg_dict) %} - - {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }} - -{% endmacro %} - - -{% macro default__get_incremental_append_sql(arg_dict) %} - - {% do return(get_insert_into_sql(arg_dict["target_relation"], arg_dict["temp_relation"], arg_dict["dest_columns"])) %} - -{% endmacro %} - - -{# snowflake #} -{% macro get_incremental_delete_insert_sql(arg_dict) %} - - {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }} - -{% endmacro %} - -{% macro default__get_incremental_delete_insert_sql(arg_dict) %} - - {% do return(get_delete_insert_merge_sql(arg_dict["target_relation"], arg_dict["temp_relation"], arg_dict["unique_key"], arg_dict["dest_columns"], arg_dict["incremental_predicates"])) %} - -{% endmacro %} - - -{# snowflake, bigquery, spark #} -{% macro get_incremental_merge_sql(arg_dict) %} - - {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }} - -{% endmacro %} - 
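Taken together, the macros above follow one pattern: a thin get_incremental_*_sql wrapper that calls adapter.dispatch(...) so a warehouse adapter can override the generated SQL, with a default__ implementation as the fallback. Below is a minimal Python sketch of that resolution order; it is illustrative only (the MACROS registry, the adapter name, and the relation names are invented for the example), not dbt's actual implementation, which also searches macro namespaces across packages.

MACROS = {}

def macro(name):
    # register a "macro" under its qualified name
    def register(fn):
        MACROS[name] = fn
        return fn
    return register

def dispatch(macro_name, adapter_type):
    # prefer an adapter-prefixed implementation, fall back to default__
    for candidate in (adapter_type + "__" + macro_name, "default__" + macro_name):
        if candidate in MACROS:
            return MACROS[candidate]
    raise KeyError("no implementation found for " + macro_name)

@macro("default__get_incremental_append_sql")
def default_append(target, temp, dest_columns):
    cols_csv = ", ".join(dest_columns)
    return f"insert into {target} ({cols_csv}) (select {cols_csv} from {temp})"

# usage: no snowflake__ override is registered, so the default wins
impl = dispatch("get_incremental_append_sql", "snowflake")
print(impl("analytics.events", "events__dbt_tmp", ["id", "event_ts"]))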
-{% macro default__get_incremental_merge_sql(arg_dict) %} - - {% do return(get_merge_sql(arg_dict["target_relation"], arg_dict["temp_relation"], arg_dict["unique_key"], arg_dict["dest_columns"], arg_dict["incremental_predicates"])) %} - -{% endmacro %} - - -{% macro get_incremental_insert_overwrite_sql(arg_dict) %} - - {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }} - -{% endmacro %} - -{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %} - - {% do return(get_insert_overwrite_merge_sql(arg_dict["target_relation"], arg_dict["temp_relation"], arg_dict["dest_columns"], arg_dict["incremental_predicates"])) %} - -{% endmacro %} - - -{% macro get_incremental_default_sql(arg_dict) %} - - {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }} - -{% endmacro %} - -{% macro default__get_incremental_default_sql(arg_dict) %} - - {% do return(get_incremental_append_sql(arg_dict)) %} - -{% endmacro %} - - -{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %} - - {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute="name")) -%} - - insert into {{ target_relation }} ({{ dest_cols_csv }}) - ( - select {{ dest_cols_csv }} - from {{ temp_relation }} - ) - -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/materializations/models/materialized_view.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/materialized_view.sql deleted file mode 100644 index 6dc30bf9a9a..00000000000 --- a/core/dbt/adapters/include/global_project/macros/materializations/models/materialized_view.sql +++ /dev/null @@ -1,121 +0,0 @@ -{% materialization materialized_view, default %} - {% set existing_relation = load_cached_relation(this) %} - {% set target_relation = this.incorporate(type=this.MaterializedView) %} - {% set intermediate_relation = make_intermediate_relation(target_relation) %} - {% set backup_relation_type = target_relation.MaterializedView if existing_relation is none else existing_relation.type %} - {% set backup_relation = make_backup_relation(target_relation, backup_relation_type) %} - - {{ materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) }} - - {% set build_sql = materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %} - - {% if build_sql == '' %} - {{ materialized_view_execute_no_op(target_relation) }} - {% else %} - {{ materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) }} - {% endif %} - - {{ materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) }} - - {{ return({'relations': [target_relation]}) }} - -{% endmaterialization %} - - -{% macro materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) %} - - -- backup_relation and intermediate_relation should not already exist in the database - -- it's possible these exist because of a previous run that exited unexpectedly - {% set preexisting_backup_relation = load_cached_relation(backup_relation) %} - {% set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) %} - - -- drop the temp relations if they exist already in the database - {{ drop_relation_if_exists(preexisting_backup_relation) }} - {{ drop_relation_if_exists(preexisting_intermediate_relation) }} - - {{ run_hooks(pre_hooks, inside_transaction=False) }} - -{% endmacro %} - - -{% macro materialized_view_teardown(backup_relation, intermediate_relation, 
post_hooks) %}
-
-    -- drop the temp relations if they exist to leave the database clean for the next run
-    {{ drop_relation_if_exists(backup_relation) }}
-    {{ drop_relation_if_exists(intermediate_relation) }}
-
-    {{ run_hooks(post_hooks, inside_transaction=False) }}
-
-{% endmacro %}
-
-
-{% macro materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}
-
-    {% set full_refresh_mode = should_full_refresh() %}
-
-    -- determine the scenario we're in: create, full_refresh, alter, refresh data
-    {% if existing_relation is none %}
-        {% set build_sql = get_create_materialized_view_as_sql(target_relation, sql) %}
-    {% elif full_refresh_mode or not existing_relation.is_materialized_view %}
-        {% set build_sql = get_replace_sql(existing_relation, target_relation, sql) %}
-    {% else %}
-
-        -- get config options
-        {% set on_configuration_change = config.get('on_configuration_change') %}
-        {% set configuration_changes = get_materialized_view_configuration_changes(existing_relation, config) %}
-
-        {% if configuration_changes is none %}
-            {% set build_sql = refresh_materialized_view(target_relation) %}
-
-        {% elif on_configuration_change == 'apply' %}
-            {% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %}
-        {% elif on_configuration_change == 'continue' %}
-            {% set build_sql = '' %}
-            {{ exceptions.warn("Configuration changes were identified and `on_configuration_change` was set to `continue` for `" ~ target_relation ~ "`") }}
-        {% elif on_configuration_change == 'fail' %}
-            {{ exceptions.raise_fail_fast_error("Configuration changes were identified and `on_configuration_change` was set to `fail` for `" ~ target_relation ~ "`") }}
-
-        {% else %}
-            -- this only happens if the user provides a value other than `apply`, `continue`, or `fail`
-            {{ exceptions.raise_compiler_error("Unexpected configuration scenario") }}
-
-        {% endif %}
-
-    {% endif %}
-
-    {% do return(build_sql) %}
-
-{% endmacro %}
-
-
-{% macro materialized_view_execute_no_op(target_relation) %}
-    {% do store_raw_result(
-        name="main",
-        message="skip " ~ target_relation,
-        code="skip",
-        rows_affected="-1"
-    ) %}
-{% endmacro %}
-
-
-{% macro materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) %}
-
-    -- `BEGIN` happens here:
-    {{ run_hooks(pre_hooks, inside_transaction=True) }}
-
-    {% set grant_config = config.get('grants') %}
-
-    {% call statement(name="main") %}
-        {{ build_sql }}
-    {% endcall %}
-
-    {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}
-    {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}
-
-    {% do persist_docs(target_relation, model) %}
-
-    {{ run_hooks(post_hooks, inside_transaction=True) }}
-
-    {{ adapter.commit() }}
-
-{% endmacro %}
diff --git a/core/dbt/adapters/include/global_project/macros/materializations/models/table.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/table.sql
deleted file mode 100644
index 3d1122efab8..00000000000
--- a/core/dbt/adapters/include/global_project/macros/materializations/models/table.sql
+++ /dev/null
@@ -1,64 +0,0 @@
-{% materialization table, default %}
-
-  {%- set existing_relation = load_cached_relation(this) -%}
-  {%- set target_relation = this.incorporate(type='table') %}
-  {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}
-  -- the intermediate_relation should not already exist in the database; get_relation
-  -- will return None in that case. Otherwise, we get a relation that we can drop
-  -- later, before we try to use this name for the current operation
-  {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}
-  /*
-      See ../view/view.sql for more information about this relation.
-  */
-  {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}
-  {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}
-  -- as above, the backup_relation should not already exist
-  {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}
-  -- grab current tables grants config for comparison later on
-  {% set grant_config = config.get('grants') %}
-
-  -- drop the temp relations if they exist already in the database
-  {{ drop_relation_if_exists(preexisting_intermediate_relation) }}
-  {{ drop_relation_if_exists(preexisting_backup_relation) }}
-
-  {{ run_hooks(pre_hooks, inside_transaction=False) }}
-
-  -- `BEGIN` happens here:
-  {{ run_hooks(pre_hooks, inside_transaction=True) }}
-
-  -- build model
-  {% call statement('main') -%}
-    {{ get_create_table_as_sql(False, intermediate_relation, sql) }}
-  {%- endcall %}
-
-  -- cleanup
-  {% if existing_relation is not none %}
-     /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped
-        since the variable was first set. */
-     {% set existing_relation = load_cached_relation(existing_relation) %}
-     {% if existing_relation is not none %}
-         {{ adapter.rename_relation(existing_relation, backup_relation) }}
-     {% endif %}
-  {% endif %}
-
-  {{ adapter.rename_relation(intermediate_relation, target_relation) }}
-
-  {% do create_indexes(target_relation) %}
-
-  {{ run_hooks(post_hooks, inside_transaction=True) }}
-
-  {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}
-  {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}
-
-  {% do persist_docs(target_relation, model) %}
-
-  -- `COMMIT` happens here
-  {{ adapter.commit() }}
-
-  -- finally, drop the existing/backup relation after the commit
-  {{ drop_relation_if_exists(backup_relation) }}
-
-  {{ run_hooks(post_hooks, inside_transaction=False) }}
-
-  {{ return({'relations': [target_relation]}) }}
-{% endmaterialization %}
diff --git a/core/dbt/adapters/include/global_project/macros/materializations/models/view.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/view.sql
deleted file mode 100644
index 59ac6c4b976..00000000000
--- a/core/dbt/adapters/include/global_project/macros/materializations/models/view.sql
+++ /dev/null
@@ -1,72 +0,0 @@
-{%- materialization view, default -%}
-
-  {%- set existing_relation = load_cached_relation(this) -%}
-  {%- set target_relation = this.incorporate(type='view') -%}
-  {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}
-
-  -- the intermediate_relation should not already exist in the database; get_relation
-  -- will return None in that case. Otherwise, we get a relation that we can drop
-  -- later, before we try to use this name for the current operation
-  {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}
-  /*
-     This relation (probably) doesn't exist yet. If it does exist, it's a leftover from
-     a previous run, and we're going to try to drop it immediately. At the end of this
-     materialization, we're going to rename the "existing_relation" to this identifier,
-     and then we're going to drop it. In order to make sure we run the correct one of:
-       - drop view ...
-       - drop table ...
-
-     We need to set the type of this relation to be the type of the existing_relation, if it exists,
-     or else "view" as a sane default if it does not. Note that if the existing_relation does not
-     exist, then there is nothing to move out of the way and subsequently drop. In that case,
-     this relation will be effectively unused.
-  */
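The table and view materializations above share the same choreography: build under an intermediate name, rename any existing relation to a backup, promote the intermediate, and drop the backup only after the commit. A minimal Python sketch of that swap, simulated over an in-memory dict, follows; the __dbt_tmp/__dbt_backup suffixes and relation names are stand-ins, and real dbt drives each step through adapter.rename_relation and drop_relation_if_exists inside and around a transaction.

def build_with_backup_swap(schema, target, new_definition):
    intermediate = target + "__dbt_tmp"
    backup = target + "__dbt_backup"
    # drop leftovers from a previous run that exited unexpectedly
    schema.pop(intermediate, None)
    schema.pop(backup, None)
    # build the new object under the intermediate name
    schema[intermediate] = new_definition
    # move the existing relation out of the way, if there is one
    if target in schema:
        schema[backup] = schema.pop(target)
    # promote the intermediate, then drop the backup (post-commit in real dbt)
    schema[target] = schema.pop(intermediate)
    schema.pop(backup, None)

relations = {"analytics.orders": "select 1 as legacy"}
build_with_backup_swap(relations, "analytics.orders", "select * from staging.orders")
print(relations)  # {'analytics.orders': 'select * from staging.orders'}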
-  {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}
-  {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}
-  -- as above, the backup_relation should not already exist
-  {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}
-  -- grab current tables grants config for comparison later on
-  {% set grant_config = config.get('grants') %}
-
-  {{ run_hooks(pre_hooks, inside_transaction=False) }}
-
-  -- drop the temp relations if they exist already in the database
-  {{ drop_relation_if_exists(preexisting_intermediate_relation) }}
-  {{ drop_relation_if_exists(preexisting_backup_relation) }}
-
-  -- `BEGIN` happens here:
-  {{ run_hooks(pre_hooks, inside_transaction=True) }}
-
-  -- build model
-  {% call statement('main') -%}
-    {{ get_create_view_as_sql(intermediate_relation, sql) }}
-  {%- endcall %}
-
-  -- cleanup
-  -- move the existing view out of the way
-  {% if existing_relation is not none %}
-     /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped
-        since the variable was first set. */
-     {% set existing_relation = load_cached_relation(existing_relation) %}
-     {% if existing_relation is not none %}
-         {{ adapter.rename_relation(existing_relation, backup_relation) }}
-     {% endif %}
-  {% endif %}
-  {{ adapter.rename_relation(intermediate_relation, target_relation) }}
-
-  {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}
-  {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}
-
-  {% do persist_docs(target_relation, model) %}
-
-  {{ run_hooks(post_hooks, inside_transaction=True) }}
-
-  {{ adapter.commit() }}
-
-  {{ drop_relation_if_exists(backup_relation) }}
-
-  {{ run_hooks(post_hooks, inside_transaction=False) }}
-
-  {{ return({'relations': [target_relation]}) }}
-
-{%- endmaterialization -%}
diff --git a/core/dbt/adapters/include/global_project/macros/materializations/seeds/helpers.sql b/core/dbt/adapters/include/global_project/macros/materializations/seeds/helpers.sql
deleted file mode 100644
index 44dbf370dcb..00000000000
--- a/core/dbt/adapters/include/global_project/macros/materializations/seeds/helpers.sql
+++ /dev/null
@@ -1,128 +0,0 @@
-
-{% macro create_csv_table(model, agate_table) -%}
-  {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}
-{%- endmacro %}
-
-{% macro default__create_csv_table(model, agate_table) %}
-  {%- set column_override = model['config'].get('column_types', {}) -%}
-  {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}
-
-  {% set sql %}
-    create table {{ this.render() }} (
-        {%- for col_name in agate_table.column_names -%}
-            {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}
-            {%- set type = column_override.get(col_name, inferred_type) -%}
-            {%- set column_name = (col_name | string) -%}
-            {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{
type }} {%- if not loop.last -%}, {%- endif -%} - {%- endfor -%} - ) - {% endset %} - - {% call statement('_') -%} - {{ sql }} - {%- endcall %} - - {{ return(sql) }} -{% endmacro %} - - -{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%} - {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }} -{%- endmacro %} - -{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %} - {% set sql = "" %} - {% if full_refresh %} - {{ adapter.drop_relation(old_relation) }} - {% set sql = create_csv_table(model, agate_table) %} - {% else %} - {{ adapter.truncate_relation(old_relation) }} - {% set sql = "truncate table " ~ old_relation %} - {% endif %} - - {{ return(sql) }} -{% endmacro %} - - -{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %} - {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }} -{% endmacro %} - -{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %} - {{ create_or_truncate_sql }}; - -- dbt seed -- - {{ insert_sql }} -{% endmacro %} - - -{% macro get_binding_char() -%} - {{ adapter.dispatch('get_binding_char', 'dbt')() }} -{%- endmacro %} - -{% macro default__get_binding_char() %} - {{ return('%s') }} -{% endmacro %} - - -{% macro get_batch_size() -%} - {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }} -{%- endmacro %} - -{% macro default__get_batch_size() %} - {{ return(10000) }} -{% endmacro %} - - -{% macro get_seed_column_quoted_csv(model, column_names) %} - {%- set quote_seed_column = model['config'].get('quote_columns', None) -%} - {% set quoted = [] %} - {% for col in column_names -%} - {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%} - {%- endfor %} - - {%- set dest_cols_csv = quoted | join(', ') -%} - {{ return(dest_cols_csv) }} -{% endmacro %} - - -{% macro load_csv_rows(model, agate_table) -%} - {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }} -{%- endmacro %} - -{% macro default__load_csv_rows(model, agate_table) %} - - {% set batch_size = get_batch_size() %} - - {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %} - {% set bindings = [] %} - - {% set statements = [] %} - - {% for chunk in agate_table.rows | batch(batch_size) %} - {% set bindings = [] %} - - {% for row in chunk %} - {% do bindings.extend(row) %} - {% endfor %} - - {% set sql %} - insert into {{ this.render() }} ({{ cols_sql }}) values - {% for row in chunk -%} - ({%- for column in agate_table.column_names -%} - {{ get_binding_char() }} - {%- if not loop.last%},{%- endif %} - {%- endfor -%}) - {%- if not loop.last%},{%- endif %} - {%- endfor %} - {% endset %} - - {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %} - - {% if loop.index0 == 0 %} - {% do statements.append(sql) %} - {% endif %} - {% endfor %} - - {# Return SQL so we can render it out into the compiled files #} - {{ return(statements[0]) }} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/materializations/seeds/seed.sql b/core/dbt/adapters/include/global_project/macros/materializations/seeds/seed.sql deleted file mode 100644 index 3b66252da96..00000000000 --- a/core/dbt/adapters/include/global_project/macros/materializations/seeds/seed.sql +++ /dev/null @@ -1,60 +0,0 @@ -{% materialization seed, default %} - - {%- set identifier = model['alias'] -%} - {%- set full_refresh_mode = (should_full_refresh()) -%} - - {%- set old_relation = adapter.get_relation(database=database, 
schema=schema, identifier=identifier) -%} - - {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%} - {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%} - - {%- set grant_config = config.get('grants') -%} - {%- set agate_table = load_agate_table() -%} - -- grab current tables grants config for comparison later on - - {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%} - - {{ run_hooks(pre_hooks, inside_transaction=False) }} - - -- `BEGIN` happens here: - {{ run_hooks(pre_hooks, inside_transaction=True) }} - - -- build model - {% set create_table_sql = "" %} - {% if exists_as_view %} - {{ exceptions.raise_compiler_error("Cannot seed to '{}', it is a view".format(old_relation)) }} - {% elif exists_as_table %} - {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %} - {% else %} - {% set create_table_sql = create_csv_table(model, agate_table) %} - {% endif %} - - {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %} - {% set rows_affected = (agate_table.rows | length) %} - {% set sql = load_csv_rows(model, agate_table) %} - - {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %} - {{ get_csv_sql(create_table_sql, sql) }}; - {% endcall %} - - {% set target_relation = this.incorporate(type='table') %} - - {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %} - {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} - - {% do persist_docs(target_relation, model) %} - - {% if full_refresh_mode or not exists_as_table %} - {% do create_indexes(target_relation) %} - {% endif %} - - {{ run_hooks(post_hooks, inside_transaction=True) }} - - -- `COMMIT` happens here - {{ adapter.commit() }} - - {{ run_hooks(post_hooks, inside_transaction=False) }} - - {{ return({'relations': [target_relation]}) }} - -{% endmaterialization %} diff --git a/core/dbt/adapters/include/global_project/macros/materializations/snapshots/helpers.sql b/core/dbt/adapters/include/global_project/macros/materializations/snapshots/helpers.sql deleted file mode 100644 index 7fd4bfd5186..00000000000 --- a/core/dbt/adapters/include/global_project/macros/materializations/snapshots/helpers.sql +++ /dev/null @@ -1,181 +0,0 @@ -{# - Add new columns to the table if applicable -#} -{% macro create_columns(relation, columns) %} - {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }} -{% endmacro %} - -{% macro default__create_columns(relation, columns) %} - {% for column in columns %} - {% call statement() %} - alter table {{ relation }} add column "{{ column.name }}" {{ column.data_type }}; - {% endcall %} - {% endfor %} -{% endmacro %} - - -{% macro post_snapshot(staging_relation) %} - {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }} -{% endmacro %} - -{% macro default__post_snapshot(staging_relation) %} - {# no-op #} -{% endmacro %} - -{% macro get_true_sql() %} - {{ adapter.dispatch('get_true_sql', 'dbt')() }} -{% endmacro %} - -{% macro default__get_true_sql() %} - {{ return('TRUE') }} -{% endmacro %} - -{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%} - {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }} -{% endmacro %} - -{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%} - - with snapshot_query as ( - - {{ source_sql }} - - ), - - snapshotted_data as ( - - select *, - {{ strategy.unique_key }} as 
dbt_unique_key - - from {{ target_relation }} - where dbt_valid_to is null - - ), - - insertions_source_data as ( - - select - *, - {{ strategy.unique_key }} as dbt_unique_key, - {{ strategy.updated_at }} as dbt_updated_at, - {{ strategy.updated_at }} as dbt_valid_from, - nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to, - {{ strategy.scd_id }} as dbt_scd_id - - from snapshot_query - ), - - updates_source_data as ( - - select - *, - {{ strategy.unique_key }} as dbt_unique_key, - {{ strategy.updated_at }} as dbt_updated_at, - {{ strategy.updated_at }} as dbt_valid_from, - {{ strategy.updated_at }} as dbt_valid_to - - from snapshot_query - ), - - {%- if strategy.invalidate_hard_deletes %} - - deletes_source_data as ( - - select - *, - {{ strategy.unique_key }} as dbt_unique_key - from snapshot_query - ), - {% endif %} - - insertions as ( - - select - 'insert' as dbt_change_type, - source_data.* - - from insertions_source_data as source_data - left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key - where snapshotted_data.dbt_unique_key is null - or ( - snapshotted_data.dbt_unique_key is not null - and ( - {{ strategy.row_changed }} - ) - ) - - ), - - updates as ( - - select - 'update' as dbt_change_type, - source_data.*, - snapshotted_data.dbt_scd_id - - from updates_source_data as source_data - join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key - where ( - {{ strategy.row_changed }} - ) - ) - - {%- if strategy.invalidate_hard_deletes -%} - , - - deletes as ( - - select - 'delete' as dbt_change_type, - source_data.*, - {{ snapshot_get_time() }} as dbt_valid_from, - {{ snapshot_get_time() }} as dbt_updated_at, - {{ snapshot_get_time() }} as dbt_valid_to, - snapshotted_data.dbt_scd_id - - from snapshotted_data - left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key - where source_data.dbt_unique_key is null - ) - {%- endif %} - - select * from insertions - union all - select * from updates - {%- if strategy.invalidate_hard_deletes %} - union all - select * from deletes - {%- endif %} - -{%- endmacro %} - - -{% macro build_snapshot_table(strategy, sql) -%} - {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }} -{% endmacro %} - -{% macro default__build_snapshot_table(strategy, sql) %} - - select *, - {{ strategy.scd_id }} as dbt_scd_id, - {{ strategy.updated_at }} as dbt_updated_at, - {{ strategy.updated_at }} as dbt_valid_from, - nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to - from ( - {{ sql }} - ) sbq - -{% endmacro %} - - -{% macro build_snapshot_staging_table(strategy, sql, target_relation) %} - {% set temp_relation = make_temp_relation(target_relation) %} - - {% set select = snapshot_staging_table(strategy, sql, target_relation) %} - - {% call statement('build_snapshot_staging_relation') %} - {{ create_table_as(True, temp_relation, select) }} - {% endcall %} - - {% do return(temp_relation) %} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/materializations/snapshots/snapshot.sql b/core/dbt/adapters/include/global_project/macros/materializations/snapshots/snapshot.sql deleted file mode 100644 index b0fe9222ab6..00000000000 --- a/core/dbt/adapters/include/global_project/macros/materializations/snapshots/snapshot.sql +++ /dev/null @@ -1,99 +0,0 @@ -{% materialization snapshot, default %} - {%- set config = model['config'] -%} - - {%- set target_table = 
model.get('alias', model.get('name')) -%}
-
-  {%- set strategy_name = config.get('strategy') -%}
-  {%- set unique_key = config.get('unique_key') %}
-  -- grab current tables grants config for comparison later on
-  {%- set grant_config = config.get('grants') -%}
-
-  {% set target_relation_exists, target_relation = get_or_create_relation(
-          database=model.database,
-          schema=model.schema,
-          identifier=target_table,
-          type='table') -%}
-
-  {%- if not target_relation.is_table -%}
-    {% do exceptions.relation_wrong_type(target_relation, 'table') %}
-  {%- endif -%}
-
-
-  {{ run_hooks(pre_hooks, inside_transaction=False) }}
-
-  {{ run_hooks(pre_hooks, inside_transaction=True) }}
-
-  {% set strategy_macro = strategy_dispatch(strategy_name) %}
-  {% set strategy = strategy_macro(model, "snapshotted_data", "source_data", config, target_relation_exists) %}
-
-  {% if not target_relation_exists %}
-
-      {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}
-      {% set final_sql = create_table_as(False, target_relation, build_sql) %}
-
-  {% else %}
-
-      {{ adapter.valid_snapshot_target(target_relation) }}
-
-      {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}
-
-      -- this may no-op if the database does not require column expansion
-      {% do adapter.expand_target_column_types(from_relation=staging_table,
-                                               to_relation=target_relation) %}
-
-      {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)
-                                   | rejectattr('name', 'equalto', 'dbt_change_type')
-                                   | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')
-                                   | rejectattr('name', 'equalto', 'dbt_unique_key')
-                                   | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')
-                                   | list %}
-
-      {% do create_columns(target_relation, missing_columns) %}
-
-      {% set source_columns = adapter.get_columns_in_relation(staging_table)
-                                   | rejectattr('name', 'equalto', 'dbt_change_type')
-                                   | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')
-                                   | rejectattr('name', 'equalto', 'dbt_unique_key')
-                                   | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')
-                                   | list %}
-
-      {% set quoted_source_columns = [] %}
-      {% for column in source_columns %}
-        {% do quoted_source_columns.append(adapter.quote(column.name)) %}
-      {% endfor %}
-
-      {% set final_sql = snapshot_merge_sql(
-              target = target_relation,
-              source = staging_table,
-              insert_cols = quoted_source_columns
-         )
-      %}
-
-  {% endif %}
-
-  {% call statement('main') %}
-      {{ final_sql }}
-  {% endcall %}
-
-  {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}
-  {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}
-
-  {% do persist_docs(target_relation, model) %}
-
-  {% if not target_relation_exists %}
-    {% do create_indexes(target_relation) %}
-  {% endif %}
-
-  {{ run_hooks(post_hooks, inside_transaction=True) }}
-
-  {{ adapter.commit() }}
-
-  {% if staging_table is defined %}
-      {% do post_snapshot(staging_table) %}
-  {% endif %}
-
-  {{ run_hooks(post_hooks, inside_transaction=False) }}
-
-  {{ return({'relations': [target_relation]}) }}
-
-{% endmaterialization %}
diff --git a/core/dbt/adapters/include/global_project/macros/materializations/snapshots/snapshot_merge.sql b/core/dbt/adapters/include/global_project/macros/materializations/snapshots/snapshot_merge.sql
deleted file mode 100644
index 6bc50fd3bf4..00000000000
--- a/core/dbt/adapters/include/global_project/macros/materializations/snapshots/snapshot_merge.sql
+++ /dev/null
@@ -1,25 +0,0 @@
-
-{% macro snapshot_merge_sql(target, source, insert_cols) -%}
-  {{
adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }} -{%- endmacro %} - - -{% macro default__snapshot_merge_sql(target, source, insert_cols) -%} - {%- set insert_cols_csv = insert_cols | join(', ') -%} - - merge into {{ target }} as DBT_INTERNAL_DEST - using {{ source }} as DBT_INTERNAL_SOURCE - on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id - - when matched - and DBT_INTERNAL_DEST.dbt_valid_to is null - and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete') - then update - set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to - - when not matched - and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert' - then insert ({{ insert_cols_csv }}) - values ({{ insert_cols_csv }}) - -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/materializations/snapshots/strategies.sql b/core/dbt/adapters/include/global_project/macros/materializations/snapshots/strategies.sql deleted file mode 100644 index d22cc33636f..00000000000 --- a/core/dbt/adapters/include/global_project/macros/materializations/snapshots/strategies.sql +++ /dev/null @@ -1,174 +0,0 @@ -{# - Dispatch strategies by name, optionally qualified to a package -#} -{% macro strategy_dispatch(name) -%} -{% set original_name = name %} - {% if '.' in name %} - {% set package_name, name = name.split(".", 1) %} - {% else %} - {% set package_name = none %} - {% endif %} - - {% if package_name is none %} - {% set package_context = context %} - {% elif package_name in context %} - {% set package_context = context[package_name] %} - {% else %} - {% set error_msg %} - Could not find package '{{package_name}}', called with '{{original_name}}' - {% endset %} - {{ exceptions.raise_compiler_error(error_msg | trim) }} - {% endif %} - - {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%} - - {% if search_name not in package_context %} - {% set error_msg %} - The specified strategy macro '{{name}}' was not found in package '{{ package_name }}' - {% endset %} - {{ exceptions.raise_compiler_error(error_msg | trim) }} - {% endif %} - {{ return(package_context[search_name]) }} -{%- endmacro %} - - -{# - Create SCD Hash SQL fields cross-db -#} -{% macro snapshot_hash_arguments(args) -%} - {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }} -{%- endmacro %} - -{% macro default__snapshot_hash_arguments(args) -%} - md5({%- for arg in args -%} - coalesce(cast({{ arg }} as varchar ), '') - {% if not loop.last %} || '|' || {% endif %} - {%- endfor -%}) -{%- endmacro %} - -{# - Core strategy definitions -#} -{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %} - {% set primary_key = config['unique_key'] %} - {% set updated_at = config['updated_at'] %} - {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %} - - {#/* - The snapshot relation might not have an {{ updated_at }} value if the - snapshot strategy is changed from `check` to `timestamp`. We - should use a dbt-created column for the comparison in the snapshot - table instead of assuming that the user-supplied {{ updated_at }} - will be present in the historical data. 
- - See https://github.com/dbt-labs/dbt-core/issues/2350 - */ #} - {% set row_changed_expr -%} - ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }}) - {%- endset %} - - {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %} - - {% do return({ - "unique_key": primary_key, - "updated_at": updated_at, - "row_changed": row_changed_expr, - "scd_id": scd_id_expr, - "invalidate_hard_deletes": invalidate_hard_deletes - }) %} -{% endmacro %} - - -{% macro snapshot_string_as_time(timestamp) -%} - {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }} -{%- endmacro %} - -{% macro default__snapshot_string_as_time(timestamp) %} - {% do exceptions.raise_not_implemented( - 'snapshot_string_as_time macro not implemented for adapter '+adapter.type() - ) %} -{% endmacro %} - - -{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%} - {%- if not target_exists -%} - {#-- no table yet -> return whatever the query does --#} - {{ return((false, query_columns)) }} - {%- endif -%} - - {#-- handle any schema changes --#} - {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%} - - {% if check_cols_config == 'all' %} - {%- set query_columns = get_columns_in_query(node['compiled_code']) -%} - - {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %} - {#-- query for proper casing/quoting, to support comparison below --#} - {%- set select_check_cols_from_target -%} - {#-- N.B. The whitespace below is necessary to avoid edge case issue with comments --#} - {#-- See: https://github.com/dbt-labs/dbt-core/issues/6781 --#} - select {{ check_cols_config | join(', ') }} from ( - {{ node['compiled_code'] }} - ) subq - {%- endset -%} - {% set query_columns = get_columns_in_query(select_check_cols_from_target) %} - - {% else %} - {% do exceptions.raise_compiler_error("Invalid value for 'check_cols': " ~ check_cols_config) %} - {% endif %} - - {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%} - {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#} - {%- set ns.column_added = false -%} - - {%- set intersection = [] -%} - {%- for col in query_columns -%} - {%- if col in existing_cols -%} - {%- do intersection.append(adapter.quote(col)) -%} - {%- else -%} - {% set ns.column_added = true %} - {%- endif -%} - {%- endfor -%} - {{ return((ns.column_added, intersection)) }} -{%- endmacro %} - - -{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %} - {% set check_cols_config = config['check_cols'] %} - {% set primary_key = config['unique_key'] %} - {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %} - {% set updated_at = config.get('updated_at', snapshot_get_time()) %} - - {% set column_added = false %} - - {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %} - - {%- set row_changed_expr -%} - ( - {%- if column_added -%} - {{ get_true_sql() }} - {%- else -%} - {%- for col in check_cols -%} - {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }} - or - ( - (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null)) - or - ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null)) - ) - {%- if not loop.last %} or {% endif -%} - {%- endfor -%} - {%- endif -%} - ) - {%- endset %} 
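The row_changed_expr assembled just above needs those explicit "is null" branches because SQL's != returns NULL, not true, when exactly one side is NULL. Below is a minimal Python sketch of the same null-safe comparison, plus the pipe-delimited md5 that default__snapshot_hash_arguments renders for dbt_scd_id; it is illustrative only, the column names and values are invented, and Python's != already treats None asymmetrically so no extra branch is needed.

import hashlib

def row_changed(snapshotted, current, check_cols):
    # changed when values differ, including when exactly one side is NULL (None);
    # the generated SQL needs explicit "is null" checks to get this behavior
    return any(snapshotted.get(col) != current.get(col) for col in check_cols)

def scd_id(*args):
    # md5 over coalesced, pipe-delimited args, like default__snapshot_hash_arguments
    joined = "|".join("" if arg is None else str(arg) for arg in args)
    return hashlib.md5(joined.encode()).hexdigest()

old_row = {"status": "open", "amount": None}
new_row = {"status": "open", "amount": 100}
print(row_changed(old_row, new_row, ["status", "amount"]))  # True: NULL -> 100
print(scd_id(42, "2024-01-23 13:23:40"))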
- - {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %} - - {% do return({ - "unique_key": primary_key, - "updated_at": updated_at, - "row_changed": row_changed_expr, - "scd_id": scd_id_expr, - "invalidate_hard_deletes": invalidate_hard_deletes - }) %} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/materializations/tests/helpers.sql b/core/dbt/adapters/include/global_project/macros/materializations/tests/helpers.sql deleted file mode 100644 index 579def4f17c..00000000000 --- a/core/dbt/adapters/include/global_project/macros/materializations/tests/helpers.sql +++ /dev/null @@ -1,42 +0,0 @@ -{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%} - {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }} -{%- endmacro %} - -{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%} - select - {{ fail_calc }} as failures, - {{ fail_calc }} {{ warn_if }} as should_warn, - {{ fail_calc }} {{ error_if }} as should_error - from ( - {{ main_sql }} - {{ "limit " ~ limit if limit != none }} - ) dbt_internal_test -{%- endmacro %} - - -{% macro get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%} - {{ adapter.dispatch('get_unit_test_sql', 'dbt')(main_sql, expected_fixture_sql, expected_column_names) }} -{%- endmacro %} - -{% macro default__get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%} --- Build actual result given inputs -with dbt_internal_unit_test_actual AS ( - select - {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%},{% endif %}{%- endfor -%}, {{ dbt.string_literal("actual") }} as actual_or_expected - from ( - {{ main_sql }} - ) _dbt_internal_unit_test_actual -), --- Build expected result -dbt_internal_unit_test_expected AS ( - select - {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%}, {% endif %}{%- endfor -%}, {{ dbt.string_literal("expected") }} as actual_or_expected - from ( - {{ expected_fixture_sql }} - ) _dbt_internal_unit_test_expected -) --- Union actual and expected results -select * from dbt_internal_unit_test_actual -union all -select * from dbt_internal_unit_test_expected -{%- endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/materializations/tests/test.sql b/core/dbt/adapters/include/global_project/macros/materializations/tests/test.sql deleted file mode 100644 index ba205a9b295..00000000000 --- a/core/dbt/adapters/include/global_project/macros/materializations/tests/test.sql +++ /dev/null @@ -1,60 +0,0 @@ -{%- materialization test, default -%} - - {% set relations = [] %} - - {% if should_store_failures() %} - - {% set identifier = model['alias'] %} - {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %} - - {% set store_failures_as = config.get('store_failures_as') %} - -- if `--store-failures` is invoked via command line and `store_failures_as` is not set, - -- config.get('store_failures_as', 'table') returns None, not 'table' - {% if store_failures_as == none %}{% set store_failures_as = 'table' %}{% endif %} - {% if store_failures_as not in ['table', 'view'] %} - {{ exceptions.raise_compiler_error( - "'" ~ store_failures_as ~ "' is not a valid value for `store_failures_as`. 
" - "Accepted values are: ['ephemeral', 'table', 'view']" - ) }} - {% endif %} - - {% set target_relation = api.Relation.create( - identifier=identifier, schema=schema, database=database, type=store_failures_as) -%} %} - - {% if old_relation %} - {% do adapter.drop_relation(old_relation) %} - {% endif %} - - {% call statement(auto_begin=True) %} - {{ get_create_sql(target_relation, sql) }} - {% endcall %} - - {% do relations.append(target_relation) %} - - {% set main_sql %} - select * - from {{ target_relation }} - {% endset %} - - {{ adapter.commit() }} - - {% else %} - - {% set main_sql = sql %} - - {% endif %} - - {% set limit = config.get('limit') %} - {% set fail_calc = config.get('fail_calc') %} - {% set warn_if = config.get('warn_if') %} - {% set error_if = config.get('error_if') %} - - {% call statement('main', fetch_result=True) -%} - - {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}} - - {%- endcall %} - - {{ return({'relations': relations}) }} - -{%- endmaterialization -%} diff --git a/core/dbt/adapters/include/global_project/macros/materializations/tests/unit.sql b/core/dbt/adapters/include/global_project/macros/materializations/tests/unit.sql deleted file mode 100644 index 79d5631be56..00000000000 --- a/core/dbt/adapters/include/global_project/macros/materializations/tests/unit.sql +++ /dev/null @@ -1,29 +0,0 @@ -{%- materialization unit, default -%} - - {% set relations = [] %} - - {% set expected_rows = config.get('expected_rows') %} - {% set tested_expected_column_names = expected_rows[0].keys() if (expected_rows | length ) > 0 else get_columns_in_query(sql) %} %} - - {%- set target_relation = this.incorporate(type='table') -%} - {%- set temp_relation = make_temp_relation(target_relation)-%} - {% do run_query(get_create_table_as_sql(True, temp_relation, get_empty_subquery_sql(sql))) %} - {%- set columns_in_relation = adapter.get_columns_in_relation(temp_relation) -%} - {%- set column_name_to_data_types = {} -%} - {%- for column in columns_in_relation -%} - {%- do column_name_to_data_types.update({column.name: column.dtype}) -%} - {%- endfor -%} - - {% set unit_test_sql = get_unit_test_sql(sql, get_expected_sql(expected_rows, column_name_to_data_types), tested_expected_column_names) %} - - {% call statement('main', fetch_result=True) -%} - - {{ unit_test_sql }} - - {%- endcall %} - - {% do adapter.drop_relation(temp_relation) %} - - {{ return({'relations': relations}) }} - -{%- endmaterialization -%} diff --git a/core/dbt/adapters/include/global_project/macros/materializations/tests/where_subquery.sql b/core/dbt/adapters/include/global_project/macros/materializations/tests/where_subquery.sql deleted file mode 100644 index 332c537a3c9..00000000000 --- a/core/dbt/adapters/include/global_project/macros/materializations/tests/where_subquery.sql +++ /dev/null @@ -1,15 +0,0 @@ -{% macro get_where_subquery(relation) -%} - {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %} -{%- endmacro %} - -{% macro default__get_where_subquery(relation) -%} - {% set where = config.get('where', '') %} - {% if where %} - {%- set filtered -%} - (select * from {{ relation }} where {{ where }}) dbt_subquery - {%- endset -%} - {% do return(filtered) %} - {%- else -%} - {% do return(relation) %} - {%- endif -%} -{%- endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/python_model/python.sql b/core/dbt/adapters/include/global_project/macros/python_model/python.sql deleted file mode 100644 index d658ff185b2..00000000000 --- 
a/core/dbt/adapters/include/global_project/macros/python_model/python.sql +++ /dev/null @@ -1,103 +0,0 @@ -{% macro resolve_model_name(input_model_name) %} - {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }} -{% endmacro %} - -{%- macro default__resolve_model_name(input_model_name) -%} - {{ input_model_name | string | replace('"', '\"') }} -{%- endmacro -%} - -{% macro build_ref_function(model) %} - - {%- set ref_dict = {} -%} - {%- for _ref in model.refs -%} - {% set _ref_args = [_ref.get('package'), _ref['name']] if _ref.get('package') else [_ref['name'],] %} - {%- set resolved = ref(*_ref_args, v=_ref.get('version')) -%} - {%- if _ref.get('version') -%} - {% do _ref_args.extend(["v" ~ _ref['version']]) %} - {%- endif -%} - {%- do ref_dict.update({_ref_args | join('.'): resolve_model_name(resolved)}) -%} - {%- endfor -%} - -def ref(*args, **kwargs): - refs = {{ ref_dict | tojson }} - key = '.'.join(args) - version = kwargs.get("v") or kwargs.get("version") - if version: - key += f".v{version}" - dbt_load_df_function = kwargs.get("dbt_load_df_function") - return dbt_load_df_function(refs[key]) - -{% endmacro %} - -{% macro build_source_function(model) %} - - {%- set source_dict = {} -%} - {%- for _source in model.sources -%} - {%- set resolved = source(*_source) -%} - {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%} - {%- endfor -%} - -def source(*args, dbt_load_df_function): - sources = {{ source_dict | tojson }} - key = '.'.join(args) - return dbt_load_df_function(sources[key]) - -{% endmacro %} - -{% macro build_config_dict(model) %} - {%- set config_dict = {} -%} - {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %} - {%- for key, default in config_dbt_used -%} - {# weird type testing with enum, would be much easier to write this logic in Python! 
#}
-  {%- if key == "language" -%}
-    {%- set value = "python" -%}
-  {%- else -%}
-    {%- set value = model.config.get(key, default) -%}
-  {%- endif -%}
-  {%- do config_dict.update({key: value}) -%}
-  {%- endfor -%}
-config_dict = {{ config_dict }}
-{% endmacro %}
-
-{% macro py_script_postfix(model) %}
-# This part is user provided model code
-# you will need to copy the next section to run the code
-# COMMAND ----------
-# this part is dbt logic for get ref work, do not modify
-
-{{ build_ref_function(model) }}
-{{ build_source_function(model) }}
-{{ build_config_dict(model) }}
-
-class config:
-    def __init__(self, *args, **kwargs):
-        pass
-
-    @staticmethod
-    def get(key, default=None):
-        return config_dict.get(key, default)
-
-class this:
-    """dbt.this() or dbt.this.identifier"""
-    database = "{{ this.database }}"
-    schema = "{{ this.schema }}"
-    identifier = "{{ this.identifier }}"
-    {% set this_relation_name = resolve_model_name(this) %}
-    def __repr__(self):
-        return '{{ this_relation_name }}'
-
-
-class dbtObj:
-    def __init__(self, load_df_function) -> None:
-        self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)
-        self.ref = lambda *args, **kwargs: ref(*args, **kwargs, dbt_load_df_function=load_df_function)
-        self.config = config
-        self.this = this()
-        self.is_incremental = {{ is_incremental() }}
-
-# COMMAND ----------
-{{ py_script_comment() }}
-{% endmacro %}
-
-{#-- entry point to add instructions for running compiled_code --#}
-{% macro py_script_comment() %}
-{% endmacro %}
diff --git a/core/dbt/adapters/include/global_project/macros/relations/column/columns_spec_ddl.sql b/core/dbt/adapters/include/global_project/macros/relations/column/columns_spec_ddl.sql
deleted file mode 100644
index 7a56d09e189..00000000000
--- a/core/dbt/adapters/include/global_project/macros/relations/column/columns_spec_ddl.sql
+++ /dev/null
@@ -1,89 +0,0 @@
-{%- macro get_table_columns_and_constraints() -%}
-  {{ adapter.dispatch('get_table_columns_and_constraints', 'dbt')() }}
-{%- endmacro -%}
-
-{% macro default__get_table_columns_and_constraints() -%}
-  {{ return(table_columns_and_constraints()) }}
-{%- endmacro %}
-
-{% macro table_columns_and_constraints() %}
-  {# loop through user_provided_columns to create DDL with data types and constraints #}
-  {%- set raw_column_constraints = adapter.render_raw_columns_constraints(raw_columns=model['columns']) -%}
-  {%- set raw_model_constraints = adapter.render_raw_model_constraints(raw_constraints=model['constraints']) -%}
-  (
-    {% for c in raw_column_constraints -%}
-      {{ c }}{{ "," if not loop.last or raw_model_constraints }}
-    {% endfor %}
-    {% for c in raw_model_constraints -%}
-      {{ c }}{{ "," if not loop.last }}
-    {% endfor -%}
-  )
-{% endmacro %}
-
-{%- macro get_assert_columns_equivalent(sql) -%}
-  {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }}
-{%- endmacro -%}
-
-{% macro default__get_assert_columns_equivalent(sql) -%}
-  {{ return(assert_columns_equivalent(sql)) }}
-{%- endmacro %}
-
-{#
-  Compares the column schema provided by a model's sql file to the column schema provided by a model's schema file.
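The check described here reduces to comparing two formatted (name, data_type) column sets and failing the contract on any mismatch. A rough Python sketch under that reading follows; the tuple representation and error text are invented stand-ins, and the actual macro compares counts and then matches columns one by one, raising exceptions.raise_contract_error with both schemas.

def assert_columns_equivalent(sql_columns, yaml_columns):
    # normalize to lowercase (name, data_type) pairs, like format_columns below
    sql_formatted = {(name.lower(), dtype.lower()) for name, dtype in sql_columns}
    yaml_formatted = {(name.lower(), dtype.lower()) for name, dtype in yaml_columns}
    if sql_formatted != yaml_formatted:
        missing = yaml_formatted - sql_formatted
        unexpected = sql_formatted - yaml_formatted
        raise ValueError(f"contract violated: missing={missing} unexpected={unexpected}")

# passes; change a name or type on either side and it raises
assert_columns_equivalent(
    sql_columns=[("id", "integer"), ("name", "text")],
    yaml_columns=[("id", "integer"), ("name", "text")],
)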
- If any differences in name, data_type or number of columns exist between the two schemas, raises a compiler error -#} -{% macro assert_columns_equivalent(sql) %} - - {#-- First ensure the user has defined 'columns' in yaml specification --#} - {%- set user_defined_columns = model['columns'] -%} - {%- if not user_defined_columns -%} - {{ exceptions.raise_contract_error([], []) }} - {%- endif -%} - - {#-- Obtain the column schema provided by sql file. #} - {%- set sql_file_provided_columns = get_column_schema_from_query(sql, config.get('sql_header', none)) -%} - {#--Obtain the column schema provided by the schema file by generating an 'empty schema' query from the model's columns. #} - {%- set schema_file_provided_columns = get_column_schema_from_query(get_empty_schema_sql(user_defined_columns)) -%} - - {#-- create dictionaries with name and formatted data type and strings for exception #} - {%- set sql_columns = format_columns(sql_file_provided_columns) -%} - {%- set yaml_columns = format_columns(schema_file_provided_columns) -%} - - {%- if sql_columns|length != yaml_columns|length -%} - {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%} - {%- endif -%} - - {%- for sql_col in sql_columns -%} - {%- set yaml_col = [] -%} - {%- for this_col in yaml_columns -%} - {%- if this_col['name'] == sql_col['name'] -%} - {%- do yaml_col.append(this_col) -%} - {%- break -%} - {%- endif -%} - {%- endfor -%} - {%- if not yaml_col -%} - {#-- Column with name not found in yaml #} - {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%} - {%- endif -%} - {%- if sql_col['formatted'] != yaml_col[0]['formatted'] -%} - {#-- Column data types don't match #} - {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%} - {%- endif -%} - {%- endfor -%} - -{% endmacro %} - -{% macro format_columns(columns) %} - {% set formatted_columns = [] %} - {% for column in columns %} - {%- set formatted_column = adapter.dispatch('format_column', 'dbt')(column) -%} - {%- do formatted_columns.append(formatted_column) -%} - {% endfor %} - {{ return(formatted_columns) }} -{% endmacro %} - -{% macro default__format_column(column) -%} - {% set data_type = column.dtype %} - {% set formatted = column.column.lower() ~ " " ~ data_type %} - {{ return({'name': column.name, 'data_type': data_type, 'formatted': formatted}) }} -{%- endmacro -%} diff --git a/core/dbt/adapters/include/global_project/macros/relations/create.sql b/core/dbt/adapters/include/global_project/macros/relations/create.sql deleted file mode 100644 index 3522392d2cb..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/create.sql +++ /dev/null @@ -1,23 +0,0 @@ -{%- macro get_create_sql(relation, sql) -%} - {{- log('Applying CREATE to: ' ~ relation) -}} - {{- adapter.dispatch('get_create_sql', 'dbt')(relation, sql) -}} -{%- endmacro -%} - - -{%- macro default__get_create_sql(relation, sql) -%} - - {%- if relation.is_view -%} - {{ get_create_view_as_sql(relation, sql) }} - - {%- elif relation.is_table -%} - {{ get_create_table_as_sql(False, relation, sql) }} - - {%- elif relation.is_materialized_view -%} - {{ get_create_materialized_view_as_sql(relation, sql) }} - - {%- else -%} - {{- exceptions.raise_compiler_error("`get_create_sql` has not been implemented for: " ~ relation.type ) -}} - - {%- endif -%} - -{%- endmacro -%} diff --git a/core/dbt/adapters/include/global_project/macros/relations/create_backup.sql b/core/dbt/adapters/include/global_project/macros/relations/create_backup.sql deleted file mode 
100644 index f3fe39a6f32..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/create_backup.sql +++ /dev/null @@ -1,17 +0,0 @@ -{%- macro get_create_backup_sql(relation) -%} - {{- log('Applying CREATE BACKUP to: ' ~ relation) -}} - {{- adapter.dispatch('get_create_backup_sql', 'dbt')(relation) -}} -{%- endmacro -%} - - -{%- macro default__get_create_backup_sql(relation) -%} - - -- get the standard backup name - {% set backup_relation = make_backup_relation(relation, relation.type) %} - - -- drop any pre-existing backup - {{ get_drop_sql(backup_relation) }}; - - {{ get_rename_sql(relation, backup_relation.identifier) }} - -{%- endmacro -%} diff --git a/core/dbt/adapters/include/global_project/macros/relations/create_intermediate.sql b/core/dbt/adapters/include/global_project/macros/relations/create_intermediate.sql deleted file mode 100644 index a6bbadc42e1..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/create_intermediate.sql +++ /dev/null @@ -1,17 +0,0 @@ -{%- macro get_create_intermediate_sql(relation, sql) -%} - {{- log('Applying CREATE INTERMEDIATE to: ' ~ relation) -}} - {{- adapter.dispatch('get_create_intermediate_sql', 'dbt')(relation, sql) -}} -{%- endmacro -%} - - -{%- macro default__get_create_intermediate_sql(relation, sql) -%} - - -- get the standard intermediate name - {% set intermediate_relation = make_intermediate_relation(relation) %} - - -- drop any pre-existing intermediate - {{ get_drop_sql(intermediate_relation) }}; - - {{ get_create_sql(intermediate_relation, sql) }} - -{%- endmacro -%} diff --git a/core/dbt/adapters/include/global_project/macros/relations/drop.sql b/core/dbt/adapters/include/global_project/macros/relations/drop.sql deleted file mode 100644 index 58abd14d9aa..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/drop.sql +++ /dev/null @@ -1,41 +0,0 @@ -{%- macro get_drop_sql(relation) -%} - {{- log('Applying DROP to: ' ~ relation) -}} - {{- adapter.dispatch('get_drop_sql', 'dbt')(relation) -}} -{%- endmacro -%} - - -{%- macro default__get_drop_sql(relation) -%} - - {%- if relation.is_view -%} - {{ drop_view(relation) }} - - {%- elif relation.is_table -%} - {{ drop_table(relation) }} - - {%- elif relation.is_materialized_view -%} - {{ drop_materialized_view(relation) }} - - {%- else -%} - drop {{ relation.type }} if exists {{ relation }} cascade - - {%- endif -%} - -{%- endmacro -%} - - -{% macro drop_relation(relation) -%} - {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }} -{% endmacro %} - -{% macro default__drop_relation(relation) -%} - {% call statement('drop_relation', auto_begin=False) -%} - {{ get_drop_sql(relation) }} - {%- endcall %} -{% endmacro %} - - -{% macro drop_relation_if_exists(relation) %} - {% if relation is not none %} - {{ adapter.drop_relation(relation) }} - {% endif %} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/relations/drop_backup.sql b/core/dbt/adapters/include/global_project/macros/relations/drop_backup.sql deleted file mode 100644 index e70f7981e77..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/drop_backup.sql +++ /dev/null @@ -1,14 +0,0 @@ -{%- macro get_drop_backup_sql(relation) -%} - {{- log('Applying DROP BACKUP to: ' ~ relation) -}} - {{- adapter.dispatch('get_drop_backup_sql', 'dbt')(relation) -}} -{%- endmacro -%} - - -{%- macro default__get_drop_backup_sql(relation) -%} - - -- get the standard backup name - {% set backup_relation = make_backup_relation(relation, 
relation.type) %} - - {{ get_drop_sql(backup_relation) }} - -{%- endmacro -%} diff --git a/core/dbt/adapters/include/global_project/macros/relations/materialized_view/alter.sql b/core/dbt/adapters/include/global_project/macros/relations/materialized_view/alter.sql deleted file mode 100644 index 3952ae88919..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/materialized_view/alter.sql +++ /dev/null @@ -1,55 +0,0 @@ -{% macro get_alter_materialized_view_as_sql( - relation, - configuration_changes, - sql, - existing_relation, - backup_relation, - intermediate_relation -) %} - {{- log('Applying ALTER to: ' ~ relation) -}} - {{- adapter.dispatch('get_alter_materialized_view_as_sql', 'dbt')( - relation, - configuration_changes, - sql, - existing_relation, - backup_relation, - intermediate_relation - ) -}} -{% endmacro %} - - -{% macro default__get_alter_materialized_view_as_sql( - relation, - configuration_changes, - sql, - existing_relation, - backup_relation, - intermediate_relation -) %} - {{ exceptions.raise_compiler_error("Materialized views have not been implemented for this adapter.") }} -{% endmacro %} - - -{% macro get_materialized_view_configuration_changes(existing_relation, new_config) %} - /* {# - It's recommended that configuration changes be formatted as follows: - {"<component>": [{"action": "<action>", "context": ...}]} - - For example: - { - "indexes": [ - {"action": "drop", "context": "index_abc"}, - {"action": "create", "context": {"columns": ["column_1", "column_2"], "type": "hash", "unique": True}}, - ], - } - - Either way, `get_materialized_view_configuration_changes` needs to align with `get_alter_materialized_view_as_sql`. - #} */ - {{- log('Determining configuration changes on: ' ~ existing_relation) -}} - {%- do return(adapter.dispatch('get_materialized_view_configuration_changes', 'dbt')(existing_relation, new_config)) -%} -{% endmacro %} - - -{% macro default__get_materialized_view_configuration_changes(existing_relation, new_config) %} - {{ exceptions.raise_compiler_error("Materialized views have not been implemented for this adapter.") }} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/relations/materialized_view/create.sql b/core/dbt/adapters/include/global_project/macros/relations/materialized_view/create.sql deleted file mode 100644 index a0f2614c9ca..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/materialized_view/create.sql +++ /dev/null @@ -1,10 +0,0 @@ -{% macro get_create_materialized_view_as_sql(relation, sql) -%} - {{- adapter.dispatch('get_create_materialized_view_as_sql', 'dbt')(relation, sql) -}} -{%- endmacro %} - - -{% macro default__get_create_materialized_view_as_sql(relation, sql) -%} - {{ exceptions.raise_compiler_error( - "`get_create_materialized_view_as_sql` has not been implemented for this adapter." - ) }} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/relations/materialized_view/drop.sql b/core/dbt/adapters/include/global_project/macros/relations/materialized_view/drop.sql deleted file mode 100644 index e60e1dc24d0..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/materialized_view/drop.sql +++ /dev/null @@ -1,14 +0,0 @@ -{# /* -This was already implemented. Instead of creating a new macro that aligns with the standard, -this was reused and the default was maintained. This gets called by `drop_relation`, which -actually executes the drop, and `get_drop_sql`, which returns the template.
-*/ #} - -{% macro drop_materialized_view(relation) -%} - {{ return(adapter.dispatch('drop_materialized_view', 'dbt')(relation)) }} -{%- endmacro %} - - -{% macro default__drop_materialized_view(relation) -%} - drop materialized view if exists {{ relation }} cascade -{%- endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/relations/materialized_view/refresh.sql b/core/dbt/adapters/include/global_project/macros/relations/materialized_view/refresh.sql deleted file mode 100644 index d6b2732107a..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/materialized_view/refresh.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% macro refresh_materialized_view(relation) %} - {{- log('Applying REFRESH to: ' ~ relation) -}} - {{- adapter.dispatch('refresh_materialized_view', 'dbt')(relation) -}} -{% endmacro %} - - -{% macro default__refresh_materialized_view(relation) %} - {{ exceptions.raise_compiler_error("`refresh_materialized_view` has not been implemented for this adapter.") }} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/relations/materialized_view/rename.sql b/core/dbt/adapters/include/global_project/macros/relations/materialized_view/rename.sql deleted file mode 100644 index abd5babf68e..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/materialized_view/rename.sql +++ /dev/null @@ -1,10 +0,0 @@ -{% macro get_rename_materialized_view_sql(relation, new_name) %} - {{- adapter.dispatch('get_rename_materialized_view_sql', 'dbt')(relation, new_name) -}} -{% endmacro %} - - -{% macro default__get_rename_materialized_view_sql(relation, new_name) %} - {{ exceptions.raise_compiler_error( - "`get_rename_materialized_view_sql` has not been implemented for this adapter." - ) }} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/relations/materialized_view/replace.sql b/core/dbt/adapters/include/global_project/macros/relations/materialized_view/replace.sql deleted file mode 100644 index 0660f86b564..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/materialized_view/replace.sql +++ /dev/null @@ -1,10 +0,0 @@ -{% macro get_replace_materialized_view_sql(relation, sql) %} - {{- adapter.dispatch('get_replace_materialized_view_sql', 'dbt')(relation, sql) -}} -{% endmacro %} - - -{% macro default__get_replace_materialized_view_sql(relation, sql) %} - {{ exceptions.raise_compiler_error( - "`get_replace_materialized_view_sql` has not been implemented for this adapter." 
- ) }} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/relations/rename.sql b/core/dbt/adapters/include/global_project/macros/relations/rename.sql deleted file mode 100644 index d7f3a72e29a..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/rename.sql +++ /dev/null @@ -1,35 +0,0 @@ -{%- macro get_rename_sql(relation, new_name) -%} - {{- log('Applying RENAME to: ' ~ relation) -}} - {{- adapter.dispatch('get_rename_sql', 'dbt')(relation, new_name) -}} -{%- endmacro -%} - - -{%- macro default__get_rename_sql(relation, new_name) -%} - - {%- if relation.is_view -%} - {{ get_rename_view_sql(relation, new_name) }} - - {%- elif relation.is_table -%} - {{ get_rename_table_sql(relation, new_name) }} - - {%- elif relation.is_materialized_view -%} - {{ get_rename_materialized_view_sql(relation, new_name) }} - - {%- else -%} - {{- exceptions.raise_compiler_error("`get_rename_sql` has not been implemented for: " ~ relation.type ) -}} - - {%- endif -%} - -{%- endmacro -%} - - -{% macro rename_relation(from_relation, to_relation) -%} - {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }} -{% endmacro %} - -{% macro default__rename_relation(from_relation, to_relation) -%} - {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %} - {% call statement('rename_relation') -%} - alter table {{ from_relation }} rename to {{ target_name }} - {%- endcall %} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/relations/rename_intermediate.sql b/core/dbt/adapters/include/global_project/macros/relations/rename_intermediate.sql deleted file mode 100644 index 5df238b67d7..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/rename_intermediate.sql +++ /dev/null @@ -1,14 +0,0 @@ -{%- macro get_rename_intermediate_sql(relation) -%} - {{- log('Applying RENAME INTERMEDIATE to: ' ~ relation) -}} - {{- adapter.dispatch('get_rename_intermediate_sql', 'dbt')(relation) -}} -{%- endmacro -%} - - -{%- macro default__get_rename_intermediate_sql(relation) -%} - - -- get the standard intermediate name - {% set intermediate_relation = make_intermediate_relation(relation) %} - - {{ get_rename_sql(intermediate_relation, relation.identifier) }} - -{%- endmacro -%} diff --git a/core/dbt/adapters/include/global_project/macros/relations/replace.sql b/core/dbt/adapters/include/global_project/macros/relations/replace.sql deleted file mode 100644 index adba77dc769..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/replace.sql +++ /dev/null @@ -1,50 +0,0 @@ -{% macro get_replace_sql(existing_relation, target_relation, sql) %} - {{- log('Applying REPLACE to: ' ~ existing_relation) -}} - {{- adapter.dispatch('get_replace_sql', 'dbt')(existing_relation, target_relation, sql) -}} -{% endmacro %} - - -{% macro default__get_replace_sql(existing_relation, target_relation, sql) %} - - {# /* use a create or replace statement if possible */ #} - - {% set is_replaceable = existing_relation.type == target_relation.type and existing_relation.can_be_replaced %} - - {% if is_replaceable and existing_relation.is_view %} - {{ get_replace_view_sql(target_relation, sql) }} - - {% elif is_replaceable and existing_relation.is_table %} - {{ get_replace_table_sql(target_relation, sql) }} - - {% elif is_replaceable and existing_relation.is_materialized_view %} - {{ get_replace_materialized_view_sql(target_relation, sql) }} - - {# /* a create or replace statement is not
possible, so try to stage and/or backup to be safe */ #} - - {# /* create target_relation as an intermediate relation, then swap it out with the existing one using a backup */ #} - {%- elif target_relation.can_be_renamed and existing_relation.can_be_renamed -%} - {{ get_create_intermediate_sql(target_relation, sql) }}; - {{ get_create_backup_sql(existing_relation) }}; - {{ get_rename_intermediate_sql(target_relation) }}; - {{ get_drop_backup_sql(existing_relation) }} - - {# /* create target_relation as an intermediate relation, then swap it out with the existing one without using a backup */ #} - {%- elif target_relation.can_be_renamed -%} - {{ get_create_intermediate_sql(target_relation, sql) }}; - {{ get_drop_sql(existing_relation) }}; - {{ get_rename_intermediate_sql(target_relation) }} - - {# /* create target_relation in place by first backing up the existing relation */ #} - {%- elif existing_relation.can_be_renamed -%} - {{ get_create_backup_sql(existing_relation) }}; - {{ get_create_sql(target_relation, sql) }}; - {{ get_drop_backup_sql(existing_relation) }} - - {# /* no renaming is allowed, so just drop and create */ #} - {%- else -%} - {{ get_drop_sql(existing_relation) }}; - {{ get_create_sql(target_relation, sql) }} - - {%- endif -%} - -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/relations/schema.sql b/core/dbt/adapters/include/global_project/macros/relations/schema.sql deleted file mode 100644 index 55aa596ca9d..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/schema.sql +++ /dev/null @@ -1,8 +0,0 @@ -{% macro drop_schema_named(schema_name) %} - {{ return(adapter.dispatch('drop_schema_named', 'dbt') (schema_name)) }} -{% endmacro %} - -{% macro default__drop_schema_named(schema_name) %} - {% set schema_relation = api.Relation.create(schema=schema_name) %} - {{ adapter.drop_schema(schema_relation) }} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/relations/table/create.sql b/core/dbt/adapters/include/global_project/macros/relations/table/create.sql deleted file mode 100644 index 7b50195b177..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/table/create.sql +++ /dev/null @@ -1,60 +0,0 @@ -{% macro get_create_table_as_sql(temporary, relation, sql) -%} - {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }} -{%- endmacro %} - -{% macro default__get_create_table_as_sql(temporary, relation, sql) -%} - {{ return(create_table_as(temporary, relation, sql)) }} -{% endmacro %} - - -/* {# keep logic under old macro name for backwards compatibility #} */ -{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%} - {# backward compatibility for create_table_as that does not support language #} - {% if language == "sql" %} - {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}} - {% else %} - {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }} - {% endif %} - -{%- endmacro %} - -{% macro default__create_table_as(temporary, relation, sql) -%} - {%- set sql_header = config.get('sql_header', none) -%} - - {{ sql_header if sql_header is not none }} - - create {% if temporary: -%}temporary{%- endif %} table - {{ relation.include(database=(not temporary), schema=(not temporary)) }} - {% set contract_config = config.get('contract') %} - {% if contract_config.enforced and (not temporary) %} - {{ get_assert_columns_equivalent(sql) }} - {{ 
get_table_columns_and_constraints() }} - {%- set sql = get_select_subquery(sql) %} - {% endif %} - as ( - {{ sql }} - ); -{%- endmacro %} - - -{% macro default__get_column_names() %} - {#- loop through user_provided_columns to get column names -#} - {%- set user_provided_columns = model['columns'] -%} - {%- for i in user_provided_columns %} - {%- set col = user_provided_columns[i] -%} - {%- set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] -%} - {{ col_name }}{{ ", " if not loop.last }} - {%- endfor -%} -{% endmacro %} - - -{% macro get_select_subquery(sql) %} - {{ return(adapter.dispatch('get_select_subquery', 'dbt')(sql)) }} -{% endmacro %} - -{% macro default__get_select_subquery(sql) %} - select {{ adapter.dispatch('get_column_names', 'dbt')() }} - from ( - {{ sql }} - ) as model_subq -{%- endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/relations/table/drop.sql b/core/dbt/adapters/include/global_project/macros/relations/table/drop.sql deleted file mode 100644 index 359bab66db5..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/table/drop.sql +++ /dev/null @@ -1,14 +0,0 @@ -{# /* -This was already implemented. Instead of creating a new macro that aligns with the standard, -this was reused and the default was maintained. This gets called by `drop_relation`, which -actually executes the drop, and `get_drop_sql`, which returns the template. -*/ #} - -{% macro drop_table(relation) -%} - {{ return(adapter.dispatch('drop_table', 'dbt')(relation)) }} -{%- endmacro %} - - -{% macro default__drop_table(relation) -%} - drop table if exists {{ relation }} cascade -{%- endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/relations/table/rename.sql b/core/dbt/adapters/include/global_project/macros/relations/table/rename.sql deleted file mode 100644 index 4688780d68d..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/table/rename.sql +++ /dev/null @@ -1,10 +0,0 @@ -{% macro get_rename_table_sql(relation, new_name) %} - {{- adapter.dispatch('get_rename_table_sql', 'dbt')(relation, new_name) -}} -{% endmacro %} - - -{% macro default__get_rename_table_sql(relation, new_name) %} - {{ exceptions.raise_compiler_error( - "`get_rename_table_sql` has not been implemented for this adapter." - ) }} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/relations/table/replace.sql b/core/dbt/adapters/include/global_project/macros/relations/table/replace.sql deleted file mode 100644 index 69bfa2deeb8..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/table/replace.sql +++ /dev/null @@ -1,10 +0,0 @@ -{% macro get_replace_table_sql(relation, sql) %} - {{- adapter.dispatch('get_replace_table_sql', 'dbt')(relation, sql) -}} -{% endmacro %} - - -{% macro default__get_replace_table_sql(relation, sql) %} - {{ exceptions.raise_compiler_error( - "`get_replace_table_sql` has not been implemented for this adapter." 
- ) }} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/relations/view/create.sql b/core/dbt/adapters/include/global_project/macros/relations/view/create.sql deleted file mode 100644 index 41cd196c310..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/view/create.sql +++ /dev/null @@ -1,27 +0,0 @@ -{% macro get_create_view_as_sql(relation, sql) -%} - {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }} -{%- endmacro %} - -{% macro default__get_create_view_as_sql(relation, sql) -%} - {{ return(create_view_as(relation, sql)) }} -{% endmacro %} - - -/* {# keep logic under old name for backwards compatibility #} */ -{% macro create_view_as(relation, sql) -%} - {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }} -{%- endmacro %} - -{% macro default__create_view_as(relation, sql) -%} - {%- set sql_header = config.get('sql_header', none) -%} - - {{ sql_header if sql_header is not none }} - create view {{ relation }} - {% set contract_config = config.get('contract') %} - {% if contract_config.enforced %} - {{ get_assert_columns_equivalent(sql) }} - {%- endif %} - as ( - {{ sql }} - ); -{%- endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/relations/view/drop.sql b/core/dbt/adapters/include/global_project/macros/relations/view/drop.sql deleted file mode 100644 index c905f8da059..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/view/drop.sql +++ /dev/null @@ -1,14 +0,0 @@ -{# /* -This was already implemented. Instead of creating a new macro that aligns with the standard, -this was reused and the default was maintained. This gets called by `drop_relation`, which -actually executes the drop, and `get_drop_sql`, which returns the template. -*/ #} - -{% macro drop_view(relation) -%} - {{ return(adapter.dispatch('drop_view', 'dbt')(relation)) }} -{%- endmacro %} - - -{% macro default__drop_view(relation) -%} - drop view if exists {{ relation }} cascade -{%- endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/relations/view/rename.sql b/core/dbt/adapters/include/global_project/macros/relations/view/rename.sql deleted file mode 100644 index 165dd514e40..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/view/rename.sql +++ /dev/null @@ -1,10 +0,0 @@ -{% macro get_rename_view_sql(relation, new_name) %} - {{- adapter.dispatch('get_rename_view_sql', 'dbt')(relation, new_name) -}} -{% endmacro %} - - -{% macro default__get_rename_view_sql(relation, new_name) %} - {{ exceptions.raise_compiler_error( - "`get_rename_view_sql` has not been implemented for this adapter." - ) }} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/relations/view/replace.sql b/core/dbt/adapters/include/global_project/macros/relations/view/replace.sql deleted file mode 100644 index 1da061347db..00000000000 --- a/core/dbt/adapters/include/global_project/macros/relations/view/replace.sql +++ /dev/null @@ -1,66 +0,0 @@ -{% macro get_replace_view_sql(relation, sql) %} - {{- adapter.dispatch('get_replace_view_sql', 'dbt')(relation, sql) -}} -{% endmacro %} - - -{% macro default__get_replace_view_sql(relation, sql) %} - {{ exceptions.raise_compiler_error( - "`get_replace_view_sql` has not been implemented for this adapter." - ) }} -{% endmacro %} - - -/* {# - Core materialization implementation. 
BigQuery and Snowflake are similar - because both can use `create or replace view` where the resulting view's columns - are not necessarily the same as those of the existing view. On Redshift, this would - result in: ERROR: cannot change number of columns in view - - This implementation is superior to the create_temp, swap_with_existing, drop_old - paradigm because transactions don't run DDL queries atomically on Snowflake. By using - `create or replace view`, the materialization becomes atomic in nature. -#} */ - -{% macro create_or_replace_view() %} - {%- set identifier = model['alias'] -%} - - {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%} - {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%} - - {%- set target_relation = api.Relation.create( - identifier=identifier, schema=schema, database=database, - type='view') -%} - {% set grant_config = config.get('grants') %} - - {{ run_hooks(pre_hooks) }} - - -- If there's a table with the same name and we weren't told to full refresh, - -- that's an error. If we were told to full refresh, drop it. This behavior differs - -- for Snowflake and BigQuery, so multiple dispatch is used. - {%- if old_relation is not none and old_relation.is_table -%} - {{ handle_existing_table(should_full_refresh(), old_relation) }} - {%- endif -%} - - -- build model - {% call statement('main') -%} - {{ get_create_view_as_sql(target_relation, sql) }} - {%- endcall %} - - {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %} - {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} - - {{ run_hooks(post_hooks) }} - - {{ return({'relations': [target_relation]}) }} - -{% endmacro %} - - -{% macro handle_existing_table(full_refresh, old_relation) %} - {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }} -{% endmacro %} - -{% macro default__handle_existing_table(full_refresh, old_relation) %} - {{ log("Dropping relation " ~ old_relation ~ " because it is of type " ~ old_relation.type) }} - {{ adapter.drop_relation(old_relation) }} -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/unit_test_sql/get_fixture_sql.sql b/core/dbt/adapters/include/global_project/macros/unit_test_sql/get_fixture_sql.sql deleted file mode 100644 index 2f90a561d91..00000000000 --- a/core/dbt/adapters/include/global_project/macros/unit_test_sql/get_fixture_sql.sql +++ /dev/null @@ -1,76 +0,0 @@ -{% macro get_fixture_sql(rows, column_name_to_data_types) %} --- Fixture for {{ model.name }} -{% set default_row = {} %} - -{%- if not column_name_to_data_types -%} -{%- set columns_in_relation = adapter.get_columns_in_relation(this) -%} -{%- set column_name_to_data_types = {} -%} -{%- for column in columns_in_relation -%} -{%- do column_name_to_data_types.update({column.name: column.dtype}) -%} -{%- endfor -%} -{%- endif -%} - -{%- if not column_name_to_data_types -%} - {{ exceptions.raise_compiler_error("Not able to get columns for unit test '" ~ model.name ~ "' from relation " ~ this) }} -{%- endif -%} - -{%- for column_name, column_type in column_name_to_data_types.items() -%} - {%- do default_row.update({column_name: (safe_cast("null", column_type) | trim )}) -%} -{%- endfor -%} - -{%- for row in rows -%} -{%- do format_row(row, column_name_to_data_types) -%} -{%- set default_row_copy = default_row.copy() -%} -{%- do default_row_copy.update(row) -%} -select -{%- for column_name, column_value in 
default_row_copy.items() %} {{ column_value }} AS {{ column_name }}{% if not loop.last -%}, {%- endif %} -{%- endfor %} -{%- if not loop.last %} -union all -{% endif %} -{%- endfor -%} - -{%- if (rows | length) == 0 -%} - select - {%- for column_name, column_value in default_row.items() %} {{ column_value }} AS {{ column_name }}{% if not loop.last -%},{%- endif %} - {%- endfor %} - limit 0 -{%- endif -%} -{% endmacro %} - - -{% macro get_expected_sql(rows, column_name_to_data_types) %} - -{%- if (rows | length) == 0 -%} - select * FROM dbt_internal_unit_test_actual - limit 0 -{%- else -%} -{%- for row in rows -%} -{%- do format_row(row, column_name_to_data_types) -%} -select -{%- for column_name, column_value in row.items() %} {{ column_value }} AS {{ column_name }}{% if not loop.last -%}, {%- endif %} -{%- endfor %} -{%- if not loop.last %} -union all -{% endif %} -{%- endfor -%} -{%- endif -%} - -{% endmacro %} - -{%- macro format_row(row, column_name_to_data_types) -%} - -{#-- wrap yaml strings in quotes, apply cast --#} -{%- for column_name, column_value in row.items() -%} -{% set row_update = {column_name: column_value} %} -{%- if column_value is string -%} -{%- set row_update = {column_name: safe_cast(dbt.string_literal(column_value), column_name_to_data_types[column_name]) } -%} -{%- elif column_value is none -%} -{%- set row_update = {column_name: safe_cast('null', column_name_to_data_types[column_name]) } -%} -{%- else -%} -{%- set row_update = {column_name: safe_cast(column_value, column_name_to_data_types[column_name]) } -%} -{%- endif -%} -{%- do row.update(row_update) -%} -{%- endfor -%} - -{%- endmacro -%} diff --git a/core/dbt/adapters/include/global_project/macros/utils/any_value.sql b/core/dbt/adapters/include/global_project/macros/utils/any_value.sql deleted file mode 100644 index a47292524ae..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/any_value.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% macro any_value(expression) -%} - {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }} -{% endmacro %} - -{% macro default__any_value(expression) -%} - - any_value({{ expression }}) - -{%- endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/utils/array_append.sql b/core/dbt/adapters/include/global_project/macros/utils/array_append.sql deleted file mode 100644 index a7d3959b84f..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/array_append.sql +++ /dev/null @@ -1,8 +0,0 @@ -{% macro array_append(array, new_element) -%} - {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }} -{%- endmacro %} - -{# new_element must be the same data type as elements in array to match postgres functionality #} -{% macro default__array_append(array, new_element) -%} - array_append({{ array }}, {{ new_element }}) -{%- endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/utils/array_concat.sql b/core/dbt/adapters/include/global_project/macros/utils/array_concat.sql deleted file mode 100644 index b27ba8d7f3a..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/array_concat.sql +++ /dev/null @@ -1,7 +0,0 @@ -{% macro array_concat(array_1, array_2) -%} - {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }} -{%- endmacro %} - -{% macro default__array_concat(array_1, array_2) -%} - array_cat({{ array_1 }}, {{ array_2 }}) -{%- endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/utils/array_construct.sql 
b/core/dbt/adapters/include/global_project/macros/utils/array_construct.sql deleted file mode 100644 index 2e503a37365..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/array_construct.sql +++ /dev/null @@ -1,12 +0,0 @@ -{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%} - {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }} -{%- endmacro %} - -{# all inputs must be the same data type to match postgres functionality #} -{% macro default__array_construct(inputs, data_type) -%} - {% if inputs|length > 0 %} - array[ {{ inputs|join(' , ') }} ] - {% else %} - array[]::{{data_type}}[] - {% endif %} -{%- endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/utils/bool_or.sql b/core/dbt/adapters/include/global_project/macros/utils/bool_or.sql deleted file mode 100644 index 34e59d999ed..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/bool_or.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% macro bool_or(expression) -%} - {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }} -{% endmacro %} - -{% macro default__bool_or(expression) -%} - - bool_or({{ expression }}) - -{%- endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/utils/cast_bool_to_text.sql b/core/dbt/adapters/include/global_project/macros/utils/cast_bool_to_text.sql deleted file mode 100644 index 5f5c0331623..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/cast_bool_to_text.sql +++ /dev/null @@ -1,7 +0,0 @@ -{% macro cast_bool_to_text(field) %} - {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }} -{% endmacro %} - -{% macro default__cast_bool_to_text(field) %} - cast({{ field }} as {{ api.Column.translate_type('string') }}) -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/utils/concat.sql b/core/dbt/adapters/include/global_project/macros/utils/concat.sql deleted file mode 100644 index 27bf3c9b4e9..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/concat.sql +++ /dev/null @@ -1,7 +0,0 @@ -{% macro concat(fields) -%} - {{ return(adapter.dispatch('concat', 'dbt')(fields)) }} -{%- endmacro %} - -{% macro default__concat(fields) -%} - {{ fields|join(' || ') }} -{%- endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/utils/data_types.sql b/core/dbt/adapters/include/global_project/macros/utils/data_types.sql deleted file mode 100644 index c4508ff3066..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/data_types.sql +++ /dev/null @@ -1,129 +0,0 @@ -{# string ------------------------------------------------- #} - -{%- macro type_string() -%} - {{ return(adapter.dispatch('type_string', 'dbt')()) }} -{%- endmacro -%} - -{% macro default__type_string() %} - {{ return(api.Column.translate_type("string")) }} -{% endmacro %} - --- This will return 'text' by default --- On Postgres + Snowflake, that's equivalent to varchar (no size) --- Redshift will treat that as varchar(256) - - -{# timestamp ------------------------------------------------- #} - -{%- macro type_timestamp() -%} - {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }} -{%- endmacro -%} - -{% macro default__type_timestamp() %} - {{ return(api.Column.translate_type("timestamp")) }} -{% endmacro %} - -/* -POSTGRES -https://www.postgresql.org/docs/current/datatype-datetime.html: -The SQL standard requires that writing just `timestamp` -be equivalent to `timestamp without time zone`, and -PostgreSQL honors that behavior. 
-`timestamptz` is accepted as an abbreviation for `timestamp with time zone`; -this is a PostgreSQL extension. - -SNOWFLAKE -https://docs.snowflake.com/en/sql-reference/data-types-datetime.html#timestamp -The TIMESTAMP_* variation associated with TIMESTAMP is specified by the -TIMESTAMP_TYPE_MAPPING session parameter. The default is TIMESTAMP_NTZ. - -BIGQUERY -TIMESTAMP means 'timestamp with time zone' -DATETIME means 'timestamp without time zone' -TODO: shouldn't this return DATETIME instead of TIMESTAMP, for consistency with other databases? -e.g. dateadd returns a DATETIME - -/* Snowflake: -https://docs.snowflake.com/en/sql-reference/data-types-datetime.html#timestamp -The TIMESTAMP_* variation associated with TIMESTAMP is specified by the TIMESTAMP_TYPE_MAPPING session parameter. The default is TIMESTAMP_NTZ. -*/ - - -{# float ------------------------------------------------- #} - -{%- macro type_float() -%} - {{ return(adapter.dispatch('type_float', 'dbt')()) }} -{%- endmacro -%} - -{% macro default__type_float() %} - {{ return(api.Column.translate_type("float")) }} -{% endmacro %} - -{# numeric ------------------------------------------------- #} - -{%- macro type_numeric() -%} - {{ return(adapter.dispatch('type_numeric', 'dbt')()) }} -{%- endmacro -%} - -/* -This one can't be just translate_type, since precision/scale make it a bit more complicated. - -On most databases, the default (precision, scale) is something like: - Redshift: (18, 0) - Snowflake: (38, 0) - Postgres: (<=131072, 0) - -https://www.postgresql.org/docs/current/datatype-numeric.html: -Specifying NUMERIC without any precision or scale creates an “unconstrained numeric” -column in which numeric values of any length can be stored, up to the implementation limits. -A column of this kind will not coerce input values to any particular scale, -whereas numeric columns with a declared scale will coerce input values to that scale. -(The SQL standard requires a default scale of 0, i.e., coercion to integer precision. -We find this a bit useless. If you're concerned about portability, always specify -the precision and scale explicitly.) -*/ - -{% macro default__type_numeric() %} - {{ return(api.Column.numeric_type("numeric", 28, 6)) }} -{% endmacro %} - - -{# bigint ------------------------------------------------- #} - -{%- macro type_bigint() -%} - {{ return(adapter.dispatch('type_bigint', 'dbt')()) }} -{%- endmacro -%} - --- We don't have a conversion type for 'bigint' in TYPE_LABELS, --- so this actually just returns the string 'bigint' - -{% macro default__type_bigint() %} - {{ return(api.Column.translate_type("bigint")) }} -{% endmacro %} - --- Good news: BigQuery now supports 'bigint' (and 'int') as an alias for 'int64' - -{# int ------------------------------------------------- #} - -{%- macro type_int() -%} - {{ return(adapter.dispatch('type_int', 'dbt')()) }} -{%- endmacro -%} - -{%- macro default__type_int() -%} - {{ return(api.Column.translate_type("integer")) }} -{%- endmacro -%} - --- returns 'int' everywhere, except BigQuery, where it returns 'int64' --- (but BigQuery also now accepts 'int' as a valid alias for 'int64') - -{# bool ------------------------------------------------- #} - -{%- macro type_boolean() -%} - {{ return(adapter.dispatch('type_boolean', 'dbt')()) }} -{%- endmacro -%} - -{%- macro default__type_boolean() -%} - {{ return(api.Column.translate_type("boolean")) }} -{%- endmacro -%} - --- returns 'boolean' everywhere. 
BigQuery accepts 'boolean' as a valid alias for 'bool' diff --git a/core/dbt/adapters/include/global_project/macros/utils/date_spine.sql b/core/dbt/adapters/include/global_project/macros/utils/date_spine.sql deleted file mode 100644 index 833fbcc575b..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/date_spine.sql +++ /dev/null @@ -1,75 +0,0 @@ -{% macro get_intervals_between(start_date, end_date, datepart) -%} - {{ return(adapter.dispatch('get_intervals_between', 'dbt')(start_date, end_date, datepart)) }} -{%- endmacro %} - -{% macro default__get_intervals_between(start_date, end_date, datepart) -%} - {%- call statement('get_intervals_between', fetch_result=True) %} - - select {{ dbt.datediff(start_date, end_date, datepart) }} - - {%- endcall -%} - - {%- set value_list = load_result('get_intervals_between') -%} - - {%- if value_list and value_list['data'] -%} - {%- set values = value_list['data'] | map(attribute=0) | list %} - {{ return(values[0]) }} - {%- else -%} - {{ return(1) }} - {%- endif -%} - -{%- endmacro %} - - - - -{% macro date_spine(datepart, start_date, end_date) %} - {{ return(adapter.dispatch('date_spine', 'dbt')(datepart, start_date, end_date)) }} -{%- endmacro %} - -{% macro default__date_spine(datepart, start_date, end_date) %} - - - {# call as follows: - - date_spine( - "day", - "to_date('01/01/2016', 'mm/dd/yyyy')", - "dbt.dateadd(week, 1, current_date)" - ) #} - - - with rawdata as ( - - {{dbt.generate_series( - dbt.get_intervals_between(start_date, end_date, datepart) - )}} - - ), - - all_periods as ( - - select ( - {{ - dbt.dateadd( - datepart, - "row_number() over (order by 1) - 1", - start_date - ) - }} - ) as date_{{datepart}} - from rawdata - - ), - - filtered as ( - - select * - from all_periods - where date_{{datepart}} <= {{ end_date }} - - ) - - select * from filtered - -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/utils/date_trunc.sql b/core/dbt/adapters/include/global_project/macros/utils/date_trunc.sql deleted file mode 100644 index deadc40864a..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/date_trunc.sql +++ /dev/null @@ -1,7 +0,0 @@ -{% macro date_trunc(datepart, date) -%} - {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }} -{%- endmacro %} - -{% macro default__date_trunc(datepart, date) -%} - date_trunc('{{datepart}}', {{date}}) -{%- endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/utils/dateadd.sql b/core/dbt/adapters/include/global_project/macros/utils/dateadd.sql deleted file mode 100644 index 2e246098fe5..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/dateadd.sql +++ /dev/null @@ -1,14 +0,0 @@ -{% macro dateadd(datepart, interval, from_date_or_timestamp) %} - {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }} -{% endmacro %} - - -{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %} - - dateadd( - {{ datepart }}, - {{ interval }}, - {{ from_date_or_timestamp }} - ) - -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/utils/datediff.sql b/core/dbt/adapters/include/global_project/macros/utils/datediff.sql deleted file mode 100644 index 7d70d331d86..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/datediff.sql +++ /dev/null @@ -1,14 +0,0 @@ -{% macro datediff(first_date, second_date, datepart) %} - {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }} -{% endmacro %} 
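All of the cross-database utility macros in this deleted tree follow the same two-layer pattern seen in `datediff` just above: a thin entry point that calls `adapter.dispatch(...)`, plus a `default__`-prefixed implementation that individual adapters may override. As a minimal sketch of that resolution order — not dbt's actual resolver, which also honors package search order; the `dispatch` helper and the macro table here are invented for illustration:

```python
from typing import Callable, Dict

def dispatch(macro_name: str, adapter_type: str,
             namespace: Dict[str, Callable]) -> Callable:
    """Prefer '<adapter>__<name>' over 'default__<name>', mirroring the
    lookup order adapter.dispatch applies to these macros."""
    for candidate in (f"{adapter_type}__{macro_name}", f"default__{macro_name}"):
        if candidate in namespace:
            return namespace[candidate]
    raise KeyError(f"no implementation of {macro_name!r} for {adapter_type!r}")

# Hypothetical stand-ins for the Jinja macros above.
macros = {
    "default__datediff": lambda part, a, b: f"datediff({part}, {a}, {b})",
    "postgres__datediff": lambda part, a, b: f"(({b})::date - ({a})::date)",  # 'day' grain only
}

print(dispatch("datediff", "postgres", macros)("day", "start_at", "end_at"))
# ((end_at)::date - (start_at)::date)  -- adapter-specific override wins
print(dispatch("datediff", "snowflake", macros)("day", "start_at", "end_at"))
# datediff(day, start_at, end_at)  -- no override, falls back to default__
```

The same fallback explains why so many of the `default__` bodies below simply raise a compiler error: adapters that lack the feature fail loudly rather than emitting invalid SQL.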
- - -{% macro default__datediff(first_date, second_date, datepart) -%} - - datediff( - {{ datepart }}, - {{ first_date }}, - {{ second_date }} - ) - -{%- endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/utils/escape_single_quotes.sql b/core/dbt/adapters/include/global_project/macros/utils/escape_single_quotes.sql deleted file mode 100644 index d86b6131c29..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/escape_single_quotes.sql +++ /dev/null @@ -1,8 +0,0 @@ -{% macro escape_single_quotes(expression) %} - {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }} -{% endmacro %} - -{# /*Default to replacing a single apostrophe with two apostrophes: they're -> they''re*/ #} -{% macro default__escape_single_quotes(expression) -%} -{{ expression | replace("'","''") }} -{%- endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/utils/except.sql b/core/dbt/adapters/include/global_project/macros/utils/except.sql deleted file mode 100644 index 91d54013d80..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/except.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% macro except() %} - {{ return(adapter.dispatch('except', 'dbt')()) }} -{% endmacro %} - -{% macro default__except() %} - - except - -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/utils/generate_series.sql b/core/dbt/adapters/include/global_project/macros/utils/generate_series.sql deleted file mode 100644 index f6a09605af3..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/generate_series.sql +++ /dev/null @@ -1,53 +0,0 @@ -{% macro get_powers_of_two(upper_bound) %} - {{ return(adapter.dispatch('get_powers_of_two', 'dbt')(upper_bound)) }} -{% endmacro %} - -{% macro default__get_powers_of_two(upper_bound) %} - - {% if upper_bound <= 0 %} - {{ exceptions.raise_compiler_error("upper bound must be positive") }} - {% endif %} - - {% for _ in range(1, 100) %} - {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %} - {% endfor %} - -{% endmacro %} - - -{% macro generate_series(upper_bound) %} - {{ return(adapter.dispatch('generate_series', 'dbt')(upper_bound)) }} -{% endmacro %} - -{% macro default__generate_series(upper_bound) %} - - {% set n = dbt.get_powers_of_two(upper_bound) %} - - with p as ( - select 0 as generated_number union all select 1 - ), unioned as ( - - select - - {% for i in range(n) %} - p{{i}}.generated_number * power(2, {{i}}) - {% if not loop.last %} + {% endif %} - {% endfor %} - + 1 - as generated_number - - from - - {% for i in range(n) %} - p as p{{i}} - {% if not loop.last %} cross join {% endif %} - {% endfor %} - - ) - - select * - from unioned - where generated_number <= {{upper_bound}} - order by generated_number - -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/utils/hash.sql b/core/dbt/adapters/include/global_project/macros/utils/hash.sql deleted file mode 100644 index efa12db7ce6..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/hash.sql +++ /dev/null @@ -1,7 +0,0 @@ -{% macro hash(field) -%} - {{ return(adapter.dispatch('hash', 'dbt') (field)) }} -{%- endmacro %} - -{% macro default__hash(field) -%} - md5(cast({{ field }} as {{ api.Column.translate_type('string') }})) -{%- endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/utils/intersect.sql b/core/dbt/adapters/include/global_project/macros/utils/intersect.sql deleted file mode 100644 index 6e8ede00bc2..00000000000 --- 
a/core/dbt/adapters/include/global_project/macros/utils/intersect.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% macro intersect() %} - {{ return(adapter.dispatch('intersect', 'dbt')()) }} -{% endmacro %} - -{% macro default__intersect() %} - - intersect - -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/utils/last_day.sql b/core/dbt/adapters/include/global_project/macros/utils/last_day.sql deleted file mode 100644 index 6a1aa99c045..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/last_day.sql +++ /dev/null @@ -1,15 +0,0 @@ -{% macro last_day(date, datepart) %} - {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }} -{% endmacro %} - -{%- macro default_last_day(date, datepart) -%} - cast( - {{dbt.dateadd('day', '-1', - dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date)) - )}} - as date) -{%- endmacro -%} - -{% macro default__last_day(date, datepart) -%} - {{dbt.default_last_day(date, datepart)}} -{%- endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/utils/length.sql b/core/dbt/adapters/include/global_project/macros/utils/length.sql deleted file mode 100644 index 1b2fd55e725..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/length.sql +++ /dev/null @@ -1,11 +0,0 @@ -{% macro length(expression) -%} - {{ return(adapter.dispatch('length', 'dbt') (expression)) }} -{% endmacro %} - -{% macro default__length(expression) %} - - length( - {{ expression }} - ) - -{%- endmacro -%} diff --git a/core/dbt/adapters/include/global_project/macros/utils/listagg.sql b/core/dbt/adapters/include/global_project/macros/utils/listagg.sql deleted file mode 100644 index f785ca1e682..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/listagg.sql +++ /dev/null @@ -1,30 +0,0 @@ -{% macro listagg(measure, delimiter_text="','", order_by_clause=none, limit_num=none) -%} - {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }} -{%- endmacro %} - -{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%} - - {% if limit_num -%} - array_to_string( - array_slice( - array_agg( - {{ measure }} - ){% if order_by_clause -%} - within group ({{ order_by_clause }}) - {%- endif %} - ,0 - ,{{ limit_num }} - ), - {{ delimiter_text }} - ) - {%- else %} - listagg( - {{ measure }}, - {{ delimiter_text }} - ) - {% if order_by_clause -%} - within group ({{ order_by_clause }}) - {%- endif %} - {%- endif %} - -{%- endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/utils/literal.sql b/core/dbt/adapters/include/global_project/macros/utils/literal.sql deleted file mode 100644 index ccb0b823513..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/literal.sql +++ /dev/null @@ -1,7 +0,0 @@ -{%- macro string_literal(value) -%} - {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }} -{%- endmacro -%} - -{% macro default__string_literal(value) -%} - '{{ value }}' -{%- endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/utils/position.sql b/core/dbt/adapters/include/global_project/macros/utils/position.sql deleted file mode 100644 index dde3ee2bee0..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/position.sql +++ /dev/null @@ -1,11 +0,0 @@ -{% macro position(substring_text, string_text) -%} - {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }} -{% endmacro %} - -{% macro default__position(substring_text, string_text) %} - - 
position( - {{ substring_text }} in {{ string_text }} - ) - -{%- endmacro -%} diff --git a/core/dbt/adapters/include/global_project/macros/utils/replace.sql b/core/dbt/adapters/include/global_project/macros/utils/replace.sql deleted file mode 100644 index 478809f2c54..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/replace.sql +++ /dev/null @@ -1,14 +0,0 @@ -{% macro replace(field, old_chars, new_chars) -%} - {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }} -{% endmacro %} - -{% macro default__replace(field, old_chars, new_chars) %} - - replace( - {{ field }}, - {{ old_chars }}, - {{ new_chars }} - ) - - -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/utils/right.sql b/core/dbt/adapters/include/global_project/macros/utils/right.sql deleted file mode 100644 index 5782a2539df..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/right.sql +++ /dev/null @@ -1,12 +0,0 @@ -{% macro right(string_text, length_expression) -%} - {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }} -{% endmacro %} - -{% macro default__right(string_text, length_expression) %} - - right( - {{ string_text }}, - {{ length_expression }} - ) - -{%- endmacro -%} diff --git a/core/dbt/adapters/include/global_project/macros/utils/safe_cast.sql b/core/dbt/adapters/include/global_project/macros/utils/safe_cast.sql deleted file mode 100644 index 53ee7fd2e97..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/safe_cast.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% macro safe_cast(field, type) %} - {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }} -{% endmacro %} - -{% macro default__safe_cast(field, type) %} - {# most databases don't support this function yet - so we just need to use cast #} - cast({{field}} as {{type}}) -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/macros/utils/split_part.sql b/core/dbt/adapters/include/global_project/macros/utils/split_part.sql deleted file mode 100644 index 766539ac0c1..00000000000 --- a/core/dbt/adapters/include/global_project/macros/utils/split_part.sql +++ /dev/null @@ -1,26 +0,0 @@ -{% macro split_part(string_text, delimiter_text, part_number) %} - {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }} -{% endmacro %} - -{% macro default__split_part(string_text, delimiter_text, part_number) %} - - split_part( - {{ string_text }}, - {{ delimiter_text }}, - {{ part_number }} - ) - -{% endmacro %} - -{% macro _split_part_negative(string_text, delimiter_text, part_number) %} - - split_part( - {{ string_text }}, - {{ delimiter_text }}, - length({{ string_text }}) - - length( - replace({{ string_text }}, {{ delimiter_text }}, '') - ) + 2 + {{ part_number }} - ) - -{% endmacro %} diff --git a/core/dbt/adapters/include/global_project/tests/generic/builtin.sql b/core/dbt/adapters/include/global_project/tests/generic/builtin.sql deleted file mode 100644 index 23a7507fa0a..00000000000 --- a/core/dbt/adapters/include/global_project/tests/generic/builtin.sql +++ /dev/null @@ -1,30 +0,0 @@ -/* {# - Generic tests can be defined in `macros/` or in `tests/generic`. - These four tests are built into the dbt-core global project. - To support extensibility to other adapters and SQL dialects, - they call 'dispatched' macros. 
By default, they will use - the SQL defined in `global_project/macros/generic_test_sql` -#} */ - -{% test unique(model, column_name) %} - {% set macro = adapter.dispatch('test_unique', 'dbt') %} - {{ macro(model, column_name) }} -{% endtest %} - - -{% test not_null(model, column_name) %} - {% set macro = adapter.dispatch('test_not_null', 'dbt') %} - {{ macro(model, column_name) }} -{% endtest %} - - -{% test accepted_values(model, column_name, values, quote=True) %} - {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %} - {{ macro(model, column_name, values, quote) }} -{% endtest %} - - -{% test relationships(model, column_name, to, field) %} - {% set macro = adapter.dispatch('test_relationships', 'dbt') %} - {{ macro(model, column_name, to, field) }} -{% endtest %} diff --git a/core/dbt/adapters/protocol.py b/core/dbt/adapters/protocol.py deleted file mode 100644 index f2de7de50ac..00000000000 --- a/core/dbt/adapters/protocol.py +++ /dev/null @@ -1,166 +0,0 @@ -from dataclasses import dataclass -from typing import ( - Type, - Hashable, - Optional, - ContextManager, - List, - Generic, - TypeVar, - Tuple, - Any, - Dict, -) -from typing_extensions import Protocol - -import agate - -from dbt.adapters.contracts.connection import Connection, AdapterRequiredConfig, AdapterResponse -from dbt.adapters.contracts.macros import MacroResolverProtocol -from dbt.adapters.contracts.relation import Policy, HasQuoting, RelationConfig -from dbt_common.contracts.config.base import BaseConfig -from dbt_common.clients.jinja import MacroProtocol - - -@dataclass -class AdapterConfig(BaseConfig): - pass - - -class ConnectionManagerProtocol(Protocol): - TYPE: str - - -class ColumnProtocol(Protocol): - pass - - -Self = TypeVar("Self", bound="RelationProtocol") - - -class RelationProtocol(Protocol): - @classmethod - def get_default_quote_policy(cls) -> Policy: - ... - - @classmethod - def create_from( - cls: Type[Self], quoting: HasQuoting, relation_config: RelationConfig, **kwargs: Any - ) -> Self: - ... - - -AdapterConfig_T = TypeVar("AdapterConfig_T", bound=AdapterConfig) -ConnectionManager_T = TypeVar("ConnectionManager_T", bound=ConnectionManagerProtocol) -Relation_T = TypeVar("Relation_T", bound=RelationProtocol) -Column_T = TypeVar("Column_T", bound=ColumnProtocol) - - -class MacroContextGeneratorCallable(Protocol): - def __call__( - self, - macro_protocol: MacroProtocol, - config: AdapterRequiredConfig, - macro_resolver: MacroResolverProtocol, - package_name: Optional[str], - ) -> Dict[str, Any]: - ... - - -# TODO CT-211 -class AdapterProtocol( # type: ignore[misc] - Protocol, - Generic[ - AdapterConfig_T, - ConnectionManager_T, - Relation_T, - Column_T, - ], -): - # N.B. Technically these are ClassVars, but mypy doesn't support putting type vars in a - # ClassVar due to the restrictiveness of PEP-526 - # See: https://github.com/python/mypy/issues/5144 - AdapterSpecificConfigs: Type[AdapterConfig_T] - Column: Type[Column_T] - Relation: Type[Relation_T] - ConnectionManager: Type[ConnectionManager_T] - connections: ConnectionManager_T - - def __init__(self, config: AdapterRequiredConfig) -> None: - ... - - def set_macro_resolver(self, macro_resolver: MacroResolverProtocol) -> None: - ... - - def get_macro_resolver(self) -> Optional[MacroResolverProtocol]: - ... - - def clear_macro_resolver(self) -> None: - ... - - def set_macro_context_generator( - self, - macro_context_generator: MacroContextGeneratorCallable, - ) -> None: - ... 
- - @classmethod - def type(cls) -> str: - pass - - def set_query_header(self, query_header_context: Dict[str, Any]) -> None: - ... - - @staticmethod - def get_thread_identifier() -> Hashable: - ... - - def get_thread_connection(self) -> Connection: - ... - - def set_thread_connection(self, conn: Connection) -> None: - ... - - def get_if_exists(self) -> Optional[Connection]: - ... - - def clear_thread_connection(self) -> None: - ... - - def clear_transaction(self) -> None: - ... - - def exception_handler(self, sql: str) -> ContextManager: - ... - - def set_connection_name(self, name: Optional[str] = None) -> Connection: - ... - - def cancel_open(self) -> Optional[List[str]]: - ... - - def open(cls, connection: Connection) -> Connection: - ... - - def release(self) -> None: - ... - - def cleanup_all(self) -> None: - ... - - def begin(self) -> None: - ... - - def commit(self) -> None: - ... - - def close(cls, connection: Connection) -> Connection: - ... - - def commit_if_has_connection(self) -> None: - ... - - def execute( - self, sql: str, auto_begin: bool = False, fetch: bool = False - ) -> Tuple[AdapterResponse, agate.Table]: - ... diff --git a/core/dbt/adapters/reference_keys.py b/core/dbt/adapters/reference_keys.py deleted file mode 100644 index 53a0a9d9819..00000000000 --- a/core/dbt/adapters/reference_keys.py +++ /dev/null @@ -1,37 +0,0 @@ -# this module exists to resolve circular imports with the events module - -from collections import namedtuple -from typing import Any, Optional - - -_ReferenceKey = namedtuple("_ReferenceKey", "database schema identifier") - - -def lowercase(value: Optional[str]) -> Optional[str]: - if value is None: - return None - else: - return value.lower() - - -# For backwards compatibility. New code should use _make_ref_key -def _make_key(relation: Any) -> _ReferenceKey: - return _make_ref_key(relation) - - -def _make_ref_key(relation: Any) -> _ReferenceKey: - """Make _ReferenceKeys with lowercase values for the cache so we don't have - to keep track of quoting - """ - # databases and schemas can both be None - return _ReferenceKey( - lowercase(relation.database), lowercase(relation.schema), lowercase(relation.identifier) - ) - - -def _make_ref_key_dict(relation: Any): - return { - "database": relation.database, - "schema": relation.schema, - "identifier": relation.identifier, - } diff --git a/core/dbt/adapters/relation_configs/README.md b/core/dbt/adapters/relation_configs/README.md deleted file mode 100644 index 6be3bc59d12..00000000000 --- a/core/dbt/adapters/relation_configs/README.md +++ /dev/null @@ -1,25 +0,0 @@ -# RelationConfig -This package serves as an initial abstraction for managing the inspection of existing relations and determining -changes on those relations. It arose from the materialized view work and is currently only supporting -materialized views for Postgres and Redshift as well as dynamic tables for Snowflake. There are three main -classes in this package. - -## RelationConfigBase -This is a very small class that only has a `from_dict()` method and a default `NotImplementedError()`. At some -point this could be replaced by a more robust framework, like `mashumaro` or `pydantic`. - -## RelationConfigChange -This class inherits from `RelationConfigBase` ; however, this can be thought of as a separate class. The subclassing -merely points to the idea that both classes would likely inherit from the same class in a `mashumaro` or -`pydantic` implementation. This class is much more restricted in attribution. 
It should really only -ever need an `action` and a `context`. This can be thought of as being analogous to a web request. You need to -know what you're doing (`action`: 'create' = GET, 'drop' = DELETE, etc.) and the information (`context`) needed -to make the change. In our scenarios, the context tends to be an instance of `RelationConfigBase` corresponding -to the new state. - -## RelationConfigValidationMixin -This mixin provides optional validation mechanics that can be applied to either `RelationConfigBase` or -`RelationConfigChange` subclasses. A validation rule is a combination of a `validation_check`, something -that should evaluate to `True`, and an optional `validation_error`, an instance of `DbtRuntimeError` -that should be raised in the event the `validation_check` fails. While optional, it's recommended that -the `validation_error` be provided for clearer transparency to the end user. diff --git a/core/dbt/adapters/relation_configs/__init__.py b/core/dbt/adapters/relation_configs/__init__.py deleted file mode 100644 index b8c73447a68..00000000000 --- a/core/dbt/adapters/relation_configs/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -from dbt.adapters.relation_configs.config_base import ( # noqa: F401 - RelationConfigBase, - RelationResults, -) -from dbt.adapters.relation_configs.config_change import ( # noqa: F401 - RelationConfigChangeAction, - RelationConfigChange, -) -from dbt.adapters.relation_configs.config_validation import ( # noqa: F401 - RelationConfigValidationMixin, - RelationConfigValidationRule, -) diff --git a/core/dbt/adapters/relation_configs/config_base.py b/core/dbt/adapters/relation_configs/config_base.py deleted file mode 100644 index 57b791939bf..00000000000 --- a/core/dbt/adapters/relation_configs/config_base.py +++ /dev/null @@ -1,44 +0,0 @@ -from dataclasses import dataclass -from typing import Union, Dict - -import agate -from dbt_common.utils import filter_null_values - - -""" -This is what relation metadata from the database looks like. It's a dictionary because there will be -multiple grains of data for a single object. For example, a materialized view in Postgres has base level information, -like name. But it also can have multiple indexes, which needs to be a separate query. It might look like this: - -{ - "base": agate.Row({"table_name": "table_abc", "query": "select * from table_def"}) - "indexes": agate.Table("rows": [ - agate.Row({"name": "index_a", "columns": ["column_a"], "type": "hash", "unique": False}), - agate.Row({"name": "index_b", "columns": ["time_dim_a"], "type": "btree", "unique": False}), - ]) -} -""" -RelationResults = Dict[str, Union[agate.Row, agate.Table]] - - -@dataclass(frozen=True) -class RelationConfigBase: - @classmethod - def from_dict(cls, kwargs_dict) -> "RelationConfigBase": - """ - This assumes the subclass of `RelationConfigBase` is flat, in the sense that no attribute is - itself another subclass of `RelationConfigBase`. If that's not the case, this should be overridden - to manually manage that complexity. - - Args: - kwargs_dict: the dict representation of this instance - - Returns: the `RelationConfigBase` representation associated with the provided dict - """ - return cls(**filter_null_values(kwargs_dict)) # type: ignore - - @classmethod - def _not_implemented_error(cls) -> NotImplementedError: - return NotImplementedError( - "This relation type has not been fully configured for this adapter."
diff --git a/core/dbt/adapters/relation_configs/config_change.py b/core/dbt/adapters/relation_configs/config_change.py
deleted file mode 100644
index 94c0b6eb598..00000000000
--- a/core/dbt/adapters/relation_configs/config_change.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from abc import ABC, abstractmethod
-from dataclasses import dataclass
-from typing import Hashable
-
-from dbt.adapters.relation_configs.config_base import RelationConfigBase
-from dbt_common.dataclass_schema import StrEnum
-
-
-class RelationConfigChangeAction(StrEnum):
-    alter = "alter"
-    create = "create"
-    drop = "drop"
-
-
-@dataclass(frozen=True, eq=True, unsafe_hash=True)  # type: ignore
-class RelationConfigChange(RelationConfigBase, ABC):
-    action: RelationConfigChangeAction
-    context: Hashable  # usually a RelationConfig (e.g. IndexConfig), but not limited to that
-
-    @property
-    @abstractmethod
-    def requires_full_refresh(self) -> bool:
-        raise self._not_implemented_error()
diff --git a/core/dbt/adapters/relation_configs/config_validation.py b/core/dbt/adapters/relation_configs/config_validation.py
deleted file mode 100644
index 9442a60a091..00000000000
--- a/core/dbt/adapters/relation_configs/config_validation.py
+++ /dev/null
@@ -1,57 +0,0 @@
-from dataclasses import dataclass
-from typing import Set, Optional
-
-from dbt_common.exceptions import DbtRuntimeError
-
-
-@dataclass(frozen=True, eq=True, unsafe_hash=True)
-class RelationConfigValidationRule:
-    validation_check: bool
-    validation_error: Optional[DbtRuntimeError]
-
-    @property
-    def default_error(self):
-        return DbtRuntimeError(
-            "There was a validation error in preparing this relation config. "
-            "No additional context was provided by this adapter."
-        )
-
-
-@dataclass(frozen=True)
-class RelationConfigValidationMixin:
-    def __post_init__(self):
-        self.run_validation_rules()
-
-    @property
-    def validation_rules(self) -> Set[RelationConfigValidationRule]:
-        """
-        A set of validation rules to run against the object upon creation.
-
-        A validation rule is a combination of a validation check (bool) and an optional error message.
-
-        This defaults to no validation rules if not implemented. It's recommended to override this with values,
-        but that may not always be necessary.
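-
-        Example override (an illustrative sketch; `self.name` is a hypothetical attribute):
-
-            return {
-                RelationConfigValidationRule(
-                    validation_check=self.name is not None,
-                    validation_error=DbtRuntimeError("A relation name must be provided."),
-                ),
-            }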
-
-        Returns: a set of validation rules
-        """
-        return set()
-
-    def run_validation_rules(self):
-        for validation_rule in self.validation_rules:
-            try:
-                assert validation_rule.validation_check
-            except AssertionError:
-                if validation_rule.validation_error:
-                    raise validation_rule.validation_error
-                else:
-                    raise validation_rule.default_error
-        self.run_child_validation_rules()
-
-    def run_child_validation_rules(self):
-        for attr_value in vars(self).values():
-            if hasattr(attr_value, "validation_rules"):
-                attr_value.run_validation_rules()
-            if isinstance(attr_value, set):
-                for member in attr_value:
-                    if hasattr(member, "validation_rules"):
-                        member.run_validation_rules()
diff --git a/core/dbt/adapters/sql/__init__.py b/core/dbt/adapters/sql/__init__.py
deleted file mode 100644
index 3535806364d..00000000000
--- a/core/dbt/adapters/sql/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# these are all just exports, #noqa them so flake8 will be happy
-from dbt.adapters.sql.connections import SQLConnectionManager  # noqa
-from dbt.adapters.sql.impl import SQLAdapter  # noqa
diff --git a/core/dbt/adapters/sql/connections.py b/core/dbt/adapters/sql/connections.py
deleted file mode 100644
index 324a84ff020..00000000000
--- a/core/dbt/adapters/sql/connections.py
+++ /dev/null
@@ -1,184 +0,0 @@
-import abc
-import time
-from typing import List, Optional, Tuple, Any, Iterable, Dict
-
-import agate
-
-from dbt.adapters.events.types import ConnectionUsed, SQLQuery, SQLCommit, SQLQueryStatus
-import dbt_common.clients.agate_helper
-import dbt_common.exceptions
-from dbt.adapters.base import BaseConnectionManager
-from dbt.adapters.contracts.connection import Connection, ConnectionState, AdapterResponse
-from dbt_common.events.functions import fire_event
-from dbt_common.events.contextvars import get_node_info
-from dbt_common.utils import cast_to_str
-
-
-class SQLConnectionManager(BaseConnectionManager):
-    """The default connection manager with some common SQL methods implemented.
-
-    Methods to implement:
-    - exception_handler
-    - cancel
-    - get_response
-    - open
-    """
-
-    @abc.abstractmethod
-    def cancel(self, connection: Connection):
-        """Cancel the given connection."""
-        raise dbt_common.exceptions.base.NotImplementedError(
-            "`cancel` is not implemented for this adapter!"
-        )
-
-    def cancel_open(self) -> List[str]:
-        names = []
-        this_connection = self.get_if_exists()
-        with self.lock:
-            for connection in self.thread_connections.values():
-                if connection is this_connection:
-                    continue
-
-                # if the connection failed, the handle will be None so we have
-                # nothing to cancel.
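-                # also skip connections that never made it to an OPEN state.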
-                if connection.handle is not None and connection.state == ConnectionState.OPEN:
-                    self.cancel(connection)
-                    if connection.name is not None:
-                        names.append(connection.name)
-        return names
-
-    def add_query(
-        self,
-        sql: str,
-        auto_begin: bool = True,
-        bindings: Optional[Any] = None,
-        abridge_sql_log: bool = False,
-    ) -> Tuple[Connection, Any]:
-        connection = self.get_thread_connection()
-        if auto_begin and connection.transaction_open is False:
-            self.begin()
-        fire_event(
-            ConnectionUsed(
-                conn_type=self.TYPE,
-                conn_name=cast_to_str(connection.name),
-                node_info=get_node_info(),
-            )
-        )
-
-        with self.exception_handler(sql):
-            if abridge_sql_log:
-                log_sql = "{}...".format(sql[:512])
-            else:
-                log_sql = sql
-
-            fire_event(
-                SQLQuery(
-                    conn_name=cast_to_str(connection.name), sql=log_sql, node_info=get_node_info()
-                )
-            )
-            pre = time.time()
-
-            cursor = connection.handle.cursor()
-            cursor.execute(sql, bindings)
-
-            fire_event(
-                SQLQueryStatus(
-                    status=str(self.get_response(cursor)),
-                    elapsed=round((time.time() - pre)),
-                    node_info=get_node_info(),
-                )
-            )
-
-            return connection, cursor
-
-    @classmethod
-    @abc.abstractmethod
-    def get_response(cls, cursor: Any) -> AdapterResponse:
-        """Get the status of the cursor."""
-        raise dbt_common.exceptions.base.NotImplementedError(
-            "`get_response` is not implemented for this adapter!"
-        )
-
-    @classmethod
-    def process_results(
-        cls, column_names: Iterable[str], rows: Iterable[Any]
-    ) -> List[Dict[str, Any]]:
-        # TODO CT-211
-        unique_col_names = dict()  # type: ignore[var-annotated]
-        # TODO CT-211
-        for idx in range(len(column_names)):  # type: ignore[arg-type]
-            # TODO CT-211
-            col_name = column_names[idx]  # type: ignore[index]
-            if col_name in unique_col_names:
-                unique_col_names[col_name] += 1
-                # TODO CT-211
-                column_names[idx] = f"{col_name}_{unique_col_names[col_name]}"  # type: ignore[index] # noqa
-            else:
-                # TODO CT-211
-                unique_col_names[column_names[idx]] = 1  # type: ignore[index]
-        return [dict(zip(column_names, row)) for row in rows]
-
-    @classmethod
-    def get_result_from_cursor(cls, cursor: Any, limit: Optional[int]) -> agate.Table:
-        data: List[Any] = []
-        column_names: List[str] = []
-
-        if cursor.description is not None:
-            column_names = [col[0] for col in cursor.description]
-            if limit:
-                rows = cursor.fetchmany(limit)
-            else:
-                rows = cursor.fetchall()
-            data = cls.process_results(column_names, rows)
-
-        return dbt_common.clients.agate_helper.table_from_data_flat(data, column_names)
-
-    def execute(
-        self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None
-    ) -> Tuple[AdapterResponse, agate.Table]:
-        sql = self._add_query_comment(sql)
-        _, cursor = self.add_query(sql, auto_begin)
-        response = self.get_response(cursor)
-        if fetch:
-            table = self.get_result_from_cursor(cursor, limit)
-        else:
-            table = dbt_common.clients.agate_helper.empty_table()
-        return response, table
-
-    def add_begin_query(self):
-        return self.add_query("BEGIN", auto_begin=False)
-
-    def add_commit_query(self):
-        return self.add_query("COMMIT", auto_begin=False)
-
-    def add_select_query(self, sql: str) -> Tuple[Connection, Any]:
-        sql = self._add_query_comment(sql)
-        return self.add_query(sql, auto_begin=False)
-
-    def begin(self):
-        connection = self.get_thread_connection()
-        if connection.transaction_open is True:
-            raise dbt_common.exceptions.DbtInternalError(
                'Tried to begin a new transaction on connection "{}", but '
-                "it already had one open!".format(connection.name)
-            )
-
-        self.add_begin_query()
-
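-        # BEGIN has been issued above; record that this connection now has an open transaction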
-        connection.transaction_open = True
-        return connection
-
-    def commit(self):
-        connection = self.get_thread_connection()
-        if connection.transaction_open is False:
-            raise dbt_common.exceptions.DbtInternalError(
-                'Tried to commit transaction on connection "{}", but '
-                "it does not have one open!".format(connection.name)
-            )
-
-        fire_event(SQLCommit(conn_name=connection.name, node_info=get_node_info()))
-        self.add_commit_query()
-
-        connection.transaction_open = False
-
-        return connection
diff --git a/core/dbt/adapters/sql/impl.py b/core/dbt/adapters/sql/impl.py
deleted file mode 100644
index e8cf8b45677..00000000000
--- a/core/dbt/adapters/sql/impl.py
+++ /dev/null
@@ -1,274 +0,0 @@
-import agate
-from typing import Any, Optional, Tuple, Type, List
-
-from dbt.adapters.contracts.connection import Connection, AdapterResponse
-from dbt.adapters.events.types import ColTypeChange, SchemaCreation, SchemaDrop
-from dbt.adapters.exceptions import RelationTypeNullError
-from dbt.adapters.base import BaseAdapter, available
-from dbt.adapters.cache import _make_ref_key_dict
-from dbt.adapters.sql import SQLConnectionManager
-from dbt_common.events.functions import fire_event
-
-
-from dbt.adapters.base.relation import BaseRelation
-
-LIST_RELATIONS_MACRO_NAME = "list_relations_without_caching"
-GET_COLUMNS_IN_RELATION_MACRO_NAME = "get_columns_in_relation"
-LIST_SCHEMAS_MACRO_NAME = "list_schemas"
-CHECK_SCHEMA_EXISTS_MACRO_NAME = "check_schema_exists"
-CREATE_SCHEMA_MACRO_NAME = "create_schema"
-DROP_SCHEMA_MACRO_NAME = "drop_schema"
-RENAME_RELATION_MACRO_NAME = "rename_relation"
-TRUNCATE_RELATION_MACRO_NAME = "truncate_relation"
-DROP_RELATION_MACRO_NAME = "drop_relation"
-ALTER_COLUMN_TYPE_MACRO_NAME = "alter_column_type"
-VALIDATE_SQL_MACRO_NAME = "validate_sql"
-
-
-class SQLAdapter(BaseAdapter):
-    """The default adapter with the common agate conversions and some SQL
-    methods implemented. This adapter has a different, much shorter list of
-    methods to implement, but some more macros that must be implemented.
-
-    To implement a macro, implement "${adapter_type}__${macro_name}" in the
-    adapter's internal project.
-
-    Methods to implement:
-    - date_function
-
-    Macros to implement:
-    - get_catalog
-    - list_relations_without_caching
-    - get_columns_in_relation
-    """
-
-    ConnectionManager: Type[SQLConnectionManager]
-    connections: SQLConnectionManager
-
-    @available.parse(lambda *a, **k: (None, None))
-    def add_query(
-        self,
-        sql: str,
-        auto_begin: bool = True,
-        bindings: Optional[Any] = None,
-        abridge_sql_log: bool = False,
-    ) -> Tuple[Connection, Any]:
-        """Add a query to the current transaction. A thin wrapper around
-        ConnectionManager.add_query.
-
-        :param sql: The SQL query to add
-        :param auto_begin: If set and there is no transaction in progress,
-            begin a new one.
-        :param bindings: An optional list of bindings for the query.
-        :param abridge_sql_log: If set, limit the raw sql logged to 512
-            characters
-        """
-        return self.connections.add_query(sql, auto_begin, bindings, abridge_sql_log)
-
-    @classmethod
-    def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str:
-        return "text"
-
-    @classmethod
-    def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str:
-        # TODO CT-211
-        decimals = agate_table.aggregate(agate.MaxPrecision(col_idx))  # type: ignore[attr-defined]
-        return "float8" if decimals else "integer"
-
-    @classmethod
-    def convert_integer_type(cls, agate_table: agate.Table, col_idx: int) -> str:
-        return "integer"
-
-    @classmethod
-    def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str:
-        return "boolean"
-
-    @classmethod
-    def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str:
-        return "timestamp without time zone"
-
-    @classmethod
-    def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str:
-        return "date"
-
-    @classmethod
-    def convert_time_type(cls, agate_table: agate.Table, col_idx: int) -> str:
-        return "time"
-
-    @classmethod
-    def is_cancelable(cls) -> bool:
-        return True
-
-    def expand_column_types(self, goal, current):
-        reference_columns = {c.name: c for c in self.get_columns_in_relation(goal)}
-
-        target_columns = {c.name: c for c in self.get_columns_in_relation(current)}
-
-        for column_name, reference_column in reference_columns.items():
-            target_column = target_columns.get(column_name)
-
-            if target_column is not None and target_column.can_expand_to(reference_column):
-                col_string_size = reference_column.string_size()
-                new_type = self.Column.string_type(col_string_size)
-                fire_event(
-                    ColTypeChange(
-                        orig_type=target_column.data_type,
-                        new_type=new_type,
-                        table=_make_ref_key_dict(current),
-                    )
-                )
-
-                self.alter_column_type(current, column_name, new_type)
-
-    def alter_column_type(self, relation, column_name, new_column_type) -> None:
-        """
-        1. Create a new column (w/ temp name and correct type)
-        2. Copy data over to it
-        3. Drop the existing column (cascade!)
-        4. Rename the new column to the existing column's name
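-
-        Roughly, in illustrative SQL (table and column names are hypothetical; the
-        actual work happens in the alter_column_type macro):
-
-            alter table some_table add column col__tmp new_type;
-            update some_table set col__tmp = col;
-            alter table some_table drop column col cascade;
-            alter table some_table rename column col__tmp to col;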
-        """
-        kwargs = {
-            "relation": relation,
-            "column_name": column_name,
-            "new_column_type": new_column_type,
-        }
-        self.execute_macro(ALTER_COLUMN_TYPE_MACRO_NAME, kwargs=kwargs)
-
-    def drop_relation(self, relation):
-        if relation.type is None:
-            raise RelationTypeNullError(relation)
-
-        self.cache_dropped(relation)
-        self.execute_macro(DROP_RELATION_MACRO_NAME, kwargs={"relation": relation})
-
-    def truncate_relation(self, relation):
-        self.execute_macro(TRUNCATE_RELATION_MACRO_NAME, kwargs={"relation": relation})
-
-    def rename_relation(self, from_relation, to_relation):
-        self.cache_renamed(from_relation, to_relation)
-
-        kwargs = {"from_relation": from_relation, "to_relation": to_relation}
-        self.execute_macro(RENAME_RELATION_MACRO_NAME, kwargs=kwargs)
-
-    def get_columns_in_relation(self, relation):
-        return self.execute_macro(
-            GET_COLUMNS_IN_RELATION_MACRO_NAME, kwargs={"relation": relation}
-        )
-
-    def create_schema(self, relation: BaseRelation) -> None:
-        relation = relation.without_identifier()
-        fire_event(SchemaCreation(relation=_make_ref_key_dict(relation)))
-        kwargs = {
-            "relation": relation,
-        }
-        self.execute_macro(CREATE_SCHEMA_MACRO_NAME, kwargs=kwargs)
-        self.commit_if_has_connection()
-        # we can't update the cache here, as if the schema already existed we
-        # don't want to (incorrectly) say that it's empty
-
-    def drop_schema(self, relation: BaseRelation) -> None:
-        relation = relation.without_identifier()
-        fire_event(SchemaDrop(relation=_make_ref_key_dict(relation)))
-        kwargs = {
-            "relation": relation,
-        }
-        self.execute_macro(DROP_SCHEMA_MACRO_NAME, kwargs=kwargs)
-        self.commit_if_has_connection()
-        # we can update the cache here
-        self.cache.drop_schema(relation.database, relation.schema)
-
-    def list_relations_without_caching(
-        self,
-        schema_relation: BaseRelation,
-    ) -> List[BaseRelation]:
-        kwargs = {"schema_relation": schema_relation}
-        results = self.execute_macro(LIST_RELATIONS_MACRO_NAME, kwargs=kwargs)
-
-        relations = []
-        quote_policy = {"database": True, "schema": True, "identifier": True}
-        for _database, name, _schema, _type in results:
-            try:
-                _type = self.Relation.get_relation_type(_type)
-            except ValueError:
-                _type = self.Relation.External
-            relations.append(
-                self.Relation.create(
-                    database=_database,
-                    schema=_schema,
-                    identifier=name,
-                    quote_policy=quote_policy,
-                    type=_type,
-                )
-            )
-        return relations
-
-    @classmethod
-    def quote(cls, identifier):
-        return '"{}"'.format(identifier)
-
-    def list_schemas(self, database: str) -> List[str]:
-        results = self.execute_macro(LIST_SCHEMAS_MACRO_NAME, kwargs={"database": database})
-
-        return [row[0] for row in results]
-
-    def check_schema_exists(self, database: str, schema: str) -> bool:
-        information_schema = self.Relation.create(
-            database=database,
-            schema=schema,
-            identifier="INFORMATION_SCHEMA",
-            quote_policy=self.config.quoting,
-        ).information_schema()
-
-        kwargs = {"information_schema": information_schema, "schema": schema}
-        results = self.execute_macro(CHECK_SCHEMA_EXISTS_MACRO_NAME, kwargs=kwargs)
-        return results[0][0] > 0
-
-    def validate_sql(self, sql: str) -> AdapterResponse:
-        """Submit the given SQL to the engine for validation, but not execution.
-
-        By default we simply prefix the query with the explain keyword and allow the
-        exceptions thrown by the underlying engine on invalid SQL inputs to bubble up
-        to the exception handler. For adjustments to the explain statement - such as
-        for adapters that have different mechanisms for hinting at query validation
-        or dry-run - callers may be able to override the validate_sql macro with
-        the addition of an `<adapter>__validate_sql` implementation.
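-
-        For example, a hypothetical adapter named "myadapter" might ship the following
-        in its internal project (a sketch mirroring the default implementation, not
-        verbatim from any adapter):
-
-            {% macro myadapter__validate_sql(sql) %}
-                {% call statement('validate_sql') %}
-                    explain {{ sql }}
-                {% endcall %}
-                {{ return(load_result('validate_sql')) }}
-            {% endmacro %}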
-
-        :param sql str: The sql to validate
-        """
-        kwargs = {
-            "sql": sql,
-        }
-        result = self.execute_macro(VALIDATE_SQL_MACRO_NAME, kwargs=kwargs)
-        # The statement macro always returns an AdapterResponse in the output AttrDict's
-        # `response` property, and we preserve the full payload in case we want to
-        # return fetched output for engines where explain plans are emitted as columnar
-        # results. Any macro override that deviates from this behavior may encounter an
-        # assertion error in the runtime.
-        adapter_response = result.response  # type: ignore[attr-defined]
-        assert isinstance(adapter_response, AdapterResponse), (
-            f"Expected AdapterResponse from validate_sql macro execution, "
-            f"got {type(adapter_response)}."
-        )
-        return adapter_response
-
-    # This is for use in the test suite
-    def run_sql_for_tests(self, sql, fetch, conn):
-        cursor = conn.handle.cursor()
-        try:
-            cursor.execute(sql)
-            if hasattr(conn.handle, "commit"):
-                conn.handle.commit()
-            if fetch == "one":
-                return cursor.fetchone()
-            elif fetch == "all":
-                return cursor.fetchall()
-            else:
-                return
-        except BaseException as e:
-            if conn.handle and not getattr(conn.handle, "closed", True):
-                conn.handle.rollback()
-            print(sql)
-            print(e)
-            raise
-        finally:
-            conn.transaction_open = False
diff --git a/core/dbt/adapters/utils.py b/core/dbt/adapters/utils.py
deleted file mode 100644
index 44e6160a837..00000000000
--- a/core/dbt/adapters/utils.py
+++ /dev/null
@@ -1,68 +0,0 @@
-from typing import Mapping, Sequence, Any, Dict, List
-from dbt.adapters.exceptions import DuplicateAliasError
-
-
-class Translator:
-    def __init__(self, aliases: Mapping[str, str], recursive: bool = False) -> None:
-        self.aliases = aliases
-        self.recursive = recursive
-
-    def translate_mapping(self, kwargs: Mapping[str, Any]) -> Dict[str, Any]:
-        result: Dict[str, Any] = {}
-
-        for key, value in kwargs.items():
-            canonical_key = self.aliases.get(key, key)
-            if canonical_key in result:
-                raise DuplicateAliasError(kwargs, self.aliases, canonical_key)
-            result[canonical_key] = self.translate_value(value)
-        return result
-
-    def translate_sequence(self, value: Sequence[Any]) -> List[Any]:
-        return [self.translate_value(v) for v in value]
-
-    def translate_value(self, value: Any) -> Any:
-        if self.recursive:
-            if isinstance(value, Mapping):
-                return self.translate_mapping(value)
-            elif isinstance(value, (list, tuple)):
-                return self.translate_sequence(value)
-        return value
-
-    def translate(self, value: Mapping[str, Any]) -> Dict[str, Any]:
-        try:
-            return self.translate_mapping(value)
-        except RuntimeError as exc:
-            if "maximum recursion depth exceeded" in str(exc):
-                raise RecursionError("Cycle detected in a value passed to translate!")
-            raise
-
-
-def translate_aliases(
-    kwargs: Dict[str, Any],
-    aliases: Dict[str, str],
-    recurse: bool = False,
-) -> Dict[str, Any]:
-    """Given a dict of keyword arguments and a dict mapping aliases to their
-    canonical values, canonicalize the keys in the kwargs dict.
-
-    If recurse is True, perform this operation recursively.
-
-    :returns: A dict containing all the values in kwargs referenced by their
-        canonical key.
-    :raises: `DuplicateAliasError`, if a canonical key is defined more than once.
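-
-    Example (illustrative):
-        >>> translate_aliases({"pass": "top-secret"}, {"pass": "password"})
-        {'password': 'top-secret'}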
- """ - translator = Translator(aliases, recurse) - return translator.translate(kwargs) - - -# some types need to make constants available to the jinja context as -# attributes, and regular properties only work with objects. maybe this should -# be handled by the RelationProxy? - - -class classproperty(object): - def __init__(self, func) -> None: - self.func = func - - def __get__(self, obj, objtype): - return self.func(objtype) diff --git a/core/dbt/context/macro_resolver.py b/core/dbt/context/macro_resolver.py index dbeaf53ed78..d897c754049 100644 --- a/core/dbt/context/macro_resolver.py +++ b/core/dbt/context/macro_resolver.py @@ -1,7 +1,7 @@ from typing import Dict, MutableMapping, Optional from dbt.contracts.graph.nodes import Macro from dbt.exceptions import DuplicateMacroNameError, PackageNotFoundForMacroError -from dbt.adapters.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME +from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME from dbt.clients.jinja import MacroGenerator MacroNamespace = Dict[str, Macro] diff --git a/core/dbt/context/macros.py b/core/dbt/context/macros.py index a2cc710ee3f..c2442b1f4a8 100644 --- a/core/dbt/context/macros.py +++ b/core/dbt/context/macros.py @@ -2,7 +2,7 @@ from dbt.clients.jinja import MacroGenerator, MacroStack from dbt.contracts.graph.nodes import Macro -from dbt.adapters.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME +from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME from dbt.exceptions import DuplicateMacroNameError, PackageNotFoundForMacroError diff --git a/core/dbt/task/init.py b/core/dbt/task/init.py index 417a293fac3..0b6f4fb22d6 100644 --- a/core/dbt/task/init.py +++ b/core/dbt/task/init.py @@ -34,7 +34,7 @@ from dbt.include.starter_project import PACKAGE_PATH as starter_project_directory -from dbt.adapters.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME +from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME from dbt.task.base import BaseTask, move_to_nearest_project_dir diff --git a/core/setup.py b/core/setup.py index d0d8c12f25e..66edb342cd5 100644 --- a/core/setup.py +++ b/core/setup.py @@ -72,6 +72,7 @@ "minimal-snowplow-tracker~=0.0.2", "dbt-semantic-interfaces~=0.5.0a2", "dbt-common~=0.1.0", + "dbt-adapters~=0.1.0a2", # ---- # Expect compatibility with all new versions of these packages, so lower bounds only. 
"packaging>20.9", diff --git a/dev-requirements.txt b/dev-requirements.txt index 4f93c615fae..77108282a40 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,3 +1,4 @@ +git+https://github.com/dbt-labs/dbt-adapters.git@main black==23.3.0 bumpversion ddtrace==2.1.7 diff --git a/plugins/postgres/dbt/adapters/postgres/connections.py b/plugins/postgres/dbt/adapters/postgres/connections.py index 94702969bed..8d104b28268 100644 --- a/plugins/postgres/dbt/adapters/postgres/connections.py +++ b/plugins/postgres/dbt/adapters/postgres/connections.py @@ -4,9 +4,8 @@ from psycopg2.extensions import string_types import dbt_common.exceptions -from dbt.adapters.base import Credentials from dbt.adapters.sql import SQLConnectionManager -from dbt.adapters.contracts.connection import AdapterResponse +from dbt.adapters.contracts.connection import AdapterResponse, Credentials from dbt.adapters.events.logging import AdapterLogger from dbt_common.helper_types import Port diff --git a/tests/adapter/dbt/__init__.py b/tests/adapter/dbt/__init__.py new file mode 100644 index 00000000000..b36383a6102 --- /dev/null +++ b/tests/adapter/dbt/__init__.py @@ -0,0 +1,3 @@ +from pkgutil import extend_path + +__path__ = extend_path(__path__, __name__) diff --git a/tests/unit/test_adapter_factory.py b/tests/unit/test_adapter_factory.py index 49974d14dfb..c67b61d7fc0 100644 --- a/tests/unit/test_adapter_factory.py +++ b/tests/unit/test_adapter_factory.py @@ -3,7 +3,7 @@ from unittest import mock from dbt.adapters.factory import AdapterContainer from dbt.adapters.base.plugin import AdapterPlugin -from dbt.adapters.include.global_project import ( +from dbt.include.global_project import ( PROJECT_NAME as GLOBAL_PROJECT_NAME, )