diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 2c867d7722c..6846cafdfd3 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -36,12 +36,4 @@ first_value = 1 [bumpversion:file:core/dbt/version.py] -[bumpversion:file:plugins/postgres/setup.py] - -[bumpversion:file:plugins/postgres/dbt/adapters/postgres/__version__.py] - [bumpversion:file:docker/Dockerfile] - -[bumpversion:file:tests/adapter/setup.py] - -[bumpversion:file:tests/adapter/dbt/tests/adapter/__version__.py] diff --git a/.changes/header.tpl.md b/.changes/header.tpl.md index e48cb3069f6..4d0055e6287 100755 --- a/.changes/header.tpl.md +++ b/.changes/header.tpl.md @@ -1,6 +1,6 @@ # dbt Core Changelog -- This file provides a full account of all changes to `dbt-core` and `dbt-postgres` +- This file provides a full account of all changes to `dbt-core` - Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases. - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). 
For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry) diff --git a/.changes/unreleased/Breaking Changes-20240130-140550.yaml b/.changes/unreleased/Breaking Changes-20240130-140550.yaml new file mode 100644 index 00000000000..aac49fecb90 --- /dev/null +++ b/.changes/unreleased/Breaking Changes-20240130-140550.yaml @@ -0,0 +1,6 @@ +kind: Breaking Changes +body: Remove dbt-tests-adapter and dbt-postgres packages from dbt-core +time: 2024-01-30T14:05:50.291291-08:00 +custom: + Author: colin-rogers-dbt + Issue: "9455" diff --git a/.github/dependabot.yml b/.github/dependabot.yml index d0c525798f9..ae39691c7e0 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -11,11 +11,6 @@ updates: schedule: interval: "daily" rebase-strategy: "disabled" - - package-ecosystem: "pip" - directory: "/plugins/postgres" - schedule: - interval: "daily" - rebase-strategy: "disabled" # docker dependencies - package-ecosystem: "docker" diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md index 5373f780aa1..f24e573e850 100644 --- a/ARCHITECTURE.md +++ b/ARCHITECTURE.md @@ -31,7 +31,8 @@ This is the docs website code. It comes from the dbt-docs repository, and is gen ## Adapters -dbt uses an adapter-plugin pattern to extend support to different databases, warehouses, query engines, etc. For testing and development purposes, the dbt-postgres plugin lives alongside the dbt-core codebase, in the [`plugins`](plugins) subdirectory. Like other adapter plugins, it is a self-contained codebase and package that builds on top of dbt-core. +dbt uses an adapter-plugin pattern to extend support to different databases, warehouses, query engines, etc. +Note: dbt-postgres used to exist in dbt-core but is now in [its own repo](https://github.com/dbt-labs/dbt-postgres) Each adapter is a mix of python, Jinja2, and SQL. 
The adapter code also makes heavy use of Jinja2 to wrap modular chunks of SQL functionality, define default implementations, and allow plugins to override it. diff --git a/CHANGELOG.md b/CHANGELOG.md index 3c9de94a974..9acba2135bd 100755 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # dbt Core Changelog -- This file provides a full account of all changes to `dbt-core` and `dbt-postgres` +- This file provides a full account of all changes to `dbt-core` - Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases. - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 97acded7d8e..d33279543f2 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -22,7 +22,7 @@ If you get stuck, we're happy to help! Drop us a line in the `#dbt-core-developm ### Notes -- **Adapters:** Is your issue or proposed code change related to a specific [database adapter](https://docs.getdbt.com/docs/available-adapters)? If so, please open issues, PRs, and discussions in that adapter's repository instead. The sole exception is Postgres; the `dbt-postgres` plugin lives in this repository (`dbt-core`). +- **Adapters:** Is your issue or proposed code change related to a specific [database adapter](https://docs.getdbt.com/docs/available-adapters)? If so, please open issues, PRs, and discussions in that adapter's repository instead. - **CLA:** Please note that anyone contributing code to `dbt-core` must sign the [Contributor License Agreement](https://docs.getdbt.com/docs/contributor-license-agreements). 
If you are unable to sign the CLA, the `dbt-core` maintainers will unfortunately be unable to merge any of your Pull Requests. We welcome you to participate in discussions, open issues, and comment on existing ones. - **Branches:** All pull requests from community contributors should target the `main` branch (default). If the change is needed as a patch for a minor version of dbt that has already been released (or is already a release candidate), a maintainer will backport the changes in your PR to the relevant "latest" release branch (`1.0.latest`, `1.1.latest`, ...). If an issue fix applies to a release branch, that fix should be first committed to the development branch and then to the release branch (rarely release-branch fixes may not apply to `main`). - **Releases**: Before releasing a new minor version of Core, we prepare a series of alphas and release candidates to allow users (especially employees of dbt Labs!) to test the new version in live environments. This is an important quality assurance step, as it exposes the new code to a wide variety of complicated deployments and can surface bugs before official release. Releases are accessible via pip, homebrew, and dbt Cloud. diff --git a/Makefile b/Makefile index 595026452ab..09dbf44beed 100644 --- a/Makefile +++ b/Makefile @@ -86,12 +86,12 @@ test: .env ## Runs unit tests with py and code checks against staged changes. $(DOCKER_CMD) pre-commit run mypy-check --hook-stage manual | grep -v "INFO" .PHONY: integration -integration: .env ## Runs postgres integration tests with py-integration +integration: .env ## Runs core integration tests using postgres with py-integration @\ $(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto .PHONY: integration-fail-fast -integration-fail-fast: .env ## Runs postgres integration tests with py-integration in "fail fast" mode. +integration-fail-fast: .env ## Runs core integration tests using postgres with py-integration in "fail fast" mode. 
@\ $(DOCKER_CMD) tox -e py-integration -- -x -nauto diff --git a/plugins/postgres/dbt/include/postgres/macros/utils/columns_spec_ddl.sql b/core/dbt/tests/__init__.py similarity index 100% rename from plugins/postgres/dbt/include/postgres/macros/utils/columns_spec_ddl.sql rename to core/dbt/tests/__init__.py diff --git a/core/dbt/tests/fixtures/project.py b/core/dbt/tests/fixtures/project.py index 63168a1818a..77ec1098018 100644 --- a/core/dbt/tests/fixtures/project.py +++ b/core/dbt/tests/fixtures/project.py @@ -398,7 +398,7 @@ def logs_dir(request, prefix): # This fixture is for customizing tests that need overrides in adapter -# repos. Example in dbt.tests.adapter.basic.test_base. +# repos. Example in tests.functional.adapter.basic.test_base. @pytest.fixture(scope="class") def test_config(): return {} diff --git a/dev-requirements.txt b/dev-requirements.txt index 97c700bc2f3..38f0b378126 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,4 +1,5 @@ git+https://github.com/dbt-labs/dbt-adapters.git@main +git+https://github.com/dbt-labs/dbt-postgres.git@main black==23.3.0 bumpversion ddtrace==2.1.7 diff --git a/docker/Dockerfile b/docker/Dockerfile index ac3b4cc4853..3f6b46ec350 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -14,10 +14,8 @@ ARG build_for=linux/amd64 FROM --platform=$build_for python:3.10.7-slim-bullseye as base # N.B. The refs updated automagically every release via bumpversion -# N.B. 
dbt-postgres is currently found in the core codebase so a value of dbt-core@ is correct - ARG dbt_core_ref=dbt-core@v1.8.0a1 -ARG dbt_postgres_ref=dbt-core@v1.8.0a1 +ARG dbt_postgres_ref=dbt-postgres@v1.8.0a1 ARG dbt_redshift_ref=dbt-redshift@v1.8.0a1 ARG dbt_bigquery_ref=dbt-bigquery@v1.8.0a1 ARG dbt_snowflake_ref=dbt-snowflake@v1.8.0a1 diff --git a/docker/README.md b/docker/README.md index f735c312d15..e4af582a29a 100644 --- a/docker/README.md +++ b/docker/README.md @@ -82,7 +82,6 @@ docker build --tag my-third-party-dbt \ There are a few special cases worth noting: * The `dbt-spark` database adapter comes in three different versions named `PyHive`, `ODBC`, and the default `all`. If you wish to overide this you can use the `--build-arg` flag with the value of `dbt_spark_version=`. See the [docs](https://docs.getdbt.com/reference/warehouse-profiles/spark-profile) for more information. -* The `dbt-postgres` database adapter is released as part of the `dbt-core` codebase. If you wish to overide the version used, make sure you use the gitref for `dbt-core`: ``` docker build --tag my_dbt \ --target dbt-postgres \ diff --git a/editable-requirements.txt b/editable-requirements.txt index 28fa6bc216d..6f81820bb96 100644 --- a/editable-requirements.txt +++ b/editable-requirements.txt @@ -1,3 +1 @@ -e ./core --e ./plugins/postgres --e ./tests/adapter diff --git a/plugins/postgres/MANIFEST.in b/plugins/postgres/MANIFEST.in deleted file mode 100644 index 96f39441d58..00000000000 --- a/plugins/postgres/MANIFEST.in +++ /dev/null @@ -1 +0,0 @@ -recursive-include dbt/include *.sql *.yml diff --git a/plugins/postgres/README.md b/plugins/postgres/README.md deleted file mode 100644 index 62858c4d727..00000000000 --- a/plugins/postgres/README.md +++ /dev/null @@ -1,36 +0,0 @@ -

- dbt logo -

-

- - CI Badge - -

- -**[dbt](https://www.getdbt.com/)** enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications. - -dbt is the T in ELT. Organize, cleanse, denormalize, filter, rename, and pre-aggregate the raw data in your warehouse so that it's ready for analysis. - -## dbt-postgres - -The `dbt-postgres` package contains all of the code enabling dbt to work with a Postgres database. For -more information on using dbt with Postgres, consult [the docs](https://docs.getdbt.com/docs/profile-postgres). - -## Getting started - -- [Install dbt](https://docs.getdbt.com/docs/installation) -- Read the [introduction](https://docs.getdbt.com/docs/introduction/) and [viewpoint](https://docs.getdbt.com/docs/about/viewpoint/) - -## Join the dbt Community - -- Be part of the conversation in the [dbt Community Slack](http://community.getdbt.com/) -- Read more on the [dbt Community Discourse](https://discourse.getdbt.com) - -## Reporting bugs and contributing code - -- Want to report a bug or request a feature? Let us know on [Slack](http://community.getdbt.com/), or open [an issue](https://github.com/dbt-labs/dbt-core/issues/new) -- Want to help us build dbt? Check out the [Contributing Guide](https://github.com/dbt-labs/dbt-core/blob/HEAD/CONTRIBUTING.md) - -## Code of Conduct - -Everyone interacting in the dbt project's codebases, issue trackers, chat rooms, and mailing lists is expected to follow the [dbt Code of Conduct](https://community.getdbt.com/code-of-conduct). diff --git a/plugins/postgres/dbt/__init__.py b/plugins/postgres/dbt/__init__.py deleted file mode 100644 index 3a7ded78b77..00000000000 --- a/plugins/postgres/dbt/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# N.B. 
-# This will add to the package’s __path__ all subdirectories of directories on sys.path named after the package which effectively combines both modules into a single namespace (dbt.adapters) -# The matching statement is in core/dbt/__init__.py - -from pkgutil import extend_path - -__path__ = extend_path(__path__, __name__) diff --git a/plugins/postgres/dbt/adapters/__init__.py b/plugins/postgres/dbt/adapters/__init__.py deleted file mode 100644 index 65bb44b672e..00000000000 --- a/plugins/postgres/dbt/adapters/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# N.B. -# This will add to the package’s __path__ all subdirectories of directories on sys.path named after the package which effectively combines both modules into a single namespace (dbt.adapters) -# The matching statement is in core/dbt/adapters/__init__.py - -from pkgutil import extend_path - -__path__ = extend_path(__path__, __name__) diff --git a/plugins/postgres/dbt/adapters/postgres/__init__.py b/plugins/postgres/dbt/adapters/postgres/__init__.py deleted file mode 100644 index 38dce8bdb22..00000000000 --- a/plugins/postgres/dbt/adapters/postgres/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# these are mostly just exports, #noqa them so flake8 will be happy -from dbt.adapters.postgres.connections import PostgresConnectionManager # noqa -from dbt.adapters.postgres.connections import PostgresCredentials -from dbt.adapters.postgres.column import PostgresColumn # noqa -from dbt.adapters.postgres.relation import PostgresRelation # noqa: F401 -from dbt.adapters.postgres.impl import PostgresAdapter - -from dbt.adapters.base import AdapterPlugin -from dbt.include import postgres - -Plugin = AdapterPlugin( - adapter=PostgresAdapter, credentials=PostgresCredentials, include_path=postgres.PACKAGE_PATH -) diff --git a/plugins/postgres/dbt/adapters/postgres/__version__.py b/plugins/postgres/dbt/adapters/postgres/__version__.py deleted file mode 100644 index f15b401d12d..00000000000 --- 
a/plugins/postgres/dbt/adapters/postgres/__version__.py +++ /dev/null @@ -1 +0,0 @@ -version = "1.8.0a1" diff --git a/plugins/postgres/dbt/adapters/postgres/column.py b/plugins/postgres/dbt/adapters/postgres/column.py deleted file mode 100644 index 686ec0cb8a4..00000000000 --- a/plugins/postgres/dbt/adapters/postgres/column.py +++ /dev/null @@ -1,12 +0,0 @@ -from dbt.adapters.base import Column - - -class PostgresColumn(Column): - @property - def data_type(self): - # on postgres, do not convert 'text' or 'varchar' to 'varchar()' - if self.dtype.lower() == "text" or ( - self.dtype.lower() == "character varying" and self.char_size is None - ): - return self.dtype - return super().data_type diff --git a/plugins/postgres/dbt/adapters/postgres/connections.py b/plugins/postgres/dbt/adapters/postgres/connections.py deleted file mode 100644 index 8d104b28268..00000000000 --- a/plugins/postgres/dbt/adapters/postgres/connections.py +++ /dev/null @@ -1,209 +0,0 @@ -from contextlib import contextmanager - -import psycopg2 -from psycopg2.extensions import string_types - -import dbt_common.exceptions -from dbt.adapters.sql import SQLConnectionManager -from dbt.adapters.contracts.connection import AdapterResponse, Credentials -from dbt.adapters.events.logging import AdapterLogger - -from dbt_common.helper_types import Port -from dataclasses import dataclass -from typing import Optional -from typing_extensions import Annotated -from mashumaro.jsonschema.annotations import Maximum, Minimum - - -logger = AdapterLogger("Postgres") - - -@dataclass -class PostgresCredentials(Credentials): - host: str - user: str - # Annotated is used by mashumaro for jsonschema generation - port: Annotated[Port, Minimum(0), Maximum(65535)] - password: str # on postgres the password is mandatory - connect_timeout: int = 10 - role: Optional[str] = None - search_path: Optional[str] = None - keepalives_idle: int = 0 # 0 means to use the default value - sslmode: Optional[str] = None - sslcert: Optional[str] 
= None - sslkey: Optional[str] = None - sslrootcert: Optional[str] = None - application_name: Optional[str] = "dbt" - retries: int = 1 - - _ALIASES = {"dbname": "database", "pass": "password"} - - @property - def type(self): - return "postgres" - - @property - def unique_field(self): - return self.host - - def _connection_keys(self): - return ( - "host", - "port", - "user", - "database", - "schema", - "connect_timeout", - "role", - "search_path", - "keepalives_idle", - "sslmode", - "sslcert", - "sslkey", - "sslrootcert", - "application_name", - "retries", - ) - - -class PostgresConnectionManager(SQLConnectionManager): - TYPE = "postgres" - - @contextmanager - def exception_handler(self, sql): - try: - yield - - except psycopg2.DatabaseError as e: - logger.debug("Postgres error: {}".format(str(e))) - - try: - self.rollback_if_open() - except psycopg2.Error: - logger.debug("Failed to release connection!") - pass - - raise dbt_common.exceptions.DbtDatabaseError(str(e).strip()) from e - - except Exception as e: - logger.debug("Error running SQL: {}", sql) - logger.debug("Rolling back transaction.") - self.rollback_if_open() - if isinstance(e, dbt_common.exceptions.DbtRuntimeError): - # during a sql query, an internal to dbt exception was raised. - # this sounds a lot like a signal handler and probably has - # useful information, so raise it without modification. - raise - - raise dbt_common.exceptions.DbtRuntimeError(e) from e - - @classmethod - def open(cls, connection): - if connection.state == "open": - logger.debug("Connection is already open, skipping open.") - return connection - - credentials = cls.get_credentials(connection.credentials) - kwargs = {} - # we don't want to pass 0 along to connect() as postgres will try to - # call an invalid setsockopt() call (contrary to the docs). 
- if credentials.keepalives_idle: - kwargs["keepalives_idle"] = credentials.keepalives_idle - - # psycopg2 doesn't support search_path officially, - # see https://github.com/psycopg/psycopg2/issues/465 - search_path = credentials.search_path - if search_path is not None and search_path != "": - # see https://postgresql.org/docs/9.5/libpq-connect.html - kwargs["options"] = "-c search_path={}".format(search_path.replace(" ", "\\ ")) - - if credentials.sslmode: - kwargs["sslmode"] = credentials.sslmode - - if credentials.sslcert is not None: - kwargs["sslcert"] = credentials.sslcert - - if credentials.sslkey is not None: - kwargs["sslkey"] = credentials.sslkey - - if credentials.sslrootcert is not None: - kwargs["sslrootcert"] = credentials.sslrootcert - - if credentials.application_name: - kwargs["application_name"] = credentials.application_name - - def connect(): - handle = psycopg2.connect( - dbname=credentials.database, - user=credentials.user, - host=credentials.host, - password=credentials.password, - port=credentials.port, - connect_timeout=credentials.connect_timeout, - **kwargs, - ) - if credentials.role: - handle.cursor().execute("set role {}".format(credentials.role)) - return handle - - retryable_exceptions = [ - # OperationalError is subclassed by all psycopg2 Connection Exceptions and it's raised - # by generic connection timeouts without an error code. This is a limitation of - # psycopg2 which doesn't provide subclasses for errors without a SQLSTATE error code. - # The limitation has been known for a while and there are no efforts to tackle it. 
- # See: https://github.com/psycopg/psycopg2/issues/682 - psycopg2.errors.OperationalError, - ] - - def exponential_backoff(attempt: int): - return attempt * attempt - - return cls.retry_connection( - connection, - connect=connect, - logger=logger, - retry_limit=credentials.retries, - retry_timeout=exponential_backoff, - retryable_exceptions=retryable_exceptions, - ) - - def cancel(self, connection): - connection_name = connection.name - try: - pid = connection.handle.get_backend_pid() - except psycopg2.InterfaceError as exc: - # if the connection is already closed, not much to cancel! - if "already closed" in str(exc): - logger.debug(f"Connection {connection_name} was already closed") - return - # probably bad, re-raise it - raise - - sql = "select pg_terminate_backend({})".format(pid) - - logger.debug("Cancelling query '{}' ({})".format(connection_name, pid)) - - _, cursor = self.add_query(sql) - res = cursor.fetchone() - - logger.debug("Cancel query '{}': {}".format(connection_name, res)) - - @classmethod - def get_credentials(cls, credentials): - return credentials - - @classmethod - def get_response(cls, cursor) -> AdapterResponse: - message = str(cursor.statusmessage) - rows = cursor.rowcount - status_message_parts = message.split() if message is not None else [] - status_messsage_strings = [part for part in status_message_parts if not part.isdigit()] - code = " ".join(status_messsage_strings) - return AdapterResponse(_message=message, code=code, rows_affected=rows) - - @classmethod - def data_type_code_to_name(cls, type_code: int) -> str: - if type_code in string_types: - return string_types[type_code].name - else: - return f"unknown type_code {type_code}" diff --git a/plugins/postgres/dbt/adapters/postgres/impl.py b/plugins/postgres/dbt/adapters/postgres/impl.py deleted file mode 100644 index 99db1d32f76..00000000000 --- a/plugins/postgres/dbt/adapters/postgres/impl.py +++ /dev/null @@ -1,152 +0,0 @@ -from datetime import datetime -from dataclasses import 
dataclass -from typing import Any, Optional, Set, List - -from dbt.adapters.base.meta import available -from dbt.adapters.base.impl import AdapterConfig, ConstraintSupport -from dbt.adapters.capability import CapabilitySupport, Support, CapabilityDict, Capability -from dbt.adapters.sql import SQLAdapter -from dbt.adapters.postgres import PostgresConnectionManager -from dbt.adapters.postgres.column import PostgresColumn -from dbt.adapters.postgres import PostgresRelation -from dbt_common.contracts.constraints import ConstraintType -from dbt_common.dataclass_schema import dbtClassMixin, ValidationError -from dbt_common.exceptions import DbtRuntimeError -from dbt.adapters.exceptions import ( - CrossDbReferenceProhibitedError, - IndexConfigNotDictError, - IndexConfigError, - UnexpectedDbReferenceError, -) -from dbt_common.utils import encoding as dbt_encoding - - -GET_RELATIONS_MACRO_NAME = "postgres__get_relations" - - -@dataclass -class PostgresIndexConfig(dbtClassMixin): - columns: List[str] - unique: bool = False - type: Optional[str] = None - - def render(self, relation): - # We append the current timestamp to the index name because otherwise - # the index will only be created on every other run. See - # https://github.com/dbt-labs/dbt-core/issues/1945#issuecomment-576714925 - # for an explanation. 
- now = datetime.utcnow().isoformat() - inputs = self.columns + [relation.render(), str(self.unique), str(self.type), now] - string = "_".join(inputs) - return dbt_encoding.md5(string) - - @classmethod - def parse(cls, raw_index) -> Optional["PostgresIndexConfig"]: - if raw_index is None: - return None - try: - cls.validate(raw_index) - return cls.from_dict(raw_index) - except ValidationError as exc: - raise IndexConfigError(exc) - except TypeError: - raise IndexConfigNotDictError(raw_index) - - -@dataclass -class PostgresConfig(AdapterConfig): - unlogged: Optional[bool] = None - indexes: Optional[List[PostgresIndexConfig]] = None - - -class PostgresAdapter(SQLAdapter): - Relation = PostgresRelation - ConnectionManager = PostgresConnectionManager - Column = PostgresColumn - - AdapterSpecificConfigs = PostgresConfig - - CONSTRAINT_SUPPORT = { - ConstraintType.check: ConstraintSupport.ENFORCED, - ConstraintType.not_null: ConstraintSupport.ENFORCED, - ConstraintType.unique: ConstraintSupport.ENFORCED, - ConstraintType.primary_key: ConstraintSupport.ENFORCED, - ConstraintType.foreign_key: ConstraintSupport.ENFORCED, - } - - CATALOG_BY_RELATION_SUPPORT = True - - _capabilities: CapabilityDict = CapabilityDict( - {Capability.SchemaMetadataByRelations: CapabilitySupport(support=Support.Full)} - ) - - @classmethod - def date_function(cls): - return "now()" - - @available - def verify_database(self, database): - if database.startswith('"'): - database = database.strip('"') - expected = self.config.credentials.database - if database.lower() != expected.lower(): - raise UnexpectedDbReferenceError(self.type(), database, expected) - # return an empty string on success so macros can call this - return "" - - @available - def parse_index(self, raw_index: Any) -> Optional[PostgresIndexConfig]: - return PostgresIndexConfig.parse(raw_index) - - def _link_cached_database_relations(self, schemas: Set[str]): - """ - :param schemas: The set of schemas that should have links added. 
- """ - database = self.config.credentials.database - table = self.execute_macro(GET_RELATIONS_MACRO_NAME) - - for (dep_schema, dep_name, refed_schema, refed_name) in table: - dependent = self.Relation.create( - database=database, schema=dep_schema, identifier=dep_name - ) - referenced = self.Relation.create( - database=database, schema=refed_schema, identifier=refed_name - ) - - # don't record in cache if this relation isn't in a relevant - # schema - if refed_schema.lower() in schemas: - self.cache.add_link(referenced, dependent) - - def _get_catalog_schemas(self, manifest): - # postgres only allow one database (the main one) - schema_search_map = super()._get_catalog_schemas(manifest) - try: - return schema_search_map.flatten() - except DbtRuntimeError as exc: - raise CrossDbReferenceProhibitedError(self.type(), exc.msg) - - def _link_cached_relations(self, manifest): - schemas: Set[str] = set() - relations_schemas = self._get_cache_schemas(manifest) - for relation in relations_schemas: - self.verify_database(relation.database) - schemas.add(relation.schema.lower()) - - self._link_cached_database_relations(schemas) - - def _relations_cache_for_schemas(self, manifest, cache_schemas=None): - super()._relations_cache_for_schemas(manifest, cache_schemas) - self._link_cached_relations(manifest) - - def timestamp_add_sql(self, add_to: str, number: int = 1, interval: str = "hour") -> str: - return f"{add_to} + interval '{number} {interval}'" - - def valid_incremental_strategies(self): - """The set of standard builtin strategies which this adapter supports out-of-the-box. - Not used to validate custom strategies defined by end users. 
- """ - return ["append", "delete+insert", "merge"] - - def debug_query(self): - self.execute("select 1 as id") diff --git a/plugins/postgres/dbt/adapters/postgres/relation.py b/plugins/postgres/dbt/adapters/postgres/relation.py deleted file mode 100644 index 3fc1a2f58eb..00000000000 --- a/plugins/postgres/dbt/adapters/postgres/relation.py +++ /dev/null @@ -1,103 +0,0 @@ -from dataclasses import dataclass -from typing import Optional, Set, FrozenSet - -from dbt.adapters.base.relation import BaseRelation -from dbt.adapters.relation_configs import ( - RelationConfigChangeAction, - RelationResults, -) -from dbt.adapters.contracts.relation import RelationType, RelationConfig -from dbt_common.exceptions import DbtRuntimeError - -from dbt.adapters.postgres.relation_configs import ( - PostgresIndexConfig, - PostgresIndexConfigChange, - PostgresMaterializedViewConfig, - PostgresMaterializedViewConfigChangeCollection, - MAX_CHARACTERS_IN_IDENTIFIER, -) - - -@dataclass(frozen=True, eq=False, repr=False) -class PostgresRelation(BaseRelation): - renameable_relations = frozenset( - { - RelationType.View, - RelationType.Table, - RelationType.MaterializedView, - } - ) - replaceable_relations = frozenset( - { - RelationType.View, - RelationType.Table, - } - ) - - def __post_init__(self): - # Check for length of Postgres table/view names. 
- # Check self.type to exclude test relation identifiers - if ( - self.identifier is not None - and self.type is not None - and len(self.identifier) > self.relation_max_name_length() - ): - raise DbtRuntimeError( - f"Relation name '{self.identifier}' " - f"is longer than {self.relation_max_name_length()} characters" - ) - - def relation_max_name_length(self): - return MAX_CHARACTERS_IN_IDENTIFIER - - def get_materialized_view_config_change_collection( - self, relation_results: RelationResults, relation_config: RelationConfig - ) -> Optional[PostgresMaterializedViewConfigChangeCollection]: - config_change_collection = PostgresMaterializedViewConfigChangeCollection() - - existing_materialized_view = PostgresMaterializedViewConfig.from_relation_results( - relation_results - ) - new_materialized_view = PostgresMaterializedViewConfig.from_config(relation_config) - - config_change_collection.indexes = self._get_index_config_changes( - existing_materialized_view.indexes, new_materialized_view.indexes - ) - - # we return `None` instead of an empty `PostgresMaterializedViewConfigChangeCollection` object - # so that it's easier and more extensible to check in the materialization: - # `core/../materializations/materialized_view.sql` : - # {% if configuration_changes is none %} - if config_change_collection.has_changes: - return config_change_collection - - def _get_index_config_changes( - self, - existing_indexes: FrozenSet[PostgresIndexConfig], - new_indexes: FrozenSet[PostgresIndexConfig], - ) -> Set[PostgresIndexConfigChange]: - """ - Get the index updates that will occur as a result of a new run - - There are four scenarios: - - 1. Indexes are equal -> don't return these - 2. Index is new -> create these - 3. Index is old -> drop these - 4. 
Indexes are not equal -> drop old, create new -> two actions - - Returns: a set of index updates in the form {"action": "drop/create", "context": } - """ - drop_changes = set( - PostgresIndexConfigChange.from_dict( - {"action": RelationConfigChangeAction.drop, "context": index} - ) - for index in existing_indexes.difference(new_indexes) - ) - create_changes = set( - PostgresIndexConfigChange.from_dict( - {"action": RelationConfigChangeAction.create, "context": index} - ) - for index in new_indexes.difference(existing_indexes) - ) - return set().union(drop_changes, create_changes) diff --git a/plugins/postgres/dbt/adapters/postgres/relation_configs/__init__.py b/plugins/postgres/dbt/adapters/postgres/relation_configs/__init__.py deleted file mode 100644 index 9fdb942bfa5..00000000000 --- a/plugins/postgres/dbt/adapters/postgres/relation_configs/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -from dbt.adapters.postgres.relation_configs.constants import ( # noqa: F401 - MAX_CHARACTERS_IN_IDENTIFIER, -) -from dbt.adapters.postgres.relation_configs.index import ( # noqa: F401 - PostgresIndexConfig, - PostgresIndexConfigChange, -) -from dbt.adapters.postgres.relation_configs.materialized_view import ( # noqa: F401 - PostgresMaterializedViewConfig, - PostgresMaterializedViewConfigChangeCollection, -) diff --git a/plugins/postgres/dbt/adapters/postgres/relation_configs/constants.py b/plugins/postgres/dbt/adapters/postgres/relation_configs/constants.py deleted file mode 100644 index 9228df23043..00000000000 --- a/plugins/postgres/dbt/adapters/postgres/relation_configs/constants.py +++ /dev/null @@ -1 +0,0 @@ -MAX_CHARACTERS_IN_IDENTIFIER = 63 diff --git a/plugins/postgres/dbt/adapters/postgres/relation_configs/index.py b/plugins/postgres/dbt/adapters/postgres/relation_configs/index.py deleted file mode 100644 index ba0a9ce12ae..00000000000 --- a/plugins/postgres/dbt/adapters/postgres/relation_configs/index.py +++ /dev/null @@ -1,165 +0,0 @@ -from dataclasses import dataclass, 
field -from typing import Set, FrozenSet - -import agate -from dbt_common.dataclass_schema import StrEnum -from dbt_common.exceptions import DbtRuntimeError -from dbt.adapters.relation_configs import ( - RelationConfigBase, - RelationConfigValidationMixin, - RelationConfigValidationRule, - RelationConfigChangeAction, - RelationConfigChange, -) - - -class PostgresIndexMethod(StrEnum): - btree = "btree" - hash = "hash" - gist = "gist" - spgist = "spgist" - gin = "gin" - brin = "brin" - - @classmethod - def default(cls) -> "PostgresIndexMethod": - return cls.btree - - -@dataclass(frozen=True, eq=True, unsafe_hash=True) -class PostgresIndexConfig(RelationConfigBase, RelationConfigValidationMixin): - """ - This config fallows the specs found here: - https://www.postgresql.org/docs/current/sql-createindex.html - - The following parameters are configurable by dbt: - - name: the name of the index in the database, this isn't predictable since we apply a timestamp - - unique: checks for duplicate values when the index is created and on data updates - - method: the index method to be used - - column_names: the columns in the index - - Applicable defaults for non-configurable parameters: - - concurrently: `False` - - nulls_distinct: `True` - """ - - name: str = field(default=None, hash=False, compare=False) - column_names: FrozenSet[str] = field(default_factory=frozenset, hash=True) - unique: bool = field(default=False, hash=True) - method: PostgresIndexMethod = field(default=PostgresIndexMethod.default(), hash=True) - - @property - def validation_rules(self) -> Set[RelationConfigValidationRule]: - return { - RelationConfigValidationRule( - validation_check=self.column_names is not None, - validation_error=DbtRuntimeError( - "Indexes require at least one column, but none were provided" - ), - ), - } - - @classmethod - def from_dict(cls, config_dict) -> "PostgresIndexConfig": - # TODO: include the QuotePolicy instead of defaulting to lower() - kwargs_dict = { - "name": 
config_dict.get("name"), - "column_names": frozenset( - column.lower() for column in config_dict.get("column_names", set()) - ), - "unique": config_dict.get("unique"), - "method": config_dict.get("method"), - } - index: "PostgresIndexConfig" = super().from_dict(kwargs_dict) # type: ignore - return index - - @classmethod - def parse_model_node(cls, model_node_entry: dict) -> dict: - config_dict = { - "column_names": set(model_node_entry.get("columns", set())), - "unique": model_node_entry.get("unique"), - "method": model_node_entry.get("type"), - } - return config_dict - - @classmethod - def parse_relation_results(cls, relation_results_entry: agate.Row) -> dict: - config_dict = { - "name": relation_results_entry.get("name"), - "column_names": set(relation_results_entry.get("column_names", "").split(",")), - "unique": relation_results_entry.get("unique"), - "method": relation_results_entry.get("method"), - } - return config_dict - - @property - def as_node_config(self) -> dict: - """ - Returns: a dictionary that can be passed into `get_create_index_sql()` - """ - node_config = { - "columns": list(self.column_names), - "unique": self.unique, - "type": self.method.value, - } - return node_config - - -@dataclass(frozen=True, eq=True, unsafe_hash=True) -class PostgresIndexConfigChange(RelationConfigChange, RelationConfigValidationMixin): - """ - Example of an index change: - { - "action": "create", - "context": { - "name": "", # we don't know the name since it gets created as a hash at runtime - "columns": ["column_1", "column_3"], - "type": "hash", - "unique": True - } - }, - { - "action": "drop", - "context": { - "name": "index_abc", # we only need this to drop, but we need the rest to compare - "columns": ["column_1"], - "type": "btree", - "unique": True - } - } - """ - - context: PostgresIndexConfig - - @property - def requires_full_refresh(self) -> bool: - return False - - @property - def validation_rules(self) -> Set[RelationConfigValidationRule]: - return { - 
RelationConfigValidationRule( - validation_check=self.action - in {RelationConfigChangeAction.create, RelationConfigChangeAction.drop}, - validation_error=DbtRuntimeError( - "Invalid operation, only `drop` and `create` changes are supported for indexes." - ), - ), - RelationConfigValidationRule( - validation_check=not ( - self.action == RelationConfigChangeAction.drop and self.context.name is None - ), - validation_error=DbtRuntimeError( - "Invalid operation, attempting to drop an index with no name." - ), - ), - RelationConfigValidationRule( - validation_check=not ( - self.action == RelationConfigChangeAction.create - and self.context.column_names == set() - ), - validation_error=DbtRuntimeError( - "Invalid operations, attempting to create an index with no columns." - ), - ), - } diff --git a/plugins/postgres/dbt/adapters/postgres/relation_configs/materialized_view.py b/plugins/postgres/dbt/adapters/postgres/relation_configs/materialized_view.py deleted file mode 100644 index 4638eab1c2e..00000000000 --- a/plugins/postgres/dbt/adapters/postgres/relation_configs/materialized_view.py +++ /dev/null @@ -1,112 +0,0 @@ -from dataclasses import dataclass, field -from typing import Set, FrozenSet, List, Dict -from typing_extensions import Self - -import agate -from dbt.adapters.relation_configs import ( - RelationConfigBase, - RelationResults, - RelationConfigValidationMixin, - RelationConfigValidationRule, -) -from dbt.adapters.contracts.relation import RelationConfig -from dbt_common.exceptions import DbtRuntimeError - -from dbt.adapters.postgres.relation_configs.constants import MAX_CHARACTERS_IN_IDENTIFIER -from dbt.adapters.postgres.relation_configs.index import ( - PostgresIndexConfig, - PostgresIndexConfigChange, -) - - -@dataclass(frozen=True, eq=True, unsafe_hash=True) -class PostgresMaterializedViewConfig(RelationConfigBase, RelationConfigValidationMixin): - """ - This config follows the specs found here: - 
https://www.postgresql.org/docs/current/sql-creatematerializedview.html - - The following parameters are configurable by dbt: - - table_name: name of the materialized view - - query: the query that defines the view - - indexes: the collection (set) of indexes on the materialized view - - Applicable defaults for non-configurable parameters: - - method: `heap` - - tablespace_name: `default_tablespace` - - with_data: `True` - """ - - table_name: str = "" - query: str = "" - indexes: FrozenSet[PostgresIndexConfig] = field(default_factory=frozenset) - - @property - def validation_rules(self) -> Set[RelationConfigValidationRule]: - # index rules get run by default with the mixin - return { - RelationConfigValidationRule( - validation_check=self.table_name is None - or len(self.table_name) <= MAX_CHARACTERS_IN_IDENTIFIER, - validation_error=DbtRuntimeError( - f"The materialized view name is more than {MAX_CHARACTERS_IN_IDENTIFIER} " - f"characters: {self.table_name}" - ), - ), - } - - @classmethod - def from_dict(cls, config_dict: dict) -> Self: - kwargs_dict = { - "table_name": config_dict.get("table_name"), - "query": config_dict.get("query"), - "indexes": frozenset( - PostgresIndexConfig.from_dict(index) for index in config_dict.get("indexes", {}) - ), - } - materialized_view: Self = super().from_dict(kwargs_dict) # type: ignore - return materialized_view - - @classmethod - def from_config(cls, relation_config: RelationConfig) -> Self: - materialized_view_config = cls.parse_config(relation_config) - materialized_view = cls.from_dict(materialized_view_config) - return materialized_view - - @classmethod - def parse_config(cls, relation_config: RelationConfig) -> Dict: - indexes: List[dict] = relation_config.config.extra.get("indexes", []) - config_dict = { - "table_name": relation_config.identifier, - "query": relation_config.compiled_code, - "indexes": [PostgresIndexConfig.parse_model_node(index) for index in indexes], - } - return config_dict - - @classmethod - def 
from_relation_results(cls, relation_results: RelationResults) -> Self: - materialized_view_config = cls.parse_relation_results(relation_results) - materialized_view = cls.from_dict(materialized_view_config) - return materialized_view - - @classmethod - def parse_relation_results(cls, relation_results: RelationResults) -> dict: - indexes: agate.Table = relation_results.get("indexes", agate.Table(rows={})) - config_dict = { - "indexes": [ - PostgresIndexConfig.parse_relation_results(index) for index in indexes.rows - ], - } - return config_dict - - -@dataclass -class PostgresMaterializedViewConfigChangeCollection: - indexes: Set[PostgresIndexConfigChange] = field(default_factory=set) - - @property - def requires_full_refresh(self) -> bool: - return any(index.requires_full_refresh for index in self.indexes) - - @property - def has_changes(self) -> bool: - return self.indexes != set() diff --git a/plugins/postgres/dbt/include/postgres/__init__.py b/plugins/postgres/dbt/include/postgres/__init__.py deleted file mode 100644 index b177e5d4932..00000000000 --- a/plugins/postgres/dbt/include/postgres/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -import os - -PACKAGE_PATH = os.path.dirname(__file__) diff --git a/plugins/postgres/dbt/include/postgres/dbt_project.yml b/plugins/postgres/dbt/include/postgres/dbt_project.yml deleted file mode 100644 index 081149f6fd7..00000000000 --- a/plugins/postgres/dbt/include/postgres/dbt_project.yml +++ /dev/null @@ -1,5 +0,0 @@ -config-version: 2 -name: dbt_postgres -version: 1.0 - -macro-paths: ["macros"] diff --git a/plugins/postgres/dbt/include/postgres/macros/adapters.sql b/plugins/postgres/dbt/include/postgres/macros/adapters.sql deleted file mode 100644 index ee864e9b7c5..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/adapters.sql +++ /dev/null @@ -1,254 +0,0 @@ -{% macro postgres__create_table_as(temporary, relation, sql) -%} - {%- set unlogged = config.get('unlogged', default=false) -%} - {%- set sql_header = 
config.get('sql_header', none) -%} - - {{ sql_header if sql_header is not none }} - - create {% if temporary -%} - temporary - {%- elif unlogged -%} - unlogged - {%- endif %} table {{ relation }} - {% set contract_config = config.get('contract') %} - {% if contract_config.enforced %} - {{ get_assert_columns_equivalent(sql) }} - {% endif -%} - {% if contract_config.enforced and (not temporary) -%} - {{ get_table_columns_and_constraints() }} ; - insert into {{ relation }} ( - {{ adapter.dispatch('get_column_names', 'dbt')() }} - ) - {%- set sql = get_select_subquery(sql) %} - {% else %} - as - {% endif %} - ( - {{ sql }} - ); -{%- endmacro %} - -{% macro postgres__get_create_index_sql(relation, index_dict) -%} - {%- set index_config = adapter.parse_index(index_dict) -%} - {%- set comma_separated_columns = ", ".join(index_config.columns) -%} - {%- set index_name = index_config.render(relation) -%} - - create {% if index_config.unique -%} - unique - {%- endif %} index if not exists - "{{ index_name }}" - on {{ relation }} {% if index_config.type -%} - using {{ index_config.type }} - {%- endif %} - ({{ comma_separated_columns }}); -{%- endmacro %} - -{% macro postgres__create_schema(relation) -%} - {% if relation.database -%} - {{ adapter.verify_database(relation.database) }} - {%- endif -%} - {%- call statement('create_schema') -%} - create schema if not exists {{ relation.without_identifier().include(database=False) }} - {%- endcall -%} -{% endmacro %} - -{% macro postgres__drop_schema(relation) -%} - {% if relation.database -%} - {{ adapter.verify_database(relation.database) }} - {%- endif -%} - {%- call statement('drop_schema') -%} - drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade - {%- endcall -%} -{% endmacro %} - -{% macro postgres__get_columns_in_relation(relation) -%} - {% call statement('get_columns_in_relation', fetch_result=True) %} - select - column_name, - data_type, - character_maximum_length, - numeric_precision, 
- numeric_scale - - from {{ relation.information_schema('columns') }} - where table_name = '{{ relation.identifier }}' - {% if relation.schema %} - and table_schema = '{{ relation.schema }}' - {% endif %} - order by ordinal_position - - {% endcall %} - {% set table = load_result('get_columns_in_relation').table %} - {{ return(sql_convert_columns_in_relation(table)) }} -{% endmacro %} - - -{% macro postgres__list_relations_without_caching(schema_relation) %} - {% call statement('list_relations_without_caching', fetch_result=True) -%} - select - '{{ schema_relation.database }}' as database, - tablename as name, - schemaname as schema, - 'table' as type - from pg_tables - where schemaname ilike '{{ schema_relation.schema }}' - union all - select - '{{ schema_relation.database }}' as database, - viewname as name, - schemaname as schema, - 'view' as type - from pg_views - where schemaname ilike '{{ schema_relation.schema }}' - union all - select - '{{ schema_relation.database }}' as database, - matviewname as name, - schemaname as schema, - 'materialized_view' as type - from pg_matviews - where schemaname ilike '{{ schema_relation.schema }}' - {% endcall %} - {{ return(load_result('list_relations_without_caching').table) }} -{% endmacro %} - -{% macro postgres__information_schema_name(database) -%} - {% if database_name -%} - {{ adapter.verify_database(database_name) }} - {%- endif -%} - information_schema -{%- endmacro %} - -{% macro postgres__list_schemas(database) %} - {% if database -%} - {{ adapter.verify_database(database) }} - {%- endif -%} - {% call statement('list_schemas', fetch_result=True, auto_begin=False) %} - select distinct nspname from pg_namespace - {% endcall %} - {{ return(load_result('list_schemas').table) }} -{% endmacro %} - -{% macro postgres__check_schema_exists(information_schema, schema) -%} - {% if information_schema.database -%} - {{ adapter.verify_database(information_schema.database) }} - {%- endif -%} - {% call 
statement('check_schema_exists', fetch_result=True, auto_begin=False) %} - select count(*) from pg_namespace where nspname = '{{ schema }}' - {% endcall %} - {{ return(load_result('check_schema_exists').table) }} -{% endmacro %} - -{# - Postgres tables have a maximum length of 63 characters, anything longer is silently truncated. - Temp and backup relations add a lot of extra characters to the end of table names to ensure uniqueness. - To prevent this going over the character limit, the base_relation name is truncated to ensure - that name + suffix + uniquestring is < 63 characters. -#} - -{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %} - {% if dstring %} - {% set dt = modules.datetime.datetime.now() %} - {% set dtstring = dt.strftime("%H%M%S%f") %} - {% set suffix = suffix ~ dtstring %} - {% endif %} - {% set suffix_length = suffix|length %} - {% set relation_max_name_length = base_relation.relation_max_name_length() %} - {% if suffix_length > relation_max_name_length %} - {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). 
Maximum length is ' ~ relation_max_name_length ~ ' characters.') %} - {% endif %} - {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %} - - {{ return(base_relation.incorporate(path={"identifier": identifier })) }} - - {% endmacro %} - -{% macro postgres__make_intermediate_relation(base_relation, suffix) %} - {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }} -{% endmacro %} - -{% macro postgres__make_temp_relation(base_relation, suffix) %} - {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %} - {{ return(temp_relation.incorporate(path={"schema": none, - "database": none})) }} -{% endmacro %} - -{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %} - {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %} - {{ return(backup_relation.incorporate(type=backup_relation_type)) }} -{% endmacro %} - -{# - By using dollar-quoting like this, users can embed anything they want into their comments - (including nested dollar-quoting), as long as they do not use this exact dollar-quoting - label. It would be nice to just pick a new one but eventually you do have to give up. 
-#} -{% macro postgres_escape_comment(comment) -%} - {% if comment is not string %} - {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %} - {% endif %} - {%- set magic = '$dbt_comment_literal_block$' -%} - {%- if magic in comment -%} - {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%} - {%- endif -%} - {{ magic }}{{ comment }}{{ magic }} -{%- endmacro %} - - -{% macro postgres__alter_relation_comment(relation, comment) %} - {% set escaped_comment = postgres_escape_comment(comment) %} - comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }}; -{% endmacro %} - - -{% macro postgres__alter_column_comment(relation, column_dict) %} - {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute="name") | list %} - {% for column_name in column_dict if (column_name in existing_columns) %} - {% set comment = column_dict[column_name]['description'] %} - {% set escaped_comment = postgres_escape_comment(comment) %} - comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }}; - {% endfor %} -{% endmacro %} - -{%- macro postgres__get_show_grant_sql(relation) -%} - select grantee, privilege_type - from {{ relation.information_schema('role_table_grants') }} - where grantor = current_role - and grantee != current_role - and table_schema = '{{ relation.schema }}' - and table_name = '{{ relation.identifier }}' -{%- endmacro -%} - -{% macro postgres__copy_grants() %} - {{ return(False) }} -{% endmacro %} - - -{% macro postgres__get_show_indexes_sql(relation) %} - select - i.relname as name, - m.amname as method, - ix.indisunique as "unique", - array_to_string(array_agg(a.attname), ',') as column_names - from pg_index ix - join pg_class i - on i.oid = ix.indexrelid - join pg_am m - on m.oid=i.relam - join pg_class t - on t.oid = ix.indrelid - join pg_namespace n - on n.oid = 
t.relnamespace - join pg_attribute a - on a.attrelid = t.oid - and a.attnum = ANY(ix.indkey) - where t.relname = '{{ relation.identifier }}' - and n.nspname = '{{ relation.schema }}' - and t.relkind in ('r', 'm') - group by 1, 2, 3 - order by 1, 2, 3 -{% endmacro %} - - -{%- macro postgres__get_drop_index_sql(relation, index_name) -%} - drop index if exists "{{ relation.schema }}"."{{ index_name }}" -{%- endmacro -%} diff --git a/plugins/postgres/dbt/include/postgres/macros/catalog.sql b/plugins/postgres/dbt/include/postgres/macros/catalog.sql deleted file mode 100644 index bd587f0ac97..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/catalog.sql +++ /dev/null @@ -1,67 +0,0 @@ - -{% macro postgres__get_catalog_relations(information_schema, relations) -%} - {%- call statement('catalog', fetch_result=True) -%} - - {# - If the user has multiple databases set and the first one is wrong, this will fail. - But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better. 
- #} - {% set database = information_schema.database %} - {{ adapter.verify_database(database) }} - - select - '{{ database }}' as table_database, - sch.nspname as table_schema, - tbl.relname as table_name, - case tbl.relkind - when 'v' then 'VIEW' - when 'm' then 'MATERIALIZED VIEW' - else 'BASE TABLE' - end as table_type, - tbl_desc.description as table_comment, - col.attname as column_name, - col.attnum as column_index, - pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type, - col_desc.description as column_comment, - pg_get_userbyid(tbl.relowner) as table_owner - - from pg_catalog.pg_namespace sch - join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid - join pg_catalog.pg_attribute col on col.attrelid = tbl.oid - left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0) - left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum) - where ( - {%- for relation in relations -%} - {%- if relation.identifier -%} - (upper(sch.nspname) = upper('{{ relation.schema }}') and - upper(tbl.relname) = upper('{{ relation.identifier }}')) - {%- else-%} - upper(sch.nspname) = upper('{{ relation.schema }}') - {%- endif -%} - {%- if not loop.last %} or {% endif -%} - {%- endfor -%} - ) - and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session - and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables - and tbl.relkind in ('r', 'v', 'f', 'p', 'm') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table, [m]aterialized view. 
Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table - and col.attnum > 0 -- negative numbers are used for system columns such as oid - and not col.attisdropped -- column as not been dropped - - order by - sch.nspname, - tbl.relname, - col.attnum - - {%- endcall -%} - - {{ return(load_result('catalog').table) }} -{%- endmacro %} - - -{% macro postgres__get_catalog(information_schema, schemas) -%} - {%- set relations = [] -%} - {%- for schema in schemas -%} - {%- set dummy = relations.append({'schema': schema}) -%} - {%- endfor -%} - {{ return(postgres__get_catalog_relations(information_schema, relations)) }} -{%- endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/materializations/incremental_strategies.sql b/plugins/postgres/dbt/include/postgres/macros/materializations/incremental_strategies.sql deleted file mode 100644 index f2fbf41e090..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/materializations/incremental_strategies.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% macro postgres__get_incremental_default_sql(arg_dict) %} - - {% if arg_dict["unique_key"] %} - {% do return(get_incremental_delete_insert_sql(arg_dict)) %} - {% else %} - {% do return(get_incremental_append_sql(arg_dict)) %} - {% endif %} - -{% endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/materializations/snapshot_merge.sql b/plugins/postgres/dbt/include/postgres/macros/materializations/snapshot_merge.sql deleted file mode 100644 index 807c70b6c02..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/materializations/snapshot_merge.sql +++ /dev/null @@ -1,18 +0,0 @@ - -{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%} - {%- set insert_cols_csv = insert_cols | join(', ') -%} - - update {{ target }} - set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to - from {{ source }} as DBT_INTERNAL_SOURCE - where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text - and 
DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text) - and {{ target }}.dbt_valid_to is null; - - insert into {{ target }} ({{ insert_cols_csv }}) - select {% for column in insert_cols -%} - DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %} - {%- endfor %} - from {{ source }} as DBT_INTERNAL_SOURCE - where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text; -{% endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/relations.sql b/plugins/postgres/dbt/include/postgres/macros/relations.sql deleted file mode 100644 index dd50cf00163..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/relations.sql +++ /dev/null @@ -1,80 +0,0 @@ -{% macro postgres__get_relations() -%} - - {# - -- in pg_depend, objid is the dependent, refobjid is the referenced object - -- > a pg_depend entry indicates that the referenced object cannot be - -- > dropped without also dropping the dependent object. - #} - - {%- call statement('relations', fetch_result=True) -%} - with relation as ( - select - pg_rewrite.ev_class as class, - pg_rewrite.oid as id - from pg_rewrite - ), - class as ( - select - oid as id, - relname as name, - relnamespace as schema, - relkind as kind - from pg_class - ), - dependency as ( - select distinct - pg_depend.objid as id, - pg_depend.refobjid as ref - from pg_depend - ), - schema as ( - select - pg_namespace.oid as id, - pg_namespace.nspname as name - from pg_namespace - where nspname != 'information_schema' and nspname not like 'pg\_%' - ), - referenced as ( - select - relation.id AS id, - referenced_class.name , - referenced_class.schema , - referenced_class.kind - from relation - join class as referenced_class on relation.class=referenced_class.id - where referenced_class.kind in ('r', 'v', 'm') - ), - relationships as ( - select - referenced.name as referenced_name, - referenced.schema as referenced_schema_id, - dependent_class.name as dependent_name, - dependent_class.schema as 
dependent_schema_id, - referenced.kind as kind - from referenced - join dependency on referenced.id=dependency.id - join class as dependent_class on dependency.ref=dependent_class.id - where - (referenced.name != dependent_class.name or - referenced.schema != dependent_class.schema) - ) - - select - referenced_schema.name as referenced_schema, - relationships.referenced_name as referenced_name, - dependent_schema.name as dependent_schema, - relationships.dependent_name as dependent_name - from relationships - join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id - join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id - group by referenced_schema, referenced_name, dependent_schema, dependent_name - order by referenced_schema, referenced_name, dependent_schema, dependent_name; - - {%- endcall -%} - - {{ return(load_result('relations').table) }} -{% endmacro %} - -{% macro postgres_get_relations() %} - {{ return(postgres__get_relations()) }} -{% endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/relations/materialized_view/alter.sql b/plugins/postgres/dbt/include/postgres/macros/relations/materialized_view/alter.sql deleted file mode 100644 index ee53c113627..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/relations/materialized_view/alter.sql +++ /dev/null @@ -1,50 +0,0 @@ -{% macro postgres__get_alter_materialized_view_as_sql( - relation, - configuration_changes, - sql, - existing_relation, - backup_relation, - intermediate_relation -) %} - - -- apply a full refresh immediately if needed - {% if configuration_changes.requires_full_refresh %} - - {{ get_replace_sql(existing_relation, relation, sql) }} - - -- otherwise apply individual changes as needed - {% else %} - - {{ postgres__update_indexes_on_materialized_view(relation, configuration_changes.indexes) }} - - {%- endif -%} - -{% endmacro %} - - -{%- macro 
postgres__update_indexes_on_materialized_view(relation, index_changes) -%} - {{- log("Applying UPDATE INDEXES to: " ~ relation) -}} - - {%- for _index_change in index_changes -%} - {%- set _index = _index_change.context -%} - - {%- if _index_change.action == "drop" -%} - - {{ postgres__get_drop_index_sql(relation, _index.name) }}; - - {%- elif _index_change.action == "create" -%} - - {{ postgres__get_create_index_sql(relation, _index.as_node_config) }} - - {%- endif -%} - - {%- endfor -%} - -{%- endmacro -%} - - -{% macro postgres__get_materialized_view_configuration_changes(existing_relation, new_config) %} - {% set _existing_materialized_view = postgres__describe_materialized_view(existing_relation) %} - {% set _configuration_changes = existing_relation.get_materialized_view_config_change_collection(_existing_materialized_view, new_config.model) %} - {% do return(_configuration_changes) %} -{% endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/relations/materialized_view/create.sql b/plugins/postgres/dbt/include/postgres/macros/relations/materialized_view/create.sql deleted file mode 100644 index 17e5cb06434..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/relations/materialized_view/create.sql +++ /dev/null @@ -1,8 +0,0 @@ -{% macro postgres__get_create_materialized_view_as_sql(relation, sql) %} - create materialized view if not exists {{ relation }} as {{ sql }}; - - {% for _index_dict in config.get('indexes', []) -%} - {{- get_create_index_sql(relation, _index_dict) -}} - {%- endfor -%} - -{% endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/relations/materialized_view/describe.sql b/plugins/postgres/dbt/include/postgres/macros/relations/materialized_view/describe.sql deleted file mode 100644 index cb133b6a8b5..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/relations/materialized_view/describe.sql +++ /dev/null @@ -1,5 +0,0 @@ -{% macro postgres__describe_materialized_view(relation) %} - -- 
for now just get the indexes, we don't need the name or the query yet - {% set _indexes = run_query(get_show_indexes_sql(relation)) %} - {% do return({'indexes': _indexes}) %} -{% endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/relations/materialized_view/drop.sql b/plugins/postgres/dbt/include/postgres/macros/relations/materialized_view/drop.sql deleted file mode 100644 index 2263bb652b2..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/relations/materialized_view/drop.sql +++ /dev/null @@ -1,3 +0,0 @@ -{% macro postgres__drop_materialized_view(relation) -%} - drop materialized view if exists {{ relation }} cascade -{%- endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/relations/materialized_view/refresh.sql b/plugins/postgres/dbt/include/postgres/macros/relations/materialized_view/refresh.sql deleted file mode 100644 index 48b863e519b..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/relations/materialized_view/refresh.sql +++ /dev/null @@ -1,3 +0,0 @@ -{% macro postgres__refresh_materialized_view(relation) %} - refresh materialized view {{ relation }} -{% endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/relations/materialized_view/rename.sql b/plugins/postgres/dbt/include/postgres/macros/relations/materialized_view/rename.sql deleted file mode 100644 index 293ec9d1e12..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/relations/materialized_view/rename.sql +++ /dev/null @@ -1,3 +0,0 @@ -{% macro postgres__get_rename_materialized_view_sql(relation, new_name) %} - alter materialized view {{ relation }} rename to {{ new_name }} -{% endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/relations/table/drop.sql b/plugins/postgres/dbt/include/postgres/macros/relations/table/drop.sql deleted file mode 100644 index 146cfc8273f..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/relations/table/drop.sql +++ /dev/null @@ -1,3 +0,0 @@ -{% 
macro postgres__drop_table(relation) -%} - drop table if exists {{ relation }} cascade -{%- endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/relations/table/rename.sql b/plugins/postgres/dbt/include/postgres/macros/relations/table/rename.sql deleted file mode 100644 index bc3c234abb7..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/relations/table/rename.sql +++ /dev/null @@ -1,3 +0,0 @@ -{% macro postgres__get_rename_table_sql(relation, new_name) %} - alter table {{ relation }} rename to {{ new_name }} -{% endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/relations/table/replace.sql b/plugins/postgres/dbt/include/postgres/macros/relations/table/replace.sql deleted file mode 100644 index 3750edfdf95..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/relations/table/replace.sql +++ /dev/null @@ -1,17 +0,0 @@ -{% macro postgres__get_replace_table_sql(relation, sql) -%} - - {%- set sql_header = config.get('sql_header', none) -%} - {{ sql_header if sql_header is not none }} - - create or replace table {{ relation }} - {% set contract_config = config.get('contract') %} - {% if contract_config.enforced %} - {{ get_assert_columns_equivalent(sql) }} - {{ get_table_columns_and_constraints() }} - {%- set sql = get_select_subquery(sql) %} - {% endif %} - as ( - {{ sql }} - ); - -{%- endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/relations/view/drop.sql b/plugins/postgres/dbt/include/postgres/macros/relations/view/drop.sql deleted file mode 100644 index 46bd5a063eb..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/relations/view/drop.sql +++ /dev/null @@ -1,3 +0,0 @@ -{% macro postgres__drop_view(relation) -%} - drop view if exists {{ relation }} cascade -{%- endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/relations/view/rename.sql b/plugins/postgres/dbt/include/postgres/macros/relations/view/rename.sql deleted file mode 100644 index 
3c890a5b2d0..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/relations/view/rename.sql +++ /dev/null @@ -1,3 +0,0 @@ -{% macro postgres__get_rename_view_sql(relation, new_name) %} - alter view {{ relation }} rename to {{ new_name }} -{% endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/relations/view/replace.sql b/plugins/postgres/dbt/include/postgres/macros/relations/view/replace.sql deleted file mode 100644 index e2724c37e7c..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/relations/view/replace.sql +++ /dev/null @@ -1,15 +0,0 @@ -{% macro postgres__get_replace_view_sql(relation, sql) -%} - - {%- set sql_header = config.get('sql_header', none) -%} - {{ sql_header if sql_header is not none }} - - create or replace view {{ relation }} - {% set contract_config = config.get('contract') %} - {% if contract_config.enforced %} - {{ get_assert_columns_equivalent(sql) }} - {%- endif %} - as ( - {{ sql }} - ); - -{%- endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/timestamps.sql b/plugins/postgres/dbt/include/postgres/macros/timestamps.sql deleted file mode 100644 index 7233571b677..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/timestamps.sql +++ /dev/null @@ -1,20 +0,0 @@ -{% macro postgres__current_timestamp() -%} - now() -{%- endmacro %} - -{% macro postgres__snapshot_string_as_time(timestamp) -%} - {%- set result = "'" ~ timestamp ~ "'::timestamp without time zone" -%} - {{ return(result) }} -{%- endmacro %} - -{% macro postgres__snapshot_get_time() -%} - {{ current_timestamp() }}::timestamp without time zone -{%- endmacro %} - -{% macro postgres__current_timestamp_backcompat() %} - current_timestamp::{{ type_timestamp() }} -{% endmacro %} - -{% macro postgres__current_timestamp_in_utc_backcompat() %} - (current_timestamp at time zone 'utc')::{{ type_timestamp() }} -{% endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/utils/any_value.sql 
b/plugins/postgres/dbt/include/postgres/macros/utils/any_value.sql deleted file mode 100644 index 6fcb4eebe5f..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/utils/any_value.sql +++ /dev/null @@ -1,7 +0,0 @@ -{#- /*Postgres doesn't support any_value, so we're using min() to get the same result*/ -#} - -{% macro postgres__any_value(expression) -%} - - min({{ expression }}) - -{%- endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/utils/dateadd.sql b/plugins/postgres/dbt/include/postgres/macros/utils/dateadd.sql deleted file mode 100644 index 97009ccdd53..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/utils/dateadd.sql +++ /dev/null @@ -1,5 +0,0 @@ -{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %} - - {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }})) - -{% endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/utils/datediff.sql b/plugins/postgres/dbt/include/postgres/macros/utils/datediff.sql deleted file mode 100644 index b452529bec3..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/utils/datediff.sql +++ /dev/null @@ -1,32 +0,0 @@ -{% macro postgres__datediff(first_date, second_date, datepart) -%} - - {% if datepart == 'year' %} - (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date)) - {% elif datepart == 'quarter' %} - ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date)) - {% elif datepart == 'month' %} - ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date)) - {% elif datepart == 'day' %} - (({{second_date}})::date - ({{first_date}})::date) - {% elif datepart == 'week' %} - ({{ datediff(first_date, second_date, 'day') }} / 7 + case - when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', 
({{second_date}})::timestamp) then - case when {{first_date}} <= {{second_date}} then 0 else -1 end - else - case when {{first_date}} <= {{second_date}} then 1 else 0 end - end) - {% elif datepart == 'hour' %} - ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp)) - {% elif datepart == 'minute' %} - ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp)) - {% elif datepart == 'second' %} - ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp))) - {% elif datepart == 'millisecond' %} - ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp))) - {% elif datepart == 'microsecond' %} - ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp))) - {% else %} - {{ exceptions.raise_compiler_error("Unsupported datepart for macro datediff in postgres: {!r}".format(datepart)) }} - {% endif %} - -{%- endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/utils/last_day.sql b/plugins/postgres/dbt/include/postgres/macros/utils/last_day.sql deleted file mode 100644 index 16995301cb4..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/utils/last_day.sql +++ /dev/null @@ -1,14 +0,0 @@ -{% macro postgres__last_day(date, datepart) -%} - - {%- if datepart == 'quarter' -%} - -- postgres dateadd does not support quarter interval. 
- cast( - {{dbt.dateadd('day', '-1', - dbt.dateadd('month', '3', dbt.date_trunc(datepart, date)) - )}} - as date) - {%- else -%} - {{dbt.default_last_day(date, datepart)}} - {%- endif -%} - -{%- endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/utils/listagg.sql b/plugins/postgres/dbt/include/postgres/macros/utils/listagg.sql deleted file mode 100644 index f3e19427dc4..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/utils/listagg.sql +++ /dev/null @@ -1,23 +0,0 @@ -{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%} - - {% if limit_num -%} - array_to_string( - (array_agg( - {{ measure }} - {% if order_by_clause -%} - {{ order_by_clause }} - {%- endif %} - ))[1:{{ limit_num }}], - {{ delimiter_text }} - ) - {%- else %} - string_agg( - {{ measure }}, - {{ delimiter_text }} - {% if order_by_clause -%} - {{ order_by_clause }} - {%- endif %} - ) - {%- endif %} - -{%- endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/utils/split_part.sql b/plugins/postgres/dbt/include/postgres/macros/utils/split_part.sql deleted file mode 100644 index e4174d2ee9f..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/utils/split_part.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% macro postgres__split_part(string_text, delimiter_text, part_number) %} - - {% if part_number >= 0 %} - {{ dbt.default__split_part(string_text, delimiter_text, part_number) }} - {% else %} - {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }} - {% endif %} - -{% endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/profile_template.yml b/plugins/postgres/dbt/include/postgres/profile_template.yml deleted file mode 100644 index 5060a272f5d..00000000000 --- a/plugins/postgres/dbt/include/postgres/profile_template.yml +++ /dev/null @@ -1,21 +0,0 @@ -fixed: - type: postgres -prompts: - host: - hint: 'hostname for the instance' - port: - default: 5432 - type: 'int' - user: - hint: 'dev username' - pass: 
- hint: 'dev password' - hide_input: true - dbname: - hint: 'default database that dbt will build objects in' - schema: - hint: 'default schema that dbt will build objects in' - threads: - hint: '1 or more' - type: 'int' - default: 1 diff --git a/plugins/postgres/dbt/include/postgres/sample_profiles.yml b/plugins/postgres/dbt/include/postgres/sample_profiles.yml deleted file mode 100644 index 567f3912893..00000000000 --- a/plugins/postgres/dbt/include/postgres/sample_profiles.yml +++ /dev/null @@ -1,24 +0,0 @@ -default: - outputs: - - dev: - type: postgres - threads: [1 or more] - host: [host] - port: [port] - user: [dev_username] - pass: [dev_password] - dbname: [dbname] - schema: [dev_schema] - - prod: - type: postgres - threads: [1 or more] - host: [host] - port: [port] - user: [prod_username] - pass: [prod_password] - dbname: [dbname] - schema: [prod_schema] - - target: dev diff --git a/plugins/postgres/setup.py b/plugins/postgres/setup.py deleted file mode 100644 index 4622d8d847e..00000000000 --- a/plugins/postgres/setup.py +++ /dev/null @@ -1,83 +0,0 @@ -#!/usr/bin/env python -import os -import sys - -if sys.version_info < (3, 8): - print("Error: dbt does not support this version of Python.") - print("Please upgrade to Python 3.8 or higher.") - sys.exit(1) - - -from setuptools import setup - -try: - from setuptools import find_namespace_packages -except ImportError: - # the user has a downlevel version of setuptools. - print("Error: dbt requires setuptools v40.1.0 or higher.") - print('Please upgrade setuptools with "pip install --upgrade setuptools" ' "and try again") - sys.exit(1) - - -PSYCOPG2_MESSAGE = """ -No package name override was set. -Using 'psycopg2-binary' package to satisfy 'psycopg2' - -If you experience segmentation faults, silent crashes, or installation errors, -consider retrying with the 'DBT_PSYCOPG2_NAME' environment variable set to -'psycopg2'. It may require a compiler toolchain and development libraries! 
-""".strip() - - -def _dbt_psycopg2_name(): - # if the user chose something, use that - package_name = os.getenv("DBT_PSYCOPG2_NAME", "") - if package_name: - return package_name - - # default to psycopg2-binary for all OSes/versions - print(PSYCOPG2_MESSAGE) - return "psycopg2-binary" - - -package_name = "dbt-postgres" -package_version = "1.8.0a1" -description = """The postgres adapter plugin for dbt (data build tool)""" - -this_directory = os.path.abspath(os.path.dirname(__file__)) -with open(os.path.join(this_directory, "README.md")) as f: - long_description = f.read() - -DBT_PSYCOPG2_NAME = _dbt_psycopg2_name() - -setup( - name=package_name, - version=package_version, - description=description, - long_description=long_description, - long_description_content_type="text/markdown", - author="dbt Labs", - author_email="info@dbtlabs.com", - url="https://github.com/dbt-labs/dbt-core", - packages=find_namespace_packages(include=["dbt", "dbt.*"]), - include_package_data=True, - install_requires=[ - "dbt-core=={}".format(package_version), - "{}~=2.8".format(DBT_PSYCOPG2_NAME), - # installed via dbt-core, but referenced directly, don't pin to avoid version conflicts with dbt-core - "agate", - ], - zip_safe=False, - classifiers=[ - "Development Status :: 5 - Production/Stable", - "License :: OSI Approved :: Apache Software License", - "Operating System :: Microsoft :: Windows", - "Operating System :: MacOS :: MacOS X", - "Operating System :: POSIX :: Linux", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - ], - python_requires=">=3.8", -) diff --git a/requirements.txt b/requirements.txt index 279403c7e64..a2895053cb5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1 @@ ./core -./plugins/postgres diff --git a/scripts/build-dist.sh b/scripts/build-dist.sh index de41a1bfd8a..5dd0fa17f22 100755 --- a/scripts/build-dist.sh +++ 
b/scripts/build-dist.sh @@ -14,13 +14,11 @@ rm -rf "$DBT_PATH"/dist rm -rf "$DBT_PATH"/build mkdir -p "$DBT_PATH"/dist -for SUBPATH in core plugins/postgres tests/adapter -do - rm -rf "$DBT_PATH"/"$SUBPATH"/dist - rm -rf "$DBT_PATH"/"$SUBPATH"/build - cd "$DBT_PATH"/"$SUBPATH" - $PYTHON_BIN setup.py sdist bdist_wheel - cp -r "$DBT_PATH"/"$SUBPATH"/dist/* "$DBT_PATH"/dist/ -done +rm -rf "$DBT_PATH"/core/dist +rm -rf "$DBT_PATH"/core/build +cd "$DBT_PATH"/core +$PYTHON_BIN setup.py sdist bdist_wheel +cp -r "$DBT_PATH"/"core"/dist/* "$DBT_PATH"/dist/ + set +x diff --git a/tests/adapter/README.md b/tests/adapter/README.md deleted file mode 100644 index 954fa9432fd..00000000000 --- a/tests/adapter/README.md +++ /dev/null @@ -1,41 +0,0 @@ -

- dbt logo -

- -# dbt-tests-adapter - -For context and guidance on using this package, please read: ["Testing a new adapter"](https://docs.getdbt.com/docs/contributing/testing-a-new-adapter) - -## What is it? - -This package includes reusable test cases that reinforce behaviors common to all or many adapter plugins. There are two categories of tests: - -1. **Basic tests** that every adapter plugin is expected to pass. These are defined in `tests.adapter.basic`. Given differences across data platforms, these may require slight modification or reimplementation. Significantly overriding or disabling these tests should be with good reason, since each represents basic functionality expected by dbt users. For example, if your adapter does not support incremental models, you should disable the test, [by marking it with `skip` or `xfail`](https://docs.pytest.org/en/latest/how-to/skipping.html), as well as noting that limitation in any documentation, READMEs, and usage guides that accompany your adapter. - -2. **Optional tests**, for second-order functionality that is common across plugins, but not required for basic use. Your plugin can opt into these test cases by inheriting existing ones, or reimplementing them with adjustments. For now, this category includes all tests located outside the `basic` subdirectory. More tests will be added as we convert older tests defined on dbt-core and mature plugins to use the standard framework. - -## How to use it? - -Each test case in this repo is packaged as a class, prefixed `Base`. To enable a test case to run with your adapter plugin, you should inherit the base class into a new class, prefixed `Test`. That test class will be discovered and run by `pytest`. It can also makes modifications if needed. 
- -```python -class TestSimpleMaterializations(BaseSimpleMaterializations): - pass -``` - -## Distribution - -To install: - -```sh -pip install dbt-tests-adapter -``` - -This package is versioned in lockstep with `dbt-core`, and [the same versioning guidelines](https://docs.getdbt.com/docs/core-versions) apply: -- New "basic" test cases MAY be added in minor versions ONLY. They may not be included in patch releases. -- Breaking changes to existing test cases MAY be included and communicated as part of minor version upgrades ONLY. They MAY NOT be included in patch releases. We will aim to avoid these whenever possible. -- New "optional" test cases, and non-breaking fixes to existing test cases, MAY be added in minor or patch versions. - -Assuming you adapter plugin is pinned to a specific minor version of `dbt-core` (e.g. `~=1.1.0`), you can use the same pin for `dbt-tests-adapter`. - -**Note:** This is packaged as a plugin using a python namespace package. It cannot have an `__init__.py` file in the part of the hierarchy to which it needs to be attached. 
diff --git a/tests/adapter/dbt/__init__.py b/tests/adapter/dbt/__init__.py deleted file mode 100644 index b36383a6102..00000000000 --- a/tests/adapter/dbt/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from pkgutil import extend_path - -__path__ = extend_path(__path__, __name__) diff --git a/tests/adapter/dbt/tests/adapter/__version__.py b/tests/adapter/dbt/tests/adapter/__version__.py deleted file mode 100644 index f15b401d12d..00000000000 --- a/tests/adapter/dbt/tests/adapter/__version__.py +++ /dev/null @@ -1 +0,0 @@ -version = "1.8.0a1" diff --git a/tests/adapter/dbt/tests/adapter/catalog/files.py b/tests/adapter/dbt/tests/adapter/catalog/files.py deleted file mode 100644 index 9c19522e7f9..00000000000 --- a/tests/adapter/dbt/tests/adapter/catalog/files.py +++ /dev/null @@ -1,33 +0,0 @@ -MY_SEED = """ -id,value,record_valid_date -1,100,2023-01-01 00:00:00 -2,200,2023-01-02 00:00:00 -3,300,2023-01-02 00:00:00 -""".strip() - - -MY_TABLE = """ -{{ config( - materialized='table', -) }} -select * -from {{ ref('my_seed') }} -""" - - -MY_VIEW = """ -{{ config( - materialized='view', -) }} -select * -from {{ ref('my_seed') }} -""" - - -MY_MATERIALIZED_VIEW = """ -{{ config( - materialized='materialized_view', -) }} -select * -from {{ ref('my_seed') }} -""" diff --git a/tests/adapter/dbt/tests/adapter/grants/base_grants.py b/tests/adapter/dbt/tests/adapter/grants/base_grants.py deleted file mode 100644 index 82f5b9fe664..00000000000 --- a/tests/adapter/dbt/tests/adapter/grants/base_grants.py +++ /dev/null @@ -1,58 +0,0 @@ -import pytest -import os -from dbt.tests.util import ( - relation_from_name, - get_connection, -) -from dbt.context.base import BaseContext # diff_of_two_dicts only - -TEST_USER_ENV_VARS = ["DBT_TEST_USER_1", "DBT_TEST_USER_2", "DBT_TEST_USER_3"] - - -def replace_all(text, dic): - for i, j in dic.items(): - text = text.replace(i, j) - return text - - -class BaseGrants: - def privilege_grantee_name_overrides(self): - # these privilege and grantee names are 
valid on most databases, but not all! - # looking at you, BigQuery - # optionally use this to map from "select" --> "other_select_name", "insert" --> ... - return { - "select": "select", - "insert": "insert", - "fake_privilege": "fake_privilege", - "invalid_user": "invalid_user", - } - - def interpolate_name_overrides(self, yaml_text): - return replace_all(yaml_text, self.privilege_grantee_name_overrides()) - - @pytest.fixture(scope="class", autouse=True) - def get_test_users(self, project): - test_users = [] - for env_var in TEST_USER_ENV_VARS: - user_name = os.getenv(env_var) - if user_name: - test_users.append(user_name) - return test_users - - def get_grants_on_relation(self, project, relation_name): - relation = relation_from_name(project.adapter, relation_name) - adapter = project.adapter - with get_connection(adapter): - kwargs = {"relation": relation} - show_grant_sql = adapter.execute_macro("get_show_grant_sql", kwargs=kwargs) - _, grant_table = adapter.execute(show_grant_sql, fetch=True) - actual_grants = adapter.standardize_grants_dict(grant_table) - return actual_grants - - def assert_expected_grants_match_actual(self, project, relation_name, expected_grants): - actual_grants = self.get_grants_on_relation(project, relation_name) - # need a case-insensitive comparison - # so just a simple "assert expected == actual_grants" won't work - diff_a = BaseContext.diff_of_two_dicts(actual_grants, expected_grants) - diff_b = BaseContext.diff_of_two_dicts(expected_grants, actual_grants) - assert diff_a == diff_b == {} diff --git a/tests/adapter/dbt/tests/adapter/grants/test_incremental_grants.py b/tests/adapter/dbt/tests/adapter/grants/test_incremental_grants.py deleted file mode 100644 index 2f28eac02ab..00000000000 --- a/tests/adapter/dbt/tests/adapter/grants/test_incremental_grants.py +++ /dev/null @@ -1,102 +0,0 @@ -import pytest -from dbt.tests.util import ( - run_dbt, - run_dbt_and_capture, - get_manifest, - write_file, - relation_from_name, - 
get_connection, -) -from dbt.tests.adapter.grants.base_grants import BaseGrants - -my_incremental_model_sql = """ - select 1 as fun -""" - -incremental_model_schema_yml = """ -version: 2 -models: - - name: my_incremental_model - config: - materialized: incremental - grants: - select: ["{{ env_var('DBT_TEST_USER_1') }}"] -""" - -user2_incremental_model_schema_yml = """ -version: 2 -models: - - name: my_incremental_model - config: - materialized: incremental - grants: - select: ["{{ env_var('DBT_TEST_USER_2') }}"] -""" - - -class BaseIncrementalGrants(BaseGrants): - @pytest.fixture(scope="class") - def models(self): - updated_schema = self.interpolate_name_overrides(incremental_model_schema_yml) - return { - "my_incremental_model.sql": my_incremental_model_sql, - "schema.yml": updated_schema, - } - - def test_incremental_grants(self, project, get_test_users): - # we want the test to fail, not silently skip - test_users = get_test_users - select_privilege_name = self.privilege_grantee_name_overrides()["select"] - assert len(test_users) == 3 - - # Incremental materialization, single select grant - (results, log_output) = run_dbt_and_capture(["--debug", "run"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - model_id = "model.test.my_incremental_model" - model = manifest.nodes[model_id] - assert model.config.materialized == "incremental" - expected = {select_privilege_name: [test_users[0]]} - self.assert_expected_grants_match_actual(project, "my_incremental_model", expected) - - # Incremental materialization, run again without changes - (results, log_output) = run_dbt_and_capture(["--debug", "run"]) - assert len(results) == 1 - assert "revoke " not in log_output - assert "grant " not in log_output # with space to disambiguate from 'show grants' - self.assert_expected_grants_match_actual(project, "my_incremental_model", expected) - - # Incremental materialization, change select grant user - updated_yaml = 
self.interpolate_name_overrides(user2_incremental_model_schema_yml) - write_file(updated_yaml, project.project_root, "models", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "run"]) - assert len(results) == 1 - assert "revoke " in log_output - manifest = get_manifest(project.project_root) - model = manifest.nodes[model_id] - assert model.config.materialized == "incremental" - expected = {select_privilege_name: [test_users[1]]} - self.assert_expected_grants_match_actual(project, "my_incremental_model", expected) - - # Incremental materialization, same config, now with --full-refresh - run_dbt(["--debug", "run", "--full-refresh"]) - assert len(results) == 1 - # whether grants or revokes happened will vary by adapter - self.assert_expected_grants_match_actual(project, "my_incremental_model", expected) - - # Now drop the schema (with the table in it) - adapter = project.adapter - relation = relation_from_name(adapter, "my_incremental_model") - with get_connection(adapter): - adapter.drop_schema(relation) - - # Incremental materialization, same config, rebuild now that table is missing - (results, log_output) = run_dbt_and_capture(["--debug", "run"]) - assert len(results) == 1 - assert "grant " in log_output - assert "revoke " not in log_output - self.assert_expected_grants_match_actual(project, "my_incremental_model", expected) - - -class TestIncrementalGrants(BaseIncrementalGrants): - pass diff --git a/tests/adapter/dbt/tests/adapter/grants/test_invalid_grants.py b/tests/adapter/dbt/tests/adapter/grants/test_invalid_grants.py deleted file mode 100644 index b16cedaac84..00000000000 --- a/tests/adapter/dbt/tests/adapter/grants/test_invalid_grants.py +++ /dev/null @@ -1,68 +0,0 @@ -import pytest -from dbt.tests.util import ( - run_dbt_and_capture, - write_file, -) -from dbt.tests.adapter.grants.base_grants import BaseGrants - -my_invalid_model_sql = """ - select 1 as fun -""" - -invalid_user_table_model_schema_yml = """ -version: 2 -models: - - 
name: my_invalid_model - config: - materialized: table - grants: - select: ['invalid_user'] -""" - -invalid_privilege_table_model_schema_yml = """ -version: 2 -models: - - name: my_invalid_model - config: - materialized: table - grants: - fake_privilege: ["{{ env_var('DBT_TEST_USER_2') }}"] -""" - - -class BaseInvalidGrants(BaseGrants): - # The purpose of this test is to understand the user experience when providing - # an invalid 'grants' configuration. dbt will *not* try to intercept or interpret - # the database's own error at runtime -- it will just return those error messages. - # Hopefully they're helpful! - - @pytest.fixture(scope="class") - def models(self): - return { - "my_invalid_model.sql": my_invalid_model_sql, - } - - # Adapters will need to reimplement these methods with the specific - # language of their database - def grantee_does_not_exist_error(self): - return "does not exist" - - def privilege_does_not_exist_error(self): - return "unrecognized privilege" - - def test_invalid_grants(self, project, get_test_users, logs_dir): - # failure when grant to a user/role that doesn't exist - yaml_file = self.interpolate_name_overrides(invalid_user_table_model_schema_yml) - write_file(yaml_file, project.project_root, "models", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "run"], expect_pass=False) - assert self.grantee_does_not_exist_error() in log_output - - # failure when grant to a privilege that doesn't exist - yaml_file = self.interpolate_name_overrides(invalid_privilege_table_model_schema_yml) - write_file(yaml_file, project.project_root, "models", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "run"], expect_pass=False) - assert self.privilege_does_not_exist_error() in log_output - - -class TestInvalidGrants(BaseInvalidGrants): - pass diff --git a/tests/adapter/dbt/tests/adapter/grants/test_model_grants.py b/tests/adapter/dbt/tests/adapter/grants/test_model_grants.py deleted file mode 100644 index 
db2fe379f5b..00000000000 --- a/tests/adapter/dbt/tests/adapter/grants/test_model_grants.py +++ /dev/null @@ -1,156 +0,0 @@ -import pytest -from dbt.tests.util import ( - run_dbt_and_capture, - get_manifest, - write_file, -) -from dbt.tests.adapter.grants.base_grants import BaseGrants - -my_model_sql = """ - select 1 as fun -""" - -model_schema_yml = """ -version: 2 -models: - - name: my_model - config: - grants: - select: ["{{ env_var('DBT_TEST_USER_1') }}"] -""" - -user2_model_schema_yml = """ -version: 2 -models: - - name: my_model - config: - grants: - select: ["{{ env_var('DBT_TEST_USER_2') }}"] -""" - -table_model_schema_yml = """ -version: 2 -models: - - name: my_model - config: - materialized: table - grants: - select: ["{{ env_var('DBT_TEST_USER_1') }}"] -""" - -user2_table_model_schema_yml = """ -version: 2 -models: - - name: my_model - config: - materialized: table - grants: - select: ["{{ env_var('DBT_TEST_USER_2') }}"] -""" - -multiple_users_table_model_schema_yml = """ -version: 2 -models: - - name: my_model - config: - materialized: table - grants: - select: ["{{ env_var('DBT_TEST_USER_1') }}", "{{ env_var('DBT_TEST_USER_2') }}"] -""" - -multiple_privileges_table_model_schema_yml = """ -version: 2 -models: - - name: my_model - config: - materialized: table - grants: - select: ["{{ env_var('DBT_TEST_USER_1') }}"] - insert: ["{{ env_var('DBT_TEST_USER_2') }}"] -""" - - -class BaseModelGrants(BaseGrants): - @pytest.fixture(scope="class") - def models(self): - updated_schema = self.interpolate_name_overrides(model_schema_yml) - return { - "my_model.sql": my_model_sql, - "schema.yml": updated_schema, - } - - def test_view_table_grants(self, project, get_test_users): - # we want the test to fail, not silently skip - test_users = get_test_users - select_privilege_name = self.privilege_grantee_name_overrides()["select"] - insert_privilege_name = self.privilege_grantee_name_overrides()["insert"] - assert len(test_users) == 3 - - # View materialization, single 
select grant - (results, log_output) = run_dbt_and_capture(["--debug", "run"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - model_id = "model.test.my_model" - model = manifest.nodes[model_id] - expected = {select_privilege_name: [test_users[0]]} - assert model.config.grants == expected - assert model.config.materialized == "view" - self.assert_expected_grants_match_actual(project, "my_model", expected) - - # View materialization, change select grant user - updated_yaml = self.interpolate_name_overrides(user2_model_schema_yml) - write_file(updated_yaml, project.project_root, "models", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "run"]) - assert len(results) == 1 - - expected = {select_privilege_name: [get_test_users[1]]} - self.assert_expected_grants_match_actual(project, "my_model", expected) - - # Table materialization, single select grant - updated_yaml = self.interpolate_name_overrides(table_model_schema_yml) - write_file(updated_yaml, project.project_root, "models", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "run"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - model_id = "model.test.my_model" - model = manifest.nodes[model_id] - assert model.config.materialized == "table" - expected = {select_privilege_name: [test_users[0]]} - self.assert_expected_grants_match_actual(project, "my_model", expected) - - # Table materialization, change select grant user - updated_yaml = self.interpolate_name_overrides(user2_table_model_schema_yml) - write_file(updated_yaml, project.project_root, "models", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "run"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - model = manifest.nodes[model_id] - assert model.config.materialized == "table" - expected = {select_privilege_name: [test_users[1]]} - self.assert_expected_grants_match_actual(project, "my_model", expected) 
- - # Table materialization, multiple grantees - updated_yaml = self.interpolate_name_overrides(multiple_users_table_model_schema_yml) - write_file(updated_yaml, project.project_root, "models", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "run"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - model = manifest.nodes[model_id] - assert model.config.materialized == "table" - expected = {select_privilege_name: [test_users[0], test_users[1]]} - self.assert_expected_grants_match_actual(project, "my_model", expected) - - # Table materialization, multiple privileges - updated_yaml = self.interpolate_name_overrides(multiple_privileges_table_model_schema_yml) - write_file(updated_yaml, project.project_root, "models", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "run"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - model = manifest.nodes[model_id] - assert model.config.materialized == "table" - expected = {select_privilege_name: [test_users[0]], insert_privilege_name: [test_users[1]]} - self.assert_expected_grants_match_actual(project, "my_model", expected) - - -class TestModelGrants(BaseModelGrants): - pass diff --git a/tests/adapter/dbt/tests/adapter/grants/test_seed_grants.py b/tests/adapter/dbt/tests/adapter/grants/test_seed_grants.py deleted file mode 100644 index aff20c65cad..00000000000 --- a/tests/adapter/dbt/tests/adapter/grants/test_seed_grants.py +++ /dev/null @@ -1,143 +0,0 @@ -import pytest -from dbt.tests.util import ( - run_dbt, - run_dbt_and_capture, - get_manifest, - write_file, -) -from dbt.tests.adapter.grants.base_grants import BaseGrants - -seeds__my_seed_csv = """ -id,name,some_date -1,Easton,1981-05-20T06:46:51 -2,Lillian,1978-09-03T18:10:33 -""".lstrip() - -schema_base_yml = """ -version: 2 -seeds: - - name: my_seed - config: - grants: - select: ["{{ env_var('DBT_TEST_USER_1') }}"] -""" - -user2_schema_base_yml = """ -version: 2 -seeds: - 
- name: my_seed - config: - grants: - select: ["{{ env_var('DBT_TEST_USER_2') }}"] -""" - -ignore_grants_yml = """ -version: 2 -seeds: - - name: my_seed - config: - grants: {} -""" - -zero_grants_yml = """ -version: 2 -seeds: - - name: my_seed - config: - grants: - select: [] -""" - - -class BaseSeedGrants(BaseGrants): - def seeds_support_partial_refresh(self): - return True - - @pytest.fixture(scope="class") - def seeds(self): - updated_schema = self.interpolate_name_overrides(schema_base_yml) - return { - "my_seed.csv": seeds__my_seed_csv, - "schema.yml": updated_schema, - } - - def test_seed_grants(self, project, get_test_users): - test_users = get_test_users - select_privilege_name = self.privilege_grantee_name_overrides()["select"] - - # seed command - (results, log_output) = run_dbt_and_capture(["--debug", "seed"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - seed_id = "seed.test.my_seed" - seed = manifest.nodes[seed_id] - expected = {select_privilege_name: [test_users[0]]} - assert seed.config.grants == expected - assert "grant " in log_output - self.assert_expected_grants_match_actual(project, "my_seed", expected) - - # run it again, with no config changes - (results, log_output) = run_dbt_and_capture(["--debug", "seed"]) - assert len(results) == 1 - if self.seeds_support_partial_refresh(): - # grants carried over -- nothing should have changed - assert "revoke " not in log_output - assert "grant " not in log_output - else: - # seeds are always full-refreshed on this adapter, so we need to re-grant - assert "grant " in log_output - self.assert_expected_grants_match_actual(project, "my_seed", expected) - - # change the grantee, assert it updates - updated_yaml = self.interpolate_name_overrides(user2_schema_base_yml) - write_file(updated_yaml, project.project_root, "seeds", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "seed"]) - assert len(results) == 1 - expected = {select_privilege_name: 
[test_users[1]]} - self.assert_expected_grants_match_actual(project, "my_seed", expected) - - # run it again, with --full-refresh, grants should be the same - run_dbt(["seed", "--full-refresh"]) - self.assert_expected_grants_match_actual(project, "my_seed", expected) - - # change config to 'grants: {}' -- should be completely ignored - updated_yaml = self.interpolate_name_overrides(ignore_grants_yml) - write_file(updated_yaml, project.project_root, "seeds", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "seed"]) - assert len(results) == 1 - assert "revoke " not in log_output - assert "grant " not in log_output - manifest = get_manifest(project.project_root) - seed_id = "seed.test.my_seed" - seed = manifest.nodes[seed_id] - expected_config = {} - expected_actual = {select_privilege_name: [test_users[1]]} - assert seed.config.grants == expected_config - if self.seeds_support_partial_refresh(): - # ACTUAL grants will NOT match expected grants - self.assert_expected_grants_match_actual(project, "my_seed", expected_actual) - else: - # there should be ZERO grants on the seed - self.assert_expected_grants_match_actual(project, "my_seed", expected_config) - - # now run with ZERO grants -- all grants should be removed - # whether explicitly (revoke) or implicitly (recreated without any grants added on) - updated_yaml = self.interpolate_name_overrides(zero_grants_yml) - write_file(updated_yaml, project.project_root, "seeds", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "seed"]) - assert len(results) == 1 - if self.seeds_support_partial_refresh(): - assert "revoke " in log_output - expected = {} - self.assert_expected_grants_match_actual(project, "my_seed", expected) - - # run it again -- dbt shouldn't try to grant or revoke anything - (results, log_output) = run_dbt_and_capture(["--debug", "seed"]) - assert len(results) == 1 - assert "revoke " not in log_output - assert "grant " not in log_output - 
self.assert_expected_grants_match_actual(project, "my_seed", expected) - - -class TestSeedGrants(BaseSeedGrants): - pass diff --git a/tests/adapter/dbt/tests/adapter/grants/test_snapshot_grants.py b/tests/adapter/dbt/tests/adapter/grants/test_snapshot_grants.py deleted file mode 100644 index 6bf69b3bb94..00000000000 --- a/tests/adapter/dbt/tests/adapter/grants/test_snapshot_grants.py +++ /dev/null @@ -1,78 +0,0 @@ -import pytest -from dbt.tests.util import ( - run_dbt, - run_dbt_and_capture, - get_manifest, - write_file, -) -from dbt.tests.adapter.grants.base_grants import BaseGrants - -my_snapshot_sql = """ -{% snapshot my_snapshot %} - {{ config( - check_cols='all', unique_key='id', strategy='check', - target_database=database, target_schema=schema - ) }} - select 1 as id, cast('blue' as {{ type_string() }}) as color -{% endsnapshot %} -""".strip() - -snapshot_schema_yml = """ -version: 2 -snapshots: - - name: my_snapshot - config: - grants: - select: ["{{ env_var('DBT_TEST_USER_1') }}"] -""" - -user2_snapshot_schema_yml = """ -version: 2 -snapshots: - - name: my_snapshot - config: - grants: - select: ["{{ env_var('DBT_TEST_USER_2') }}"] -""" - - -class BaseSnapshotGrants(BaseGrants): - @pytest.fixture(scope="class") - def snapshots(self): - return { - "my_snapshot.sql": my_snapshot_sql, - "schema.yml": self.interpolate_name_overrides(snapshot_schema_yml), - } - - def test_snapshot_grants(self, project, get_test_users): - test_users = get_test_users - select_privilege_name = self.privilege_grantee_name_overrides()["select"] - - # run the snapshot - results = run_dbt(["snapshot"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - snapshot_id = "snapshot.test.my_snapshot" - snapshot = manifest.nodes[snapshot_id] - expected = {select_privilege_name: [test_users[0]]} - assert snapshot.config.grants == expected - self.assert_expected_grants_match_actual(project, "my_snapshot", expected) - - # run it again, nothing should have changed - 
(results, log_output) = run_dbt_and_capture(["--debug", "snapshot"]) - assert len(results) == 1 - assert "revoke " not in log_output - assert "grant " not in log_output - self.assert_expected_grants_match_actual(project, "my_snapshot", expected) - - # change the grantee, assert it updates - updated_yaml = self.interpolate_name_overrides(user2_snapshot_schema_yml) - write_file(updated_yaml, project.project_root, "snapshots", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "snapshot"]) - assert len(results) == 1 - expected = {select_privilege_name: [test_users[1]]} - self.assert_expected_grants_match_actual(project, "my_snapshot", expected) - - -class TestSnapshotGrants(BaseSnapshotGrants): - pass diff --git a/tests/adapter/setup.py b/tests/adapter/setup.py deleted file mode 100644 index 1c5b1898648..00000000000 --- a/tests/adapter/setup.py +++ /dev/null @@ -1,57 +0,0 @@ -#!/usr/bin/env python -import os -import sys - -if sys.version_info < (3, 8): - print("Error: dbt does not support this version of Python.") - print("Please upgrade to Python 3.8 or higher.") - sys.exit(1) - - -from setuptools import setup - -try: - from setuptools import find_namespace_packages -except ImportError: - # the user has a downlevel version of setuptools. 
- print("Error: dbt requires setuptools v40.1.0 or higher.") - print('Please upgrade setuptools with "pip install --upgrade setuptools" ' "and try again") - sys.exit(1) - - -package_name = "dbt-tests-adapter" -package_version = "1.8.0a1" -description = """The dbt adapter tests for adapter plugins""" - -this_directory = os.path.abspath(os.path.dirname(__file__)) -with open(os.path.join(this_directory, "README.md")) as f: - long_description = f.read() - -setup( - name=package_name, - version=package_version, - description=description, - long_description=long_description, - long_description_content_type="text/markdown", - author="dbt Labs", - author_email="info@dbtlabs.com", - url="https://github.com/dbt-labs/dbt-core/tree/main/tests/adapter", - packages=find_namespace_packages(include=["dbt", "dbt.*"]), - install_requires=[ - "dbt-core=={}".format(package_version), - "pytest>=7.0.0", - ], - zip_safe=False, - classifiers=[ - "Development Status :: 5 - Production/Stable", - "License :: OSI Approved :: Apache Software License", - "Operating System :: Microsoft :: Windows", - "Operating System :: MacOS :: MacOS X", - "Operating System :: POSIX :: Linux", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - ], - python_requires=">=3.8", -) diff --git a/tests/functional/adapter/__init__.py b/tests/functional/adapter/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional/adapter/aliases/__init__.py b/tests/functional/adapter/aliases/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/aliases/fixtures.py b/tests/functional/adapter/aliases/fixtures.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/aliases/fixtures.py rename to tests/functional/adapter/aliases/fixtures.py diff --git a/tests/adapter/dbt/tests/adapter/aliases/test_aliases.py 
b/tests/functional/adapter/aliases/test_aliases.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/aliases/test_aliases.py rename to tests/functional/adapter/aliases/test_aliases.py index d9ff6b5b28f..d5f5b37152e 100644 --- a/tests/adapter/dbt/tests/adapter/aliases/test_aliases.py +++ b/tests/functional/adapter/aliases/test_aliases.py @@ -1,6 +1,6 @@ import pytest from dbt.tests.util import run_dbt -from dbt.tests.adapter.aliases.fixtures import ( +from tests.functional.adapter.aliases.fixtures import ( MACROS__CAST_SQL, MACROS__EXPECT_VALUE_SQL, MODELS__SCHEMA_YML, diff --git a/tests/adapter/dbt/tests/adapter/basic/__init__.py b/tests/functional/adapter/basic/__init__.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/basic/__init__.py rename to tests/functional/adapter/basic/__init__.py diff --git a/tests/adapter/dbt/tests/adapter/basic/expected_catalog.py b/tests/functional/adapter/basic/expected_catalog.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/basic/expected_catalog.py rename to tests/functional/adapter/basic/expected_catalog.py diff --git a/tests/adapter/dbt/tests/adapter/basic/files.py b/tests/functional/adapter/basic/files.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/basic/files.py rename to tests/functional/adapter/basic/files.py diff --git a/tests/adapter/dbt/tests/adapter/basic/test_adapter_methods.py b/tests/functional/adapter/basic/test_adapter_methods.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/basic/test_adapter_methods.py rename to tests/functional/adapter/basic/test_adapter_methods.py diff --git a/tests/adapter/dbt/tests/adapter/basic/test_base.py b/tests/functional/adapter/basic/test_base.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/basic/test_base.py rename to tests/functional/adapter/basic/test_base.py index da71db6c69b..64edd03872e 100644 --- a/tests/adapter/dbt/tests/adapter/basic/test_base.py +++ 
b/tests/functional/adapter/basic/test_base.py @@ -6,7 +6,7 @@ check_relation_types, check_relations_equal, ) -from dbt.tests.adapter.basic.files import ( +from tests.functional.adapter.basic.files import ( seeds_base_csv, base_view_sql, base_table_sql, diff --git a/tests/adapter/dbt/tests/adapter/basic/test_docs_generate.py b/tests/functional/adapter/basic/test_docs_generate.py similarity index 97% rename from tests/adapter/dbt/tests/adapter/basic/test_docs_generate.py rename to tests/functional/adapter/basic/test_docs_generate.py index 78236a4f7dd..9b849160564 100644 --- a/tests/adapter/dbt/tests/adapter/basic/test_docs_generate.py +++ b/tests/functional/adapter/basic/test_docs_generate.py @@ -5,7 +5,7 @@ from dbt.tests.util import run_dbt, rm_file, get_artifact, check_datetime_between from dbt.tests.fixtures.project import write_project_files -from dbt.tests.adapter.basic.expected_catalog import ( +from tests.functional.adapter.basic.expected_catalog import ( base_expected_catalog, no_stats, expected_references_catalog, @@ -452,9 +452,9 @@ def test_run_and_generate(self, project, expected_catalog): verify_catalog(project, expected_catalog, start_time) # Check that assets have been copied to the target directory for use in the docs html page - assert os.path.exists(os.path.join(".", "target", "assets")) - assert os.path.exists(os.path.join(".", "target", "assets", "lorem-ipsum.txt")) - assert not os.path.exists(os.path.join(".", "target", "non-existent-assets")) + assert os.path.exists(os.path.join("", "target", "assets")) + assert os.path.exists(os.path.join("", "target", "assets", "lorem-ipsum.txt")) + assert not os.path.exists(os.path.join("", "target", "non-existent-assets")) class TestDocsGenerate(BaseDocsGenerate): diff --git a/tests/adapter/dbt/tests/adapter/basic/test_empty.py b/tests/functional/adapter/basic/test_empty.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/basic/test_empty.py rename to 
tests/functional/adapter/basic/test_empty.py diff --git a/tests/adapter/dbt/tests/adapter/basic/test_ephemeral.py b/tests/functional/adapter/basic/test_ephemeral.py similarity index 97% rename from tests/adapter/dbt/tests/adapter/basic/test_ephemeral.py rename to tests/functional/adapter/basic/test_ephemeral.py index 311d43651a9..65a9e5af727 100644 --- a/tests/adapter/dbt/tests/adapter/basic/test_ephemeral.py +++ b/tests/functional/adapter/basic/test_ephemeral.py @@ -7,7 +7,7 @@ check_result_nodes_by_name, relation_from_name, ) -from dbt.tests.adapter.basic.files import ( +from tests.functional.adapter.basic.files import ( seeds_base_csv, base_ephemeral_sql, ephemeral_view_sql, diff --git a/tests/adapter/dbt/tests/adapter/basic/test_generic_tests.py b/tests/functional/adapter/basic/test_generic_tests.py similarity index 97% rename from tests/adapter/dbt/tests/adapter/basic/test_generic_tests.py rename to tests/functional/adapter/basic/test_generic_tests.py index e2c0ef7c39a..0c71a197bb0 100644 --- a/tests/adapter/dbt/tests/adapter/basic/test_generic_tests.py +++ b/tests/functional/adapter/basic/test_generic_tests.py @@ -1,6 +1,6 @@ import pytest from dbt.tests.util import run_dbt -from dbt.tests.adapter.basic.files import ( +from tests.functional.adapter.basic.files import ( seeds_base_csv, generic_test_seed_yml, base_view_sql, diff --git a/tests/adapter/dbt/tests/adapter/basic/test_incremental.py b/tests/functional/adapter/basic/test_incremental.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/basic/test_incremental.py rename to tests/functional/adapter/basic/test_incremental.py index 3e1b83794f6..4d94ff17d2d 100644 --- a/tests/adapter/dbt/tests/adapter/basic/test_incremental.py +++ b/tests/functional/adapter/basic/test_incremental.py @@ -1,7 +1,7 @@ import pytest from dbt.tests.util import run_dbt, check_relations_equal, relation_from_name from dbt.artifacts.schemas.results import RunStatus -from dbt.tests.adapter.basic.files import ( +from 
tests.functional.adapter.basic.files import ( seeds_base_csv, seeds_added_csv, schema_base_yml, diff --git a/tests/adapter/dbt/tests/adapter/basic/test_singular_tests.py b/tests/functional/adapter/basic/test_singular_tests.py similarity index 96% rename from tests/adapter/dbt/tests/adapter/basic/test_singular_tests.py rename to tests/functional/adapter/basic/test_singular_tests.py index 5b77c7e6f60..3f9071cc56e 100644 --- a/tests/adapter/dbt/tests/adapter/basic/test_singular_tests.py +++ b/tests/functional/adapter/basic/test_singular_tests.py @@ -1,5 +1,5 @@ import pytest -from dbt.tests.adapter.basic.files import ( +from tests.functional.adapter.basic.files import ( test_passing_sql, test_failing_sql, ) diff --git a/tests/adapter/dbt/tests/adapter/basic/test_singular_tests_ephemeral.py b/tests/functional/adapter/basic/test_singular_tests_ephemeral.py similarity index 97% rename from tests/adapter/dbt/tests/adapter/basic/test_singular_tests_ephemeral.py rename to tests/functional/adapter/basic/test_singular_tests_ephemeral.py index 21d234d5b92..6e2315d5fbd 100644 --- a/tests/adapter/dbt/tests/adapter/basic/test_singular_tests_ephemeral.py +++ b/tests/functional/adapter/basic/test_singular_tests_ephemeral.py @@ -1,7 +1,7 @@ import pytest from dbt.tests.util import run_dbt, check_result_nodes_by_name -from dbt.tests.adapter.basic.files import ( +from tests.functional.adapter.basic.files import ( seeds_base_csv, ephemeral_with_cte_sql, test_ephemeral_passing_sql, diff --git a/tests/adapter/dbt/tests/adapter/basic/test_snapshot_check_cols.py b/tests/functional/adapter/basic/test_snapshot_check_cols.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/basic/test_snapshot_check_cols.py rename to tests/functional/adapter/basic/test_snapshot_check_cols.py index 26d5a1e651e..29eb35c3bea 100644 --- a/tests/adapter/dbt/tests/adapter/basic/test_snapshot_check_cols.py +++ b/tests/functional/adapter/basic/test_snapshot_check_cols.py @@ -1,6 +1,6 @@ import pytest 
from dbt.tests.util import run_dbt, update_rows, relation_from_name -from dbt.tests.adapter.basic.files import ( +from tests.functional.adapter.basic.files import ( seeds_base_csv, seeds_added_csv, cc_all_snapshot_sql, diff --git a/tests/adapter/dbt/tests/adapter/basic/test_snapshot_timestamp.py b/tests/functional/adapter/basic/test_snapshot_timestamp.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/basic/test_snapshot_timestamp.py rename to tests/functional/adapter/basic/test_snapshot_timestamp.py index 47320c74919..7c227084336 100644 --- a/tests/adapter/dbt/tests/adapter/basic/test_snapshot_timestamp.py +++ b/tests/functional/adapter/basic/test_snapshot_timestamp.py @@ -1,6 +1,6 @@ import pytest from dbt.tests.util import run_dbt, relation_from_name, update_rows -from dbt.tests.adapter.basic.files import ( +from tests.functional.adapter.basic.files import ( seeds_base_csv, seeds_newcolumns_csv, seeds_added_csv, diff --git a/tests/adapter/dbt/tests/adapter/basic/test_table_materialization.py b/tests/functional/adapter/basic/test_table_materialization.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/basic/test_table_materialization.py rename to tests/functional/adapter/basic/test_table_materialization.py diff --git a/tests/adapter/dbt/tests/adapter/basic/test_validate_connection.py b/tests/functional/adapter/basic/test_validate_connection.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/basic/test_validate_connection.py rename to tests/functional/adapter/basic/test_validate_connection.py diff --git a/tests/functional/adapter/caching/__init__.py b/tests/functional/adapter/caching/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/caching/test_caching.py b/tests/functional/adapter/caching/test_caching.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/caching/test_caching.py rename to tests/functional/adapter/caching/test_caching.py 
diff --git a/tests/functional/adapter/catalog/__init__.py b/tests/functional/adapter/catalog/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional/catalog_tests/files.py b/tests/functional/adapter/catalog/files.py similarity index 100% rename from tests/functional/catalog_tests/files.py rename to tests/functional/adapter/catalog/files.py diff --git a/tests/adapter/dbt/tests/adapter/catalog/relation_types.py b/tests/functional/adapter/catalog/relation_types.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/catalog/relation_types.py rename to tests/functional/adapter/catalog/relation_types.py index ccad99fde39..bbb3bed0a72 100644 --- a/tests/adapter/dbt/tests/adapter/catalog/relation_types.py +++ b/tests/functional/adapter/catalog/relation_types.py @@ -2,7 +2,7 @@ from dbt.tests.util import run_dbt import pytest -from dbt.tests.adapter.catalog import files +from tests.functional.adapter.catalog import files class CatalogRelationTypes: diff --git a/tests/functional/adapter/column_types/__init__.py b/tests/functional/adapter/column_types/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/column_types/fixtures.py b/tests/functional/adapter/column_types/fixtures.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/column_types/fixtures.py rename to tests/functional/adapter/column_types/fixtures.py diff --git a/tests/adapter/dbt/tests/adapter/column_types/test_column_types.py b/tests/functional/adapter/column_types/test_column_types.py similarity index 83% rename from tests/adapter/dbt/tests/adapter/column_types/test_column_types.py rename to tests/functional/adapter/column_types/test_column_types.py index cc213d36a4b..fd783a08ddc 100644 --- a/tests/adapter/dbt/tests/adapter/column_types/test_column_types.py +++ b/tests/functional/adapter/column_types/test_column_types.py @@ -1,6 +1,10 @@ import pytest from dbt.tests.util import run_dbt -from 
dbt.tests.adapter.column_types.fixtures import macro_test_is_type_sql, model_sql, schema_yml +from tests.functional.adapter.column_types.fixtures import ( + macro_test_is_type_sql, + model_sql, + schema_yml, +) class BaseColumnTypes: diff --git a/tests/functional/adapter/concurrency/__init__.py b/tests/functional/adapter/concurrency/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/concurrency/test_concurrency.py b/tests/functional/adapter/concurrency/test_concurrency.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/concurrency/test_concurrency.py rename to tests/functional/adapter/concurrency/test_concurrency.py diff --git a/tests/functional/adapter/constraints/__init__.py b/tests/functional/adapter/constraints/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/constraints/fixtures.py b/tests/functional/adapter/constraints/fixtures.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/constraints/fixtures.py rename to tests/functional/adapter/constraints/fixtures.py diff --git a/tests/adapter/dbt/tests/adapter/constraints/test_constraints.py b/tests/functional/adapter/constraints/test_constraints.py similarity index 99% rename from tests/adapter/dbt/tests/adapter/constraints/test_constraints.py rename to tests/functional/adapter/constraints/test_constraints.py index 28257d177db..894c451cda4 100644 --- a/tests/adapter/dbt/tests/adapter/constraints/test_constraints.py +++ b/tests/functional/adapter/constraints/test_constraints.py @@ -10,7 +10,7 @@ relation_from_name, ) -from dbt.tests.adapter.constraints.fixtures import ( +from tests.functional.adapter.constraints.fixtures import ( my_model_sql, my_incremental_model_sql, my_model_wrong_order_sql, diff --git a/tests/functional/adapter/dbt_clone/__init__.py b/tests/functional/adapter/dbt_clone/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git 
a/tests/adapter/dbt/tests/adapter/dbt_clone/fixtures.py b/tests/functional/adapter/dbt_clone/fixtures.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/dbt_clone/fixtures.py rename to tests/functional/adapter/dbt_clone/fixtures.py diff --git a/tests/adapter/dbt/tests/adapter/dbt_clone/test_dbt_clone.py b/tests/functional/adapter/dbt_clone/test_dbt_clone.py similarity index 99% rename from tests/adapter/dbt/tests/adapter/dbt_clone/test_dbt_clone.py rename to tests/functional/adapter/dbt_clone/test_dbt_clone.py index 1b7db87698c..a602b3cb7f7 100644 --- a/tests/adapter/dbt/tests/adapter/dbt_clone/test_dbt_clone.py +++ b/tests/functional/adapter/dbt_clone/test_dbt_clone.py @@ -6,7 +6,7 @@ import pytest from dbt.exceptions import DbtRuntimeError -from dbt.tests.adapter.dbt_clone.fixtures import ( +from tests.functional.adapter.dbt_clone.fixtures import ( seed_csv, table_model_sql, view_model_sql, diff --git a/tests/functional/adapter/dbt_debug/__init__.py b/tests/functional/adapter/dbt_debug/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/dbt_debug/test_dbt_debug.py b/tests/functional/adapter/dbt_debug/test_dbt_debug.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/dbt_debug/test_dbt_debug.py rename to tests/functional/adapter/dbt_debug/test_dbt_debug.py diff --git a/tests/functional/adapter/dbt_show/__init__.py b/tests/functional/adapter/dbt_show/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/dbt_show/fixtures.py b/tests/functional/adapter/dbt_show/fixtures.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/dbt_show/fixtures.py rename to tests/functional/adapter/dbt_show/fixtures.py diff --git a/tests/adapter/dbt/tests/adapter/dbt_show/test_dbt_show.py b/tests/functional/adapter/dbt_show/test_dbt_show.py similarity index 96% rename from 
tests/adapter/dbt/tests/adapter/dbt_show/test_dbt_show.py rename to tests/functional/adapter/dbt_show/test_dbt_show.py index 419485cf650..4c5c1c18bac 100644 --- a/tests/adapter/dbt/tests/adapter/dbt_show/test_dbt_show.py +++ b/tests/functional/adapter/dbt_show/test_dbt_show.py @@ -1,7 +1,7 @@ import pytest from dbt.tests.util import run_dbt -from dbt.tests.adapter.dbt_show.fixtures import ( +from tests.functional.adapter.dbt_show.fixtures import ( models__sql_header, models__ephemeral_model, models__second_ephemeral_model, diff --git a/tests/functional/adapter/empty/__init__.py b/tests/functional/adapter/empty/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/empty/test_empty.py b/tests/functional/adapter/empty/test_empty.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/empty/test_empty.py rename to tests/functional/adapter/empty/test_empty.py diff --git a/tests/functional/adapter/ephemeral/__init__.py b/tests/functional/adapter/ephemeral/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/ephemeral/test_ephemeral.py b/tests/functional/adapter/ephemeral/test_ephemeral.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/ephemeral/test_ephemeral.py rename to tests/functional/adapter/ephemeral/test_ephemeral.py diff --git a/tests/functional/adapter/hooks/__init__.py b/tests/functional/adapter/hooks/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/hooks/data/seed_model.sql b/tests/functional/adapter/hooks/data/seed_model.sql similarity index 100% rename from tests/adapter/dbt/tests/adapter/hooks/data/seed_model.sql rename to tests/functional/adapter/hooks/data/seed_model.sql diff --git a/tests/adapter/dbt/tests/adapter/hooks/data/seed_run.sql b/tests/functional/adapter/hooks/data/seed_run.sql similarity index 100% rename from 
tests/adapter/dbt/tests/adapter/hooks/data/seed_run.sql rename to tests/functional/adapter/hooks/data/seed_run.sql diff --git a/tests/adapter/dbt/tests/adapter/hooks/fixtures.py b/tests/functional/adapter/hooks/fixtures.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/hooks/fixtures.py rename to tests/functional/adapter/hooks/fixtures.py diff --git a/tests/adapter/dbt/tests/adapter/hooks/test_model_hooks.py b/tests/functional/adapter/hooks/test_model_hooks.py similarity index 99% rename from tests/adapter/dbt/tests/adapter/hooks/test_model_hooks.py rename to tests/functional/adapter/hooks/test_model_hooks.py index 7e1476252a3..90ba298054a 100644 --- a/tests/adapter/dbt/tests/adapter/hooks/test_model_hooks.py +++ b/tests/functional/adapter/hooks/test_model_hooks.py @@ -10,7 +10,7 @@ write_file, ) -from dbt.tests.adapter.hooks.fixtures import ( +from tests.functional.adapter.hooks.fixtures import ( models__hooked, models__hooks, models__hooks_configured, diff --git a/tests/adapter/dbt/tests/adapter/hooks/test_run_hooks.py b/tests/functional/adapter/hooks/test_run_hooks.py similarity index 99% rename from tests/adapter/dbt/tests/adapter/hooks/test_run_hooks.py rename to tests/functional/adapter/hooks/test_run_hooks.py index e508d152450..6671e8cee19 100644 --- a/tests/adapter/dbt/tests/adapter/hooks/test_run_hooks.py +++ b/tests/functional/adapter/hooks/test_run_hooks.py @@ -3,7 +3,7 @@ from pathlib import Path -from dbt.tests.adapter.hooks.fixtures import ( +from tests.functional.adapter.hooks.fixtures import ( macros__hook, macros__before_and_after, models__hooks, diff --git a/tests/functional/adapter/incremental/__init__.py b/tests/functional/adapter/incremental/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/incremental/fixtures.py b/tests/functional/adapter/incremental/fixtures.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/incremental/fixtures.py rename to 
tests/functional/adapter/incremental/fixtures.py diff --git a/tests/adapter/dbt/tests/adapter/incremental/test_incremental_merge_exclude_columns.py b/tests/functional/adapter/incremental/test_incremental_merge_exclude_columns.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/incremental/test_incremental_merge_exclude_columns.py rename to tests/functional/adapter/incremental/test_incremental_merge_exclude_columns.py diff --git a/tests/adapter/dbt/tests/adapter/incremental/test_incremental_on_schema_change.py b/tests/functional/adapter/incremental/test_incremental_on_schema_change.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/incremental/test_incremental_on_schema_change.py rename to tests/functional/adapter/incremental/test_incremental_on_schema_change.py index 4fbefbe7651..f5d415de3cd 100644 --- a/tests/adapter/dbt/tests/adapter/incremental/test_incremental_on_schema_change.py +++ b/tests/functional/adapter/incremental/test_incremental_on_schema_change.py @@ -5,7 +5,7 @@ run_dbt, ) -from dbt.tests.adapter.incremental.fixtures import ( +from tests.functional.adapter.incremental.fixtures import ( _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY, _MODELS__INCREMENTAL_IGNORE, _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY_TARGET, diff --git a/tests/adapter/dbt/tests/adapter/incremental/test_incremental_predicates.py b/tests/functional/adapter/incremental/test_incremental_predicates.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/incremental/test_incremental_predicates.py rename to tests/functional/adapter/incremental/test_incremental_predicates.py diff --git a/tests/adapter/dbt/tests/adapter/incremental/test_incremental_unique_id.py b/tests/functional/adapter/incremental/test_incremental_unique_id.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/incremental/test_incremental_unique_id.py rename to tests/functional/adapter/incremental/test_incremental_unique_id.py diff --git 
a/tests/functional/adapter/materialized_view/__init__.py b/tests/functional/adapter/materialized_view/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/materialized_view/basic.py b/tests/functional/adapter/materialized_view/basic.py similarity index 99% rename from tests/adapter/dbt/tests/adapter/materialized_view/basic.py rename to tests/functional/adapter/materialized_view/basic.py index 9720945ba50..518522a4631 100644 --- a/tests/adapter/dbt/tests/adapter/materialized_view/basic.py +++ b/tests/functional/adapter/materialized_view/basic.py @@ -12,7 +12,7 @@ set_model_file, ) -from dbt.tests.adapter.materialized_view.files import ( +from tests.functional.adapter.materialized_view.files import ( MY_MATERIALIZED_VIEW, MY_SEED, MY_TABLE, diff --git a/tests/adapter/dbt/tests/adapter/materialized_view/changes.py b/tests/functional/adapter/materialized_view/changes.py similarity index 99% rename from tests/adapter/dbt/tests/adapter/materialized_view/changes.py rename to tests/functional/adapter/materialized_view/changes.py index 5e645ff999a..243b1e34995 100644 --- a/tests/adapter/dbt/tests/adapter/materialized_view/changes.py +++ b/tests/functional/adapter/materialized_view/changes.py @@ -13,7 +13,7 @@ set_model_file, ) -from dbt.tests.adapter.materialized_view.files import ( +from tests.functional.adapter.materialized_view.files import ( MY_MATERIALIZED_VIEW, MY_SEED, ) diff --git a/tests/adapter/dbt/tests/adapter/materialized_view/files.py b/tests/functional/adapter/materialized_view/files.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/materialized_view/files.py rename to tests/functional/adapter/materialized_view/files.py diff --git a/tests/functional/adapter/persist_docs/__init__.py b/tests/functional/adapter/persist_docs/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/persist_docs/fixtures.py 
b/tests/functional/adapter/persist_docs/fixtures.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/persist_docs/fixtures.py rename to tests/functional/adapter/persist_docs/fixtures.py diff --git a/tests/adapter/dbt/tests/adapter/persist_docs/test_persist_docs.py b/tests/functional/adapter/persist_docs/test_persist_docs.py similarity index 99% rename from tests/adapter/dbt/tests/adapter/persist_docs/test_persist_docs.py rename to tests/functional/adapter/persist_docs/test_persist_docs.py index 99c0ef746f9..aa311eba9f2 100644 --- a/tests/adapter/dbt/tests/adapter/persist_docs/test_persist_docs.py +++ b/tests/functional/adapter/persist_docs/test_persist_docs.py @@ -4,7 +4,7 @@ from dbt.tests.util import run_dbt -from dbt.tests.adapter.persist_docs.fixtures import ( +from tests.functional.adapter.persist_docs.fixtures import ( _DOCS__MY_FUN_DOCS, _MODELS__MISSING_COLUMN, _MODELS__MODEL_USING_QUOTE_UTIL, diff --git a/tests/functional/adapter/python_model/__init__.py b/tests/functional/adapter/python_model/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/python_model/test_python_model.py b/tests/functional/adapter/python_model/test_python_model.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/python_model/test_python_model.py rename to tests/functional/adapter/python_model/test_python_model.py diff --git a/tests/adapter/dbt/tests/adapter/python_model/test_spark.py b/tests/functional/adapter/python_model/test_spark.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/python_model/test_spark.py rename to tests/functional/adapter/python_model/test_spark.py diff --git a/tests/functional/adapter/query_comment/__init__.py b/tests/functional/adapter/query_comment/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/query_comment/fixtures.py b/tests/functional/adapter/query_comment/fixtures.py similarity index 
100% rename from tests/adapter/dbt/tests/adapter/query_comment/fixtures.py rename to tests/functional/adapter/query_comment/fixtures.py diff --git a/tests/adapter/dbt/tests/adapter/query_comment/test_query_comment.py b/tests/functional/adapter/query_comment/test_query_comment.py similarity index 97% rename from tests/adapter/dbt/tests/adapter/query_comment/test_query_comment.py rename to tests/functional/adapter/query_comment/test_query_comment.py index 16d51b35e75..18d66ffda7d 100644 --- a/tests/adapter/dbt/tests/adapter/query_comment/test_query_comment.py +++ b/tests/functional/adapter/query_comment/test_query_comment.py @@ -3,7 +3,7 @@ from dbt.exceptions import DbtRuntimeError from dbt.version import __version__ as dbt_version from dbt.tests.util import run_dbt_and_capture -from dbt.tests.adapter.query_comment.fixtures import MACROS__MACRO_SQL, MODELS__X_SQL +from tests.functional.adapter.query_comment.fixtures import MACROS__MACRO_SQL, MODELS__X_SQL class BaseDefaultQueryComments: diff --git a/tests/functional/adapter/relations/__init__.py b/tests/functional/adapter/relations/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/relations/test_changing_relation_type.py b/tests/functional/adapter/relations/test_changing_relation_type.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/relations/test_changing_relation_type.py rename to tests/functional/adapter/relations/test_changing_relation_type.py diff --git a/tests/adapter/dbt/tests/adapter/relations/test_dropping_schema_named.py b/tests/functional/adapter/relations/test_dropping_schema_named.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/relations/test_dropping_schema_named.py rename to tests/functional/adapter/relations/test_dropping_schema_named.py diff --git a/tests/functional/adapter/simple_copy/__init__.py b/tests/functional/adapter/simple_copy/__init__.py new file mode 100644 index 00000000000..e69de29bb2d 
diff --git a/tests/adapter/dbt/tests/adapter/simple_copy/fixtures.py b/tests/functional/adapter/simple_copy/fixtures.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/simple_copy/fixtures.py rename to tests/functional/adapter/simple_copy/fixtures.py diff --git a/tests/adapter/dbt/tests/adapter/simple_copy/test_copy_uppercase.py b/tests/functional/adapter/simple_copy/test_copy_uppercase.py similarity index 97% rename from tests/adapter/dbt/tests/adapter/simple_copy/test_copy_uppercase.py rename to tests/functional/adapter/simple_copy/test_copy_uppercase.py index 92716e619d7..277f592fb0e 100644 --- a/tests/adapter/dbt/tests/adapter/simple_copy/test_copy_uppercase.py +++ b/tests/functional/adapter/simple_copy/test_copy_uppercase.py @@ -1,7 +1,7 @@ import pytest from dbt.tests.util import run_dbt, check_relations_equal -from dbt.tests.adapter.simple_copy.fixtures import ( +from tests.functional.adapter.simple_copy.fixtures import ( _PROPERTIES__SCHEMA_YML, _SEEDS__SEED_INITIAL, _MODELS__ADVANCED_INCREMENTAL, diff --git a/tests/adapter/dbt/tests/adapter/simple_copy/test_simple_copy.py b/tests/functional/adapter/simple_copy/test_simple_copy.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/simple_copy/test_simple_copy.py rename to tests/functional/adapter/simple_copy/test_simple_copy.py index 0e436417873..77494b6b679 100644 --- a/tests/adapter/dbt/tests/adapter/simple_copy/test_simple_copy.py +++ b/tests/functional/adapter/simple_copy/test_simple_copy.py @@ -7,7 +7,7 @@ from dbt.tests.util import run_dbt, rm_file, write_file, check_relations_equal -from dbt.tests.adapter.simple_copy.fixtures import ( +from tests.functional.adapter.simple_copy.fixtures import ( _PROPERTIES__SCHEMA_YML, _SEEDS__SEED_INITIAL, _SEEDS__SEED_UPDATE, diff --git a/tests/functional/adapter/simple_seed/__init__.py b/tests/functional/adapter/simple_seed/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git 
a/tests/adapter/dbt/tests/adapter/simple_seed/fixtures.py b/tests/functional/adapter/simple_seed/fixtures.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/simple_seed/fixtures.py rename to tests/functional/adapter/simple_seed/fixtures.py diff --git a/tests/adapter/dbt/tests/adapter/simple_seed/seed_bom.csv b/tests/functional/adapter/simple_seed/seed_bom.csv similarity index 100% rename from tests/adapter/dbt/tests/adapter/simple_seed/seed_bom.csv rename to tests/functional/adapter/simple_seed/seed_bom.csv diff --git a/tests/adapter/dbt/tests/adapter/simple_seed/seeds.py b/tests/functional/adapter/simple_seed/seeds.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/simple_seed/seeds.py rename to tests/functional/adapter/simple_seed/seeds.py diff --git a/tests/adapter/dbt/tests/adapter/simple_seed/test_seed.py b/tests/functional/adapter/simple_seed/test_seed.py similarity index 99% rename from tests/adapter/dbt/tests/adapter/simple_seed/test_seed.py rename to tests/functional/adapter/simple_seed/test_seed.py index 3e42cd4c0d0..d01f5fe3159 100644 --- a/tests/adapter/dbt/tests/adapter/simple_seed/test_seed.py +++ b/tests/functional/adapter/simple_seed/test_seed.py @@ -15,13 +15,13 @@ check_table_does_not_exist, ) -from dbt.tests.adapter.simple_seed.fixtures import ( +from tests.functional.adapter.simple_seed.fixtures import ( models__downstream_from_seed_actual, models__from_basic_seed, models__downstream_from_seed_pipe_separated, ) -from dbt.tests.adapter.simple_seed.seeds import ( +from tests.functional.adapter.simple_seed.seeds import ( seed__actual_csv, seeds__expected_sql, seeds__enabled_in_config_csv, diff --git a/tests/adapter/dbt/tests/adapter/simple_seed/test_seed_type_override.py b/tests/functional/adapter/simple_seed/test_seed_type_override.py similarity index 94% rename from tests/adapter/dbt/tests/adapter/simple_seed/test_seed_type_override.py rename to tests/functional/adapter/simple_seed/test_seed_type_override.py 
index bd6333e607c..438e0bf5047 100644 --- a/tests/adapter/dbt/tests/adapter/simple_seed/test_seed_type_override.py +++ b/tests/functional/adapter/simple_seed/test_seed_type_override.py @@ -2,12 +2,12 @@ from dbt.tests.util import run_dbt -from dbt.tests.adapter.simple_seed.fixtures import ( +from tests.functional.adapter.simple_seed.fixtures import ( macros__schema_test, properties__schema_yml, ) -from dbt.tests.adapter.simple_seed.seeds import ( +from tests.functional.adapter.simple_seed.seeds import ( seeds__enabled_in_config_csv, seeds__disabled_in_config_csv, seeds__tricky_csv, diff --git a/tests/functional/adapter/simple_snapshot/__init__.py b/tests/functional/adapter/simple_snapshot/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/simple_snapshot/common.py b/tests/functional/adapter/simple_snapshot/common.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/simple_snapshot/common.py rename to tests/functional/adapter/simple_snapshot/common.py diff --git a/tests/adapter/dbt/tests/adapter/simple_snapshot/seeds.py b/tests/functional/adapter/simple_snapshot/seeds.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/simple_snapshot/seeds.py rename to tests/functional/adapter/simple_snapshot/seeds.py diff --git a/tests/adapter/dbt/tests/adapter/simple_snapshot/snapshots.py b/tests/functional/adapter/simple_snapshot/snapshots.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/simple_snapshot/snapshots.py rename to tests/functional/adapter/simple_snapshot/snapshots.py diff --git a/tests/adapter/dbt/tests/adapter/simple_snapshot/test_snapshot.py b/tests/functional/adapter/simple_snapshot/test_snapshot.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/simple_snapshot/test_snapshot.py rename to tests/functional/adapter/simple_snapshot/test_snapshot.py index 75a2e2b3e28..e230c395ce1 100644 --- 
a/tests/adapter/dbt/tests/adapter/simple_snapshot/test_snapshot.py +++ b/tests/functional/adapter/simple_snapshot/test_snapshot.py @@ -3,8 +3,8 @@ from typing import Dict, List, Iterable from dbt.tests.util import run_dbt -from dbt.tests.adapter.simple_snapshot import common -from dbt.tests.adapter.simple_snapshot import seeds, snapshots +from tests.functional.adapter.simple_snapshot import common +from tests.functional.adapter.simple_snapshot import seeds, snapshots MODEL_FACT_SQL = """ {{ config(materialized="table") }} diff --git a/tests/functional/adapter/store_test_failures_tests/__init__.py b/tests/functional/adapter/store_test_failures_tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/store_test_failures_tests/_files.py b/tests/functional/adapter/store_test_failures_tests/_files.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/store_test_failures_tests/_files.py rename to tests/functional/adapter/store_test_failures_tests/_files.py diff --git a/tests/adapter/dbt/tests/adapter/store_test_failures_tests/basic.py b/tests/functional/adapter/store_test_failures_tests/basic.py similarity index 99% rename from tests/adapter/dbt/tests/adapter/store_test_failures_tests/basic.py rename to tests/functional/adapter/store_test_failures_tests/basic.py index cd8f5a84691..3f5b8a29a53 100644 --- a/tests/adapter/dbt/tests/adapter/store_test_failures_tests/basic.py +++ b/tests/functional/adapter/store_test_failures_tests/basic.py @@ -6,7 +6,7 @@ from dbt.artifacts.schemas.results import TestStatus from dbt.tests.util import run_dbt, check_relation_types -from dbt.tests.adapter.store_test_failures_tests import _files +from tests.functional.adapter.store_test_failures_tests import _files TestResult = namedtuple("TestResult", ["name", "status", "type"]) diff --git a/tests/adapter/dbt/tests/adapter/store_test_failures_tests/fixtures.py 
b/tests/functional/adapter/store_test_failures_tests/fixtures.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/store_test_failures_tests/fixtures.py rename to tests/functional/adapter/store_test_failures_tests/fixtures.py diff --git a/tests/adapter/dbt/tests/adapter/store_test_failures_tests/test_store_test_failures.py b/tests/functional/adapter/store_test_failures_tests/test_store_test_failures.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/store_test_failures_tests/test_store_test_failures.py rename to tests/functional/adapter/store_test_failures_tests/test_store_test_failures.py index f24d4733eb3..edf85105a67 100644 --- a/tests/adapter/dbt/tests/adapter/store_test_failures_tests/test_store_test_failures.py +++ b/tests/functional/adapter/store_test_failures_tests/test_store_test_failures.py @@ -5,7 +5,7 @@ run_dbt, ) -from dbt.tests.adapter.store_test_failures_tests.fixtures import ( +from tests.functional.adapter.store_test_failures_tests.fixtures import ( seeds__people, seeds__expected_accepted_values, seeds__expected_failing_test, diff --git a/tests/functional/adapter/unit_testing/__init__.py b/tests/functional/adapter/unit_testing/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/unit_testing/test_unit_testing.py b/tests/functional/adapter/unit_testing/test_unit_testing.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/unit_testing/test_unit_testing.py rename to tests/functional/adapter/unit_testing/test_unit_testing.py diff --git a/tests/functional/adapter/utils/__init__.py b/tests/functional/adapter/utils/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/utils/base_array_utils.py b/tests/functional/adapter/utils/base_array_utils.py similarity index 92% rename from tests/adapter/dbt/tests/adapter/utils/base_array_utils.py rename to tests/functional/adapter/utils/base_array_utils.py 
index 64147a7bd8b..4c75a8650cb 100644 --- a/tests/adapter/dbt/tests/adapter/utils/base_array_utils.py +++ b/tests/functional/adapter/utils/base_array_utils.py @@ -1,4 +1,4 @@ -from dbt.tests.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.base_utils import BaseUtils from dbt.tests.util import run_dbt, check_relations_equal, get_relation_columns diff --git a/tests/adapter/dbt/tests/adapter/utils/base_utils.py b/tests/functional/adapter/utils/base_utils.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/base_utils.py rename to tests/functional/adapter/utils/base_utils.py diff --git a/tests/functional/adapter/utils/data_types/__init__.py b/tests/functional/adapter/utils/data_types/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/utils/data_types/base_data_type_macro.py b/tests/functional/adapter/utils/data_types/base_data_type_macro.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/data_types/base_data_type_macro.py rename to tests/functional/adapter/utils/data_types/base_data_type_macro.py diff --git a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_bigint.py b/tests/functional/adapter/utils/data_types/test_type_bigint.py similarity index 84% rename from tests/adapter/dbt/tests/adapter/utils/data_types/test_type_bigint.py rename to tests/functional/adapter/utils/data_types/test_type_bigint.py index 80e3daa11d8..7b6d31882ba 100644 --- a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_bigint.py +++ b/tests/functional/adapter/utils/data_types/test_type_bigint.py @@ -1,5 +1,5 @@ import pytest -from dbt.tests.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro +from tests.functional.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro models__expected_sql = """ select 9223372036854775800 as bigint_col diff --git 
a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_boolean.py b/tests/functional/adapter/utils/data_types/test_type_boolean.py similarity index 84% rename from tests/adapter/dbt/tests/adapter/utils/data_types/test_type_boolean.py rename to tests/functional/adapter/utils/data_types/test_type_boolean.py index 3efd7874236..14ef64b1a15 100644 --- a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_boolean.py +++ b/tests/functional/adapter/utils/data_types/test_type_boolean.py @@ -1,5 +1,5 @@ import pytest -from dbt.tests.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro +from tests.functional.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro seeds__expected_csv = """boolean_col True diff --git a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_float.py b/tests/functional/adapter/utils/data_types/test_type_float.py similarity index 84% rename from tests/adapter/dbt/tests/adapter/utils/data_types/test_type_float.py rename to tests/functional/adapter/utils/data_types/test_type_float.py index 03231d04266..8f3077dbd1b 100644 --- a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_float.py +++ b/tests/functional/adapter/utils/data_types/test_type_float.py @@ -1,5 +1,5 @@ import pytest -from dbt.tests.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro +from tests.functional.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro seeds__expected_csv = """float_col 1.2345 diff --git a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_int.py b/tests/functional/adapter/utils/data_types/test_type_int.py similarity index 84% rename from tests/adapter/dbt/tests/adapter/utils/data_types/test_type_int.py rename to tests/functional/adapter/utils/data_types/test_type_int.py index 3721de4d217..cc93e6f0f4c 100644 --- a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_int.py +++ b/tests/functional/adapter/utils/data_types/test_type_int.py @@ -1,5 +1,5 @@ 
import pytest -from dbt.tests.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro +from tests.functional.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro seeds__expected_csv = """int_col 12345678 diff --git a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_numeric.py b/tests/functional/adapter/utils/data_types/test_type_numeric.py similarity index 90% rename from tests/adapter/dbt/tests/adapter/utils/data_types/test_type_numeric.py rename to tests/functional/adapter/utils/data_types/test_type_numeric.py index 031f1474577..0ba4d88b1fe 100644 --- a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_numeric.py +++ b/tests/functional/adapter/utils/data_types/test_type_numeric.py @@ -1,5 +1,5 @@ import pytest -from dbt.tests.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro +from tests.functional.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro seeds__expected_csv = """numeric_col 1.2345 diff --git a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_string.py b/tests/functional/adapter/utils/data_types/test_type_string.py similarity index 93% rename from tests/adapter/dbt/tests/adapter/utils/data_types/test_type_string.py rename to tests/functional/adapter/utils/data_types/test_type_string.py index 05e7b598cb0..14486e8e871 100644 --- a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_string.py +++ b/tests/functional/adapter/utils/data_types/test_type_string.py @@ -1,5 +1,5 @@ import pytest -from dbt.tests.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro +from tests.functional.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro seeds__expected_csv = """string_col "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. 
Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum." diff --git a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_timestamp.py b/tests/functional/adapter/utils/data_types/test_type_timestamp.py similarity index 90% rename from tests/adapter/dbt/tests/adapter/utils/data_types/test_type_timestamp.py rename to tests/functional/adapter/utils/data_types/test_type_timestamp.py index 2c8d68999e3..030e664914e 100644 --- a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_timestamp.py +++ b/tests/functional/adapter/utils/data_types/test_type_timestamp.py @@ -1,5 +1,5 @@ import pytest -from dbt.tests.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro +from tests.functional.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro seeds__expected_csv = """timestamp_col 2021-01-01 01:01:01 diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_any_value.py b/tests/functional/adapter/utils/fixture_any_value.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_any_value.py rename to tests/functional/adapter/utils/fixture_any_value.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_array_append.py b/tests/functional/adapter/utils/fixture_array_append.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_array_append.py rename to tests/functional/adapter/utils/fixture_array_append.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_array_concat.py b/tests/functional/adapter/utils/fixture_array_concat.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_array_concat.py rename to tests/functional/adapter/utils/fixture_array_concat.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_array_construct.py 
b/tests/functional/adapter/utils/fixture_array_construct.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_array_construct.py rename to tests/functional/adapter/utils/fixture_array_construct.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_bool_or.py b/tests/functional/adapter/utils/fixture_bool_or.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_bool_or.py rename to tests/functional/adapter/utils/fixture_bool_or.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_cast_bool_to_text.py b/tests/functional/adapter/utils/fixture_cast_bool_to_text.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_cast_bool_to_text.py rename to tests/functional/adapter/utils/fixture_cast_bool_to_text.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_concat.py b/tests/functional/adapter/utils/fixture_concat.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_concat.py rename to tests/functional/adapter/utils/fixture_concat.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_date_spine.py b/tests/functional/adapter/utils/fixture_date_spine.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_date_spine.py rename to tests/functional/adapter/utils/fixture_date_spine.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_date_trunc.py b/tests/functional/adapter/utils/fixture_date_trunc.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_date_trunc.py rename to tests/functional/adapter/utils/fixture_date_trunc.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_dateadd.py b/tests/functional/adapter/utils/fixture_dateadd.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_dateadd.py rename to tests/functional/adapter/utils/fixture_dateadd.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_datediff.py 
b/tests/functional/adapter/utils/fixture_datediff.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_datediff.py rename to tests/functional/adapter/utils/fixture_datediff.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_equals.py b/tests/functional/adapter/utils/fixture_equals.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_equals.py rename to tests/functional/adapter/utils/fixture_equals.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_escape_single_quotes.py b/tests/functional/adapter/utils/fixture_escape_single_quotes.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_escape_single_quotes.py rename to tests/functional/adapter/utils/fixture_escape_single_quotes.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_except.py b/tests/functional/adapter/utils/fixture_except.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_except.py rename to tests/functional/adapter/utils/fixture_except.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_generate_series.py b/tests/functional/adapter/utils/fixture_generate_series.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_generate_series.py rename to tests/functional/adapter/utils/fixture_generate_series.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_get_intervals_between.py b/tests/functional/adapter/utils/fixture_get_intervals_between.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_get_intervals_between.py rename to tests/functional/adapter/utils/fixture_get_intervals_between.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_get_powers_of_two.py b/tests/functional/adapter/utils/fixture_get_powers_of_two.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_get_powers_of_two.py rename to 
tests/functional/adapter/utils/fixture_get_powers_of_two.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_hash.py b/tests/functional/adapter/utils/fixture_hash.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_hash.py rename to tests/functional/adapter/utils/fixture_hash.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_intersect.py b/tests/functional/adapter/utils/fixture_intersect.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_intersect.py rename to tests/functional/adapter/utils/fixture_intersect.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_last_day.py b/tests/functional/adapter/utils/fixture_last_day.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_last_day.py rename to tests/functional/adapter/utils/fixture_last_day.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_length.py b/tests/functional/adapter/utils/fixture_length.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_length.py rename to tests/functional/adapter/utils/fixture_length.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_listagg.py b/tests/functional/adapter/utils/fixture_listagg.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_listagg.py rename to tests/functional/adapter/utils/fixture_listagg.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_null_compare.py b/tests/functional/adapter/utils/fixture_null_compare.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_null_compare.py rename to tests/functional/adapter/utils/fixture_null_compare.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_position.py b/tests/functional/adapter/utils/fixture_position.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_position.py rename to tests/functional/adapter/utils/fixture_position.py diff 
--git a/tests/adapter/dbt/tests/adapter/utils/fixture_replace.py b/tests/functional/adapter/utils/fixture_replace.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_replace.py rename to tests/functional/adapter/utils/fixture_replace.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_right.py b/tests/functional/adapter/utils/fixture_right.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_right.py rename to tests/functional/adapter/utils/fixture_right.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_safe_cast.py b/tests/functional/adapter/utils/fixture_safe_cast.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_safe_cast.py rename to tests/functional/adapter/utils/fixture_safe_cast.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_split_part.py b/tests/functional/adapter/utils/fixture_split_part.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_split_part.py rename to tests/functional/adapter/utils/fixture_split_part.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_string_literal.py b/tests/functional/adapter/utils/fixture_string_literal.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_string_literal.py rename to tests/functional/adapter/utils/fixture_string_literal.py diff --git a/tests/adapter/dbt/tests/adapter/utils/test_any_value.py b/tests/functional/adapter/utils/test_any_value.py similarity index 85% rename from tests/adapter/dbt/tests/adapter/utils/test_any_value.py rename to tests/functional/adapter/utils/test_any_value.py index 38d437ee274..4360c537e73 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_any_value.py +++ b/tests/functional/adapter/utils/test_any_value.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_any_value import ( +from 
tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_any_value import ( seeds__data_any_value_csv, seeds__data_any_value_expected_csv, models__test_any_value_sql, diff --git a/tests/adapter/dbt/tests/adapter/utils/test_array_append.py b/tests/functional/adapter/utils/test_array_append.py similarity index 73% rename from tests/adapter/dbt/tests/adapter/utils/test_array_append.py rename to tests/functional/adapter/utils/test_array_append.py index 6b295327a9c..52d9bd590f2 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_array_append.py +++ b/tests/functional/adapter/utils/test_array_append.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_array_utils import BaseArrayUtils -from dbt.tests.adapter.utils.fixture_array_append import ( +from tests.functional.adapter.utils.base_array_utils import BaseArrayUtils +from tests.functional.adapter.utils.fixture_array_append import ( models__array_append_actual_sql, models__array_append_expected_sql, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_array_concat.py b/tests/functional/adapter/utils/test_array_concat.py similarity index 73% rename from tests/adapter/dbt/tests/adapter/utils/test_array_concat.py rename to tests/functional/adapter/utils/test_array_concat.py index d50540cc0d4..61a0652c220 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_array_concat.py +++ b/tests/functional/adapter/utils/test_array_concat.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_array_utils import BaseArrayUtils -from dbt.tests.adapter.utils.fixture_array_concat import ( +from tests.functional.adapter.utils.base_array_utils import BaseArrayUtils +from tests.functional.adapter.utils.fixture_array_concat import ( models__array_concat_actual_sql, models__array_concat_expected_sql, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_array_construct.py b/tests/functional/adapter/utils/test_array_construct.py similarity index 74% rename 
from tests/adapter/dbt/tests/adapter/utils/test_array_construct.py rename to tests/functional/adapter/utils/test_array_construct.py index e0e3df04cbb..6543b733449 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_array_construct.py +++ b/tests/functional/adapter/utils/test_array_construct.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_array_utils import BaseArrayUtils -from dbt.tests.adapter.utils.fixture_array_construct import ( +from tests.functional.adapter.utils.base_array_utils import BaseArrayUtils +from tests.functional.adapter.utils.fixture_array_construct import ( models__array_construct_actual_sql, models__array_construct_expected_sql, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_bool_or.py b/tests/functional/adapter/utils/test_bool_or.py similarity index 84% rename from tests/adapter/dbt/tests/adapter/utils/test_bool_or.py rename to tests/functional/adapter/utils/test_bool_or.py index 40be6b0fc7c..e360ca56a56 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_bool_or.py +++ b/tests/functional/adapter/utils/test_bool_or.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_bool_or import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_bool_or import ( seeds__data_bool_or_csv, seeds__data_bool_or_expected_csv, models__test_bool_or_sql, diff --git a/tests/adapter/dbt/tests/adapter/utils/test_cast_bool_to_text.py b/tests/functional/adapter/utils/test_cast_bool_to_text.py similarity index 79% rename from tests/adapter/dbt/tests/adapter/utils/test_cast_bool_to_text.py rename to tests/functional/adapter/utils/test_cast_bool_to_text.py index ecfd0b45353..728caf1ce89 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_cast_bool_to_text.py +++ b/tests/functional/adapter/utils/test_cast_bool_to_text.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import 
BaseUtils -from dbt.tests.adapter.utils.fixture_cast_bool_to_text import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_cast_bool_to_text import ( models__test_cast_bool_to_text_sql, models__test_cast_bool_to_text_yml, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_concat.py b/tests/functional/adapter/utils/test_concat.py similarity index 81% rename from tests/adapter/dbt/tests/adapter/utils/test_concat.py rename to tests/functional/adapter/utils/test_concat.py index 03a5159148f..766ef1790e2 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_concat.py +++ b/tests/functional/adapter/utils/test_concat.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_concat import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_concat import ( seeds__data_concat_csv, models__test_concat_sql, models__test_concat_yml, diff --git a/tests/adapter/dbt/tests/adapter/utils/test_current_timestamp.py b/tests/functional/adapter/utils/test_current_timestamp.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/test_current_timestamp.py rename to tests/functional/adapter/utils/test_current_timestamp.py diff --git a/tests/adapter/dbt/tests/adapter/utils/test_date_spine.py b/tests/functional/adapter/utils/test_date_spine.py similarity index 77% rename from tests/adapter/dbt/tests/adapter/utils/test_date_spine.py rename to tests/functional/adapter/utils/test_date_spine.py index 0a5d7b7d29f..0b4b3b95279 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_date_spine.py +++ b/tests/functional/adapter/utils/test_date_spine.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_date_spine import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from 
tests.functional.adapter.utils.fixture_date_spine import ( models__test_date_spine_sql, models__test_date_spine_yml, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_date_trunc.py b/tests/functional/adapter/utils/test_date_trunc.py similarity index 82% rename from tests/adapter/dbt/tests/adapter/utils/test_date_trunc.py rename to tests/functional/adapter/utils/test_date_trunc.py index 946aa010ac3..977a2845efb 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_date_trunc.py +++ b/tests/functional/adapter/utils/test_date_trunc.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_date_trunc import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_date_trunc import ( seeds__data_date_trunc_csv, models__test_date_trunc_sql, models__test_date_trunc_yml, diff --git a/tests/adapter/dbt/tests/adapter/utils/test_dateadd.py b/tests/functional/adapter/utils/test_dateadd.py similarity index 90% rename from tests/adapter/dbt/tests/adapter/utils/test_dateadd.py rename to tests/functional/adapter/utils/test_dateadd.py index cb645121c1c..01171bff3fd 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_dateadd.py +++ b/tests/functional/adapter/utils/test_dateadd.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_dateadd import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_dateadd import ( seeds__data_dateadd_csv, models__test_dateadd_sql, models__test_dateadd_yml, diff --git a/tests/adapter/dbt/tests/adapter/utils/test_datediff.py b/tests/functional/adapter/utils/test_datediff.py similarity index 82% rename from tests/adapter/dbt/tests/adapter/utils/test_datediff.py rename to tests/functional/adapter/utils/test_datediff.py index b366e7a4587..291e5788997 100644 --- 
a/tests/adapter/dbt/tests/adapter/utils/test_datediff.py +++ b/tests/functional/adapter/utils/test_datediff.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_datediff import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_datediff import ( seeds__data_datediff_csv, models__test_datediff_sql, models__test_datediff_yml, diff --git a/tests/adapter/dbt/tests/adapter/utils/test_equals.py b/tests/functional/adapter/utils/test_equals.py similarity index 91% rename from tests/adapter/dbt/tests/adapter/utils/test_equals.py rename to tests/functional/adapter/utils/test_equals.py index 51e7fe84bd3..4fdf5e1eae8 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_equals.py +++ b/tests/functional/adapter/utils/test_equals.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import macros__equals_sql -from dbt.tests.adapter.utils.fixture_equals import ( +from tests.functional.adapter.utils.base_utils import macros__equals_sql +from tests.functional.adapter.utils.fixture_equals import ( SEEDS__DATA_EQUALS_CSV, MODELS__EQUAL_VALUES_SQL, MODELS__NOT_EQUAL_VALUES_SQL, diff --git a/tests/adapter/dbt/tests/adapter/utils/test_escape_single_quotes.py b/tests/functional/adapter/utils/test_escape_single_quotes.py similarity index 88% rename from tests/adapter/dbt/tests/adapter/utils/test_escape_single_quotes.py rename to tests/functional/adapter/utils/test_escape_single_quotes.py index 1b0af36b0f8..5847b4cca59 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_escape_single_quotes.py +++ b/tests/functional/adapter/utils/test_escape_single_quotes.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_escape_single_quotes import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from 
tests.functional.adapter.utils.fixture_escape_single_quotes import ( models__test_escape_single_quotes_quote_sql, models__test_escape_single_quotes_backslash_sql, models__test_escape_single_quotes_yml, diff --git a/tests/adapter/dbt/tests/adapter/utils/test_except.py b/tests/functional/adapter/utils/test_except.py similarity index 96% rename from tests/adapter/dbt/tests/adapter/utils/test_except.py rename to tests/functional/adapter/utils/test_except.py index 2c058e91c2c..b34b7f5569c 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_except.py +++ b/tests/functional/adapter/utils/test_except.py @@ -1,7 +1,7 @@ import pytest from dbt.tests.util import run_dbt, check_relations_equal -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_except import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_except import ( seeds__data_except_a_csv, seeds__data_except_b_csv, seeds__data_except_a_minus_b_csv, diff --git a/tests/adapter/dbt/tests/adapter/utils/test_generate_series.py b/tests/functional/adapter/utils/test_generate_series.py similarity index 79% rename from tests/adapter/dbt/tests/adapter/utils/test_generate_series.py rename to tests/functional/adapter/utils/test_generate_series.py index afc8d77dd3b..402adb4964a 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_generate_series.py +++ b/tests/functional/adapter/utils/test_generate_series.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_generate_series import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_generate_series import ( models__test_generate_series_sql, models__test_generate_series_yml, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_get_intervals_between.py b/tests/functional/adapter/utils/test_get_intervals_between.py similarity index 80% 
rename from tests/adapter/dbt/tests/adapter/utils/test_get_intervals_between.py rename to tests/functional/adapter/utils/test_get_intervals_between.py index 588d2c538d7..b21087ed795 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_get_intervals_between.py +++ b/tests/functional/adapter/utils/test_get_intervals_between.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_get_intervals_between import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_get_intervals_between import ( models__test_get_intervals_between_sql, models__test_get_intervals_between_yml, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_get_powers_of_two.py b/tests/functional/adapter/utils/test_get_powers_of_two.py similarity index 79% rename from tests/adapter/dbt/tests/adapter/utils/test_get_powers_of_two.py rename to tests/functional/adapter/utils/test_get_powers_of_two.py index aa6f4d1a196..26842140a12 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_get_powers_of_two.py +++ b/tests/functional/adapter/utils/test_get_powers_of_two.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_get_powers_of_two import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_get_powers_of_two import ( models__test_get_powers_of_two_sql, models__test_get_powers_of_two_yml, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_hash.py b/tests/functional/adapter/utils/test_hash.py similarity index 80% rename from tests/adapter/dbt/tests/adapter/utils/test_hash.py rename to tests/functional/adapter/utils/test_hash.py index d1a12ebab75..dc5a5c0e075 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_hash.py +++ b/tests/functional/adapter/utils/test_hash.py @@ -1,6 +1,6 @@ import pytest -from 
dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_hash import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_hash import ( seeds__data_hash_csv, models__test_hash_sql, models__test_hash_yml, diff --git a/tests/adapter/dbt/tests/adapter/utils/test_intersect.py b/tests/functional/adapter/utils/test_intersect.py similarity index 96% rename from tests/adapter/dbt/tests/adapter/utils/test_intersect.py rename to tests/functional/adapter/utils/test_intersect.py index 737e317c6f2..2d6221b654d 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_intersect.py +++ b/tests/functional/adapter/utils/test_intersect.py @@ -1,7 +1,7 @@ import pytest from dbt.tests.util import run_dbt, check_relations_equal -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_intersect import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_intersect import ( seeds__data_intersect_a_csv, seeds__data_intersect_b_csv, seeds__data_intersect_a_overlap_b_csv, diff --git a/tests/adapter/dbt/tests/adapter/utils/test_last_day.py b/tests/functional/adapter/utils/test_last_day.py similarity index 82% rename from tests/adapter/dbt/tests/adapter/utils/test_last_day.py rename to tests/functional/adapter/utils/test_last_day.py index d7d82deae50..030b9b95802 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_last_day.py +++ b/tests/functional/adapter/utils/test_last_day.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_last_day import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_last_day import ( seeds__data_last_day_csv, models__test_last_day_sql, models__test_last_day_yml, diff --git a/tests/adapter/dbt/tests/adapter/utils/test_length.py 
b/tests/functional/adapter/utils/test_length.py similarity index 81% rename from tests/adapter/dbt/tests/adapter/utils/test_length.py rename to tests/functional/adapter/utils/test_length.py index ca047318ad4..24f93c1edbe 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_length.py +++ b/tests/functional/adapter/utils/test_length.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_length import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_length import ( seeds__data_length_csv, models__test_length_sql, models__test_length_yml, diff --git a/tests/adapter/dbt/tests/adapter/utils/test_listagg.py b/tests/functional/adapter/utils/test_listagg.py similarity index 84% rename from tests/adapter/dbt/tests/adapter/utils/test_listagg.py rename to tests/functional/adapter/utils/test_listagg.py index 362d835c220..e1e8076ac36 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_listagg.py +++ b/tests/functional/adapter/utils/test_listagg.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_listagg import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_listagg import ( seeds__data_listagg_csv, seeds__data_listagg_output_csv, models__test_listagg_sql, diff --git a/tests/adapter/dbt/tests/adapter/utils/test_null_compare.py b/tests/functional/adapter/utils/test_null_compare.py similarity index 88% rename from tests/adapter/dbt/tests/adapter/utils/test_null_compare.py rename to tests/functional/adapter/utils/test_null_compare.py index eac901f3972..f1c1a83cbcc 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_null_compare.py +++ b/tests/functional/adapter/utils/test_null_compare.py @@ -1,7 +1,7 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from 
dbt.tests.adapter.utils.fixture_null_compare import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_null_compare import ( MODELS__TEST_MIXED_NULL_COMPARE_SQL, MODELS__TEST_MIXED_NULL_COMPARE_YML, MODELS__TEST_NULL_COMPARE_SQL, diff --git a/tests/adapter/dbt/tests/adapter/utils/test_position.py b/tests/functional/adapter/utils/test_position.py similarity index 82% rename from tests/adapter/dbt/tests/adapter/utils/test_position.py rename to tests/functional/adapter/utils/test_position.py index f4a54eed03a..314048d82d5 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_position.py +++ b/tests/functional/adapter/utils/test_position.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_position import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_position import ( seeds__data_position_csv, models__test_position_sql, models__test_position_yml, diff --git a/tests/adapter/dbt/tests/adapter/utils/test_replace.py b/tests/functional/adapter/utils/test_replace.py similarity index 82% rename from tests/adapter/dbt/tests/adapter/utils/test_replace.py rename to tests/functional/adapter/utils/test_replace.py index bab75842d98..b94f9ada4de 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_replace.py +++ b/tests/functional/adapter/utils/test_replace.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_replace import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_replace import ( seeds__data_replace_csv, models__test_replace_sql, models__test_replace_yml, diff --git a/tests/adapter/dbt/tests/adapter/utils/test_right.py b/tests/functional/adapter/utils/test_right.py similarity index 80% rename from 
tests/adapter/dbt/tests/adapter/utils/test_right.py rename to tests/functional/adapter/utils/test_right.py index 215a9228a07..aef52a94615 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_right.py +++ b/tests/functional/adapter/utils/test_right.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_right import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_right import ( seeds__data_right_csv, models__test_right_sql, models__test_right_yml, diff --git a/tests/adapter/dbt/tests/adapter/utils/test_safe_cast.py b/tests/functional/adapter/utils/test_safe_cast.py similarity index 83% rename from tests/adapter/dbt/tests/adapter/utils/test_safe_cast.py rename to tests/functional/adapter/utils/test_safe_cast.py index 6c8310c7838..07d82867f2c 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_safe_cast.py +++ b/tests/functional/adapter/utils/test_safe_cast.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_safe_cast import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_safe_cast import ( seeds__data_safe_cast_csv, models__test_safe_cast_sql, models__test_safe_cast_yml, diff --git a/tests/adapter/dbt/tests/adapter/utils/test_split_part.py b/tests/functional/adapter/utils/test_split_part.py similarity index 82% rename from tests/adapter/dbt/tests/adapter/utils/test_split_part.py rename to tests/functional/adapter/utils/test_split_part.py index 6caa3afb991..e8e076c43a5 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_split_part.py +++ b/tests/functional/adapter/utils/test_split_part.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_split_part import ( +from tests.functional.adapter.utils.base_utils 
import BaseUtils +from tests.functional.adapter.utils.fixture_split_part import ( seeds__data_split_part_csv, models__test_split_part_sql, models__test_split_part_yml, diff --git a/tests/adapter/dbt/tests/adapter/utils/test_string_literal.py b/tests/functional/adapter/utils/test_string_literal.py similarity index 78% rename from tests/adapter/dbt/tests/adapter/utils/test_string_literal.py rename to tests/functional/adapter/utils/test_string_literal.py index a6b632e1e18..ab0d9ea83a8 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_string_literal.py +++ b/tests/functional/adapter/utils/test_string_literal.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_string_literal import ( +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_string_literal import ( models__test_string_literal_sql, models__test_string_literal_yml, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_timestamps.py b/tests/functional/adapter/utils/test_timestamps.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/test_timestamps.py rename to tests/functional/adapter/utils/test_timestamps.py diff --git a/tests/adapter/dbt/tests/adapter/utils/test_validate_sql.py b/tests/functional/adapter/utils/test_validate_sql.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/test_validate_sql.py rename to tests/functional/adapter/utils/test_validate_sql.py diff --git a/tests/functional/catalog_tests/test_relation_types.py b/tests/functional/catalog_tests/test_relation_types.py deleted file mode 100644 index 97f64f6b14f..00000000000 --- a/tests/functional/catalog_tests/test_relation_types.py +++ /dev/null @@ -1,5 +0,0 @@ -from dbt.tests.adapter.catalog.relation_types import CatalogRelationTypes - - -class TestCatalogRelationTypes(CatalogRelationTypes): - pass diff --git 
a/tests/functional/materializations/materialized_view_tests/test_materialized_view.py b/tests/functional/materializations/materialized_view_tests/test_materialized_view.py deleted file mode 100644 index 136186a5ec6..00000000000 --- a/tests/functional/materializations/materialized_view_tests/test_materialized_view.py +++ /dev/null @@ -1,121 +0,0 @@ -from typing import Optional, Tuple - -import pytest - -from dbt.adapters.base.relation import BaseRelation - -from dbt.tests.util import get_model_file, set_model_file -from tests.adapter.dbt.tests.adapter.materialized_view.basic import MaterializedViewBasic -from tests.adapter.dbt.tests.adapter.materialized_view.changes import ( - MaterializedViewChanges, - MaterializedViewChangesApplyMixin, - MaterializedViewChangesContinueMixin, - MaterializedViewChangesFailMixin, -) -from tests.adapter.dbt.tests.adapter.materialized_view.files import MY_TABLE, MY_VIEW -from tests.functional.materializations.materialized_view_tests.utils import ( - query_indexes, - query_relation_type, -) - - -MY_MATERIALIZED_VIEW = """ -{{ config( - materialized='materialized_view', - indexes=[{'columns': ['id']}], -) }} -select * from {{ ref('my_seed') }} -""" - - -class TestPostgresMaterializedViewsBasic(MaterializedViewBasic): - @pytest.fixture(scope="class", autouse=True) - def models(self): - yield { - "my_table.sql": MY_TABLE, - "my_view.sql": MY_VIEW, - "my_materialized_view.sql": MY_MATERIALIZED_VIEW, - } - - @staticmethod - def insert_record(project, table: BaseRelation, record: Tuple[int, int]): - my_id, value = record - project.run_sql(f"insert into {table} (id, value) values ({my_id}, {value})") - - @staticmethod - def refresh_materialized_view(project, materialized_view: BaseRelation): - sql = f"refresh materialized view {materialized_view}" - project.run_sql(sql) - - @staticmethod - def query_row_count(project, relation: BaseRelation) -> int: - sql = f"select count(*) from {relation}" - return project.run_sql(sql, fetch="one")[0] - - 
@staticmethod - def query_relation_type(project, relation: BaseRelation) -> Optional[str]: - return query_relation_type(project, relation) - - -class PostgresMaterializedViewChanges(MaterializedViewChanges): - @pytest.fixture(scope="class", autouse=True) - def models(self): - yield { - "my_table.sql": MY_TABLE, - "my_view.sql": MY_VIEW, - "my_materialized_view.sql": MY_MATERIALIZED_VIEW, - } - - @staticmethod - def query_relation_type(project, relation: BaseRelation) -> Optional[str]: - return query_relation_type(project, relation) - - @staticmethod - def check_start_state(project, materialized_view): - indexes = query_indexes(project, materialized_view) - assert len(indexes) == 1 - assert indexes[0]["column_names"] == "id" - - @staticmethod - def change_config_via_alter(project, materialized_view): - initial_model = get_model_file(project, materialized_view) - new_model = initial_model.replace( - "indexes=[{'columns': ['id']}]", - "indexes=[{'columns': ['value']}]", - ) - set_model_file(project, materialized_view, new_model) - - @staticmethod - def check_state_alter_change_is_applied(project, materialized_view): - indexes = query_indexes(project, materialized_view) - assert len(indexes) == 1 - assert indexes[0]["column_names"] == "value" - - @staticmethod - def change_config_via_replace(project, materialized_view): - # dbt-postgres does not currently monitor changes of this type - pass - - -class TestPostgresMaterializedViewChangesApply( - PostgresMaterializedViewChanges, MaterializedViewChangesApplyMixin -): - @pytest.mark.skip("dbt-postgres does not currently monitor replace changes.") - def test_change_is_applied_via_replace(self, project, my_materialized_view): - super().test_change_is_applied_via_replace(project, my_materialized_view) - - -class TestPostgresMaterializedViewChangesContinue( - PostgresMaterializedViewChanges, MaterializedViewChangesContinueMixin -): - @pytest.mark.skip("dbt-postgres does not currently monitor replace changes.") - def 
test_change_is_not_applied_via_replace(self, project, my_materialized_view): - super().test_change_is_not_applied_via_alter(project, my_materialized_view) - - -class TestPostgresMaterializedViewChangesFail( - PostgresMaterializedViewChanges, MaterializedViewChangesFailMixin -): - @pytest.mark.skip("dbt-postgres does not currently monitor replace changes.") - def test_change_is_not_applied_via_replace(self, project, my_materialized_view): - super().test_change_is_not_applied_via_replace(project, my_materialized_view) diff --git a/tests/functional/materializations/materialized_view_tests/utils.py b/tests/functional/materializations/materialized_view_tests/utils.py deleted file mode 100644 index 572f21aeb95..00000000000 --- a/tests/functional/materializations/materialized_view_tests/utils.py +++ /dev/null @@ -1,73 +0,0 @@ -from typing import Dict, List, Optional - -from dbt.adapters.base.relation import BaseRelation - -from dbt.adapters.postgres.relation import PostgresRelation - - -def query_relation_type(project, relation: BaseRelation) -> Optional[str]: - assert isinstance(relation, PostgresRelation) - sql = f""" - select - 'table' as relation_type - from pg_tables - where schemaname = '{relation.schema}' - and tablename = '{relation.identifier}' - union all - select - 'view' as relation_type - from pg_views - where schemaname = '{relation.schema}' - and viewname = '{relation.identifier}' - union all - select - 'materialized_view' as relation_type - from pg_matviews - where schemaname = '{relation.schema}' - and matviewname = '{relation.identifier}' - """ - results = project.run_sql(sql, fetch="all") - if len(results) == 0: - return None - elif len(results) > 1: - raise ValueError(f"More than one instance of {relation.name} found!") - else: - return results[0][0] - - -def query_indexes(project, relation: BaseRelation) -> List[Dict[str, str]]: - assert isinstance(relation, PostgresRelation) - # pulled directly from `postgres__describe_indexes_template` and manually 
verified - sql = f""" - select - i.relname as name, - m.amname as method, - ix.indisunique as "unique", - array_to_string(array_agg(a.attname), ',') as column_names - from pg_index ix - join pg_class i - on i.oid = ix.indexrelid - join pg_am m - on m.oid=i.relam - join pg_class t - on t.oid = ix.indrelid - join pg_namespace n - on n.oid = t.relnamespace - join pg_attribute a - on a.attrelid = t.oid - and a.attnum = ANY(ix.indkey) - where t.relname ilike '{ relation.identifier }' - and n.nspname ilike '{ relation.schema }' - and t.relkind in ('r', 'm') - group by 1, 2, 3 - order by 1, 2, 3 - """ - raw_indexes = project.run_sql(sql, fetch="all") - indexes = [ - { - header: value - for header, value in zip(["name", "method", "unique", "column_names"], index) - } - for index in raw_indexes - ] - return indexes diff --git a/tests/functional/simple_snapshot/test_hard_delete_snapshot.py b/tests/functional/simple_snapshot/test_hard_delete_snapshot.py index 4b4b9e281a6..53c61bb8c7f 100644 --- a/tests/functional/simple_snapshot/test_hard_delete_snapshot.py +++ b/tests/functional/simple_snapshot/test_hard_delete_snapshot.py @@ -3,13 +3,13 @@ import pytz import pytest from dbt.tests.util import run_dbt, check_relations_equal -from dbt.tests.adapter.utils.test_current_timestamp import is_aware from tests.functional.simple_snapshot.fixtures import ( models__schema_yml, models__ref_snapshot_sql, macros__test_no_overlaps_sql, snapshots_pg__snapshot_sql, ) +from tests.functional.utils import is_aware # These tests uses the same seed data, containing 20 records of which we hard delete the last 10. 
diff --git a/tests/functional/store_test_failures/test_store_test_failures.py b/tests/functional/store_test_failures/test_store_test_failures.py deleted file mode 100644 index 8783e1903e3..00000000000 --- a/tests/functional/store_test_failures/test_store_test_failures.py +++ /dev/null @@ -1,46 +0,0 @@ -import pytest - -from dbt.tests.adapter.store_test_failures_tests.basic import ( - StoreTestFailuresAsInteractions, - StoreTestFailuresAsProjectLevelOff, - StoreTestFailuresAsProjectLevelView, - StoreTestFailuresAsProjectLevelEphemeral, - StoreTestFailuresAsGeneric, - StoreTestFailuresAsExceptions, -) - - -class PostgresMixin: - audit_schema: str - - @pytest.fixture(scope="function", autouse=True) - def setup_audit_schema(self, project, setup_method): - # postgres only supports schema names of 63 characters - # a schema with a longer name still gets created, but the name gets truncated - self.audit_schema = self.audit_schema[:63] - - -class TestStoreTestFailuresAsInteractions(StoreTestFailuresAsInteractions, PostgresMixin): - pass - - -class TestStoreTestFailuresAsProjectLevelOff(StoreTestFailuresAsProjectLevelOff, PostgresMixin): - pass - - -class TestStoreTestFailuresAsProjectLevelView(StoreTestFailuresAsProjectLevelView, PostgresMixin): - pass - - -class TestStoreTestFailuresAsProjectLevelEphemeral( - StoreTestFailuresAsProjectLevelEphemeral, PostgresMixin -): - pass - - -class TestStoreTestFailuresAsGeneric(StoreTestFailuresAsGeneric, PostgresMixin): - pass - - -class TestStoreTestFailuresAsExceptions(StoreTestFailuresAsExceptions, PostgresMixin): - pass diff --git a/tests/functional/utils.py b/tests/functional/utils.py index ddfe367856b..a82aa378e43 100644 --- a/tests/functional/utils.py +++ b/tests/functional/utils.py @@ -1,5 +1,6 @@ import os from contextlib import contextmanager +from datetime import datetime from typing import Optional from pathlib import Path @@ -12,3 +13,7 @@ def up_one(return_path: Optional[Path] = None): yield finally: 
os.chdir(return_path or current_path) + + +def is_aware(dt: datetime) -> bool: + return dt.tzinfo is not None and dt.tzinfo.utcoffset(dt) is not None diff --git a/tests/unit/test_adapter_connection_manager.py b/tests/unit/test_adapter_connection_manager.py deleted file mode 100644 index 19c4f309b0f..00000000000 --- a/tests/unit/test_adapter_connection_manager.py +++ /dev/null @@ -1,500 +0,0 @@ -import unittest -from unittest import mock -import sys - -import dbt.adapters.exceptions -import dbt.exceptions - -import psycopg2 - -from dbt.adapters.contracts.connection import Connection -from dbt.adapters.base import BaseConnectionManager -from dbt.adapters.postgres import PostgresCredentials, PostgresConnectionManager -from dbt.adapters.events.logging import AdapterLogger - - -class BaseConnectionManagerTest(unittest.TestCase): - def setUp(self): - self.postgres_credentials = PostgresCredentials( - host="localhost", - user="test-user", - port=1111, - password="test-password", - database="test-db", - schema="test-schema", - ) - self.logger = AdapterLogger("test") - self.postgres_connection = Connection("postgres", None, self.postgres_credentials) - - def test_retry_connection(self): - """Test a dummy handle is set on a connection on the first attempt. - - This test uses a Connection populated with test PostgresCredentials values, and - expects the Connection.handle attribute to be set to True and it's state to - "open", after calling retry_connection. - - Moreover, the attribute should be set in the first attempt as no exception would - be raised for retrying. A mock connect function is used to simulate a real connection - passing on the first attempt. 
- """ - conn = self.postgres_connection - attempts = 0 - - def connect(): - nonlocal attempts - attempts += 1 - return True - - conn = BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retryable_exceptions=[], - ) - - assert conn.state == "open" - assert conn.handle is True - assert attempts == 1 - - def test_retry_connection_fails_unhandled(self): - """Test setting a handle fails upon raising a non-handled exception. - - This test uses a Connection populated with test PostgresCredentials values, and - expects a ValueError to be raised by a mock connect function. As a - result: - * The Connection state should be "fail" and the handle None. - * The resulting attempt count should be 1 as we are not explicitly configured to handle a - ValueError. - * retry_connection should raise a FailedToConnectError with the Exception message. - """ - conn = self.postgres_connection - attempts = 0 - - def connect(): - nonlocal attempts - attempts += 1 - raise ValueError("Something went horribly wrong") - - with self.assertRaisesRegex( - dbt.adapters.exceptions.FailedToConnectError, - "Something went horribly wrong", - ): - - BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retry_limit=1, - retry_timeout=lambda attempt: 0, - retryable_exceptions=(TypeError,), - ) - - assert conn.state == "fail" - assert conn.handle is None - assert attempts == 1 - - def test_retry_connection_fails_handled(self): - """Test setting a handle fails upon raising a handled exception. - - This test uses a Connection populated with test PostgresCredentials values, and - expects a ValueError to be raised by a mock connect function. - As a result: - * The Connection state should be "fail" and the handle None. - * The resulting attempt count should be 2 as we are configured to handle a ValueError. - * retry_connection should raise a FailedToConnectError with the Exception message. 
- """ - conn = self.postgres_connection - attempts = 0 - - def connect(): - nonlocal attempts - attempts += 1 - raise ValueError("Something went horribly wrong") - - with self.assertRaisesRegex( - dbt.adapters.exceptions.FailedToConnectError, - "Something went horribly wrong", - ): - - BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retry_timeout=0, - retryable_exceptions=(ValueError,), - retry_limit=1, - ) - - assert conn.state == "fail" - assert conn.handle is None - - def test_retry_connection_passes_handled(self): - """Test setting a handle fails upon raising a handled exception. - - This test uses a Connection populated with test PostgresCredentials values, and - expects a ValueError to be raised by a mock connect function only the first - time is called. Upon handling the exception once, connect should return. - As a result: - * The Connection state should be "open" and the handle True. - * The resulting attempt count should be 2 as we are configured to handle a ValueError. - """ - conn = self.postgres_connection - is_handled = False - attempts = 0 - - def connect(): - nonlocal is_handled - nonlocal attempts - - attempts += 1 - - if is_handled: - return True - - is_handled = True - raise ValueError("Something went horribly wrong") - - conn = BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retry_timeout=0, - retryable_exceptions=(ValueError,), - retry_limit=1, - ) - - assert conn.state == "open" - assert conn.handle is True - assert is_handled is True - assert attempts == 2 - - def test_retry_connection_attempts(self): - """Test setting a handle fails upon raising a handled exception multiple times. - - This test uses a Connection populated with test PostgresCredentials values, and - expects a ValueError to be raised by a mock connect function. As a result: - * The Connection state should be "fail" and the handle None, as connect - never returns. 
- * The resulting attempt count should be 11 as we are configured to handle a ValueError. - * retry_connection should raise a FailedToConnectError with the Exception message. - """ - conn = self.postgres_connection - attempts = 0 - - def connect(): - nonlocal attempts - attempts += 1 - - raise ValueError("Something went horribly wrong") - - with self.assertRaisesRegex( - dbt.adapters.exceptions.FailedToConnectError, - "Something went horribly wrong", - ): - BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retry_timeout=0, - retryable_exceptions=(ValueError,), - retry_limit=10, - ) - - assert conn.state == "fail" - assert conn.handle is None - assert attempts == 11 - - def test_retry_connection_fails_handling_all_exceptions(self): - """Test setting a handle fails after exhausting all attempts. - - This test uses a Connection populated with test PostgresCredentials values, and - expects a TypeError to be raised by a mock connect function. As a result: - * The Connection state should be "fail" and the handle None, as connect - never returns. - * The resulting attempt count should be 11 as we are configured to handle all Exceptions. - * retry_connection should raise a FailedToConnectError with the Exception message. - """ - conn = self.postgres_connection - attempts = 0 - - def connect(): - nonlocal attempts - attempts += 1 - - raise TypeError("An unhandled thing went horribly wrong") - - with self.assertRaisesRegex( - dbt.adapters.exceptions.FailedToConnectError, - "An unhandled thing went horribly wrong", - ): - BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retry_timeout=0, - retryable_exceptions=[Exception], - retry_limit=15, - ) - - assert conn.state == "fail" - assert conn.handle is None - assert attempts == 16 - - def test_retry_connection_passes_multiple_handled(self): - """Test setting a handle passes upon handling multiple exceptions. 
- - This test uses a Connection populated with test PostgresCredentials values, and - expects a mock connect to raise a ValueError in the first invocation and a - TypeError in the second invocation. As a result: - * The Connection state should be "open" and the handle True, as connect - returns after both exceptions have been handled. - * The resulting attempt count should be 3. - """ - conn = self.postgres_connection - is_value_err_handled = False - is_type_err_handled = False - attempts = 0 - - def connect(): - nonlocal is_value_err_handled - nonlocal is_type_err_handled - nonlocal attempts - - attempts += 1 - - if is_value_err_handled and is_type_err_handled: - return True - elif is_type_err_handled: - is_value_err_handled = True - raise ValueError("Something went horribly wrong") - else: - is_type_err_handled = True - raise TypeError("An unhandled thing went horribly wrong") - - conn = BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retry_timeout=0, - retryable_exceptions=(ValueError, TypeError), - retry_limit=2, - ) - - assert conn.state == "open" - assert conn.handle is True - assert is_type_err_handled is True - assert is_value_err_handled is True - assert attempts == 3 - - def test_retry_connection_passes_none_excluded(self): - """Test setting a handle passes upon handling multiple exceptions. - - This test uses a Connection populated with test PostgresCredentials values, and - expects a mock connect to raise a ValueError in the first invocation and a - TypeError in the second invocation. As a result: - * The Connection state should be "open" and the handle True, as connect - returns after both exceptions have been handled. - * The resulting attempt count should be 3. 
- """ - conn = self.postgres_connection - is_value_err_handled = False - is_type_err_handled = False - attempts = 0 - - def connect(): - nonlocal is_value_err_handled - nonlocal is_type_err_handled - nonlocal attempts - - attempts += 1 - - if is_value_err_handled and is_type_err_handled: - return True - elif is_type_err_handled: - is_value_err_handled = True - raise ValueError("Something went horribly wrong") - else: - is_type_err_handled = True - raise TypeError("An unhandled thing went horribly wrong") - - conn = BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retry_timeout=0, - retryable_exceptions=(ValueError, TypeError), - retry_limit=2, - ) - - assert conn.state == "open" - assert conn.handle is True - assert is_type_err_handled is True - assert is_value_err_handled is True - assert attempts == 3 - - def test_retry_connection_retry_limit(self): - """Test retry_connection raises an exception with a negative retry limit.""" - conn = self.postgres_connection - attempts = 0 - - def connect(): - nonlocal attempts - attempts += 1 - return True - - with self.assertRaisesRegex( - dbt.adapters.exceptions.FailedToConnectError, - "retry_limit cannot be negative", - ): - BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retry_timeout=0, - retryable_exceptions=(ValueError,), - retry_limit=-2, - ) - - assert conn.state == "fail" - assert conn.handle is None - assert attempts == 0 - - def test_retry_connection_retry_timeout(self): - """Test retry_connection raises an exception with a negative timeout.""" - conn = self.postgres_connection - attempts = 0 - - def connect(): - nonlocal attempts - attempts += 1 - return True - - for retry_timeout in [-10, -2.5, lambda _: -100, lambda _: -10.1]: - with self.assertRaisesRegex( - dbt.adapters.exceptions.FailedToConnectError, - "retry_timeout cannot be negative or return a negative time", - ): - BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - 
retry_timeout=-10, - retryable_exceptions=(ValueError,), - retry_limit=2, - ) - - assert conn.state == "init" - assert conn.handle is None - assert attempts == 0 - - def test_retry_connection_exceeds_recursion_limit(self): - """Test retry_connection raises an exception with retries that exceed recursion limit.""" - conn = self.postgres_connection - attempts = 0 - - def connect(): - nonlocal attempts - attempts += 1 - return True - - with self.assertRaisesRegex( - dbt.adapters.exceptions.FailedToConnectError, - "retry_limit cannot be negative", - ): - BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retry_timeout=2, - retryable_exceptions=(ValueError,), - retry_limit=sys.getrecursionlimit() + 1, - ) - - assert conn.state == "fail" - assert conn.handle is None - assert attempts == 0 - - def test_retry_connection_with_exponential_backoff_timeout(self): - """Test retry_connection with an exponential backoff timeout. - - We assert the provided exponential backoff function gets passed the right attempt number - and produces the expected timeouts. - """ - conn = self.postgres_connection - attempts = 0 - timeouts = [] - - def connect(): - nonlocal attempts - attempts += 1 - - if attempts < 12: - raise ValueError("Keep trying!") - return True - - def exp_backoff(n): - nonlocal timeouts - computed = 2**n - # We store the computed values to ensure they match the expected backoff... - timeouts.append((n, computed)) - # but we return 0 as we don't want the test to go on forever. 
- return 0 - - conn = BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retry_timeout=exp_backoff, - retryable_exceptions=(ValueError,), - retry_limit=12, - ) - - assert conn.state == "open" - assert conn.handle is True - assert attempts == 12 - assert timeouts == [(n, 2**n) for n in range(12)] - - -class PostgresConnectionManagerTest(unittest.TestCase): - def setUp(self): - self.credentials = PostgresCredentials( - host="localhost", - user="test-user", - port=1111, - password="test-password", - database="test-db", - schema="test-schema", - retries=2, - ) - self.connection = Connection("postgres", None, self.credentials) - - def test_open(self): - """Test opening a Postgres Connection with failures in the first 3 attempts. - - This test uses a Connection populated with test PostgresCredentials values, and - expects a mock connect to raise a psycopg2.errors.ConnectionFailuer - in the first 3 invocations, after which the mock should return True. As a result: - * The Connection state should be "open" and the handle True, as connect - returns in the 4th attempt. - * The resulting attempt count should be 4. 
- """ - conn = self.connection - attempt = 0 - - def connect(*args, **kwargs): - nonlocal attempt - attempt += 1 - - if attempt <= 2: - raise psycopg2.errors.ConnectionFailure("Connection has failed") - - return True - - with mock.patch("psycopg2.connect", wraps=connect) as mock_connect: - PostgresConnectionManager.open(conn) - - assert mock_connect.call_count == 3 - - assert attempt == 3 - assert conn.state == "open" - assert conn.handle is True diff --git a/tests/unit/test_adapter_factory.py b/tests/unit/test_adapter_factory.py deleted file mode 100644 index c67b61d7fc0..00000000000 --- a/tests/unit/test_adapter_factory.py +++ /dev/null @@ -1,93 +0,0 @@ -from pathlib import Path -import unittest -from unittest import mock -from dbt.adapters.factory import AdapterContainer -from dbt.adapters.base.plugin import AdapterPlugin -from dbt.include.global_project import ( - PROJECT_NAME as GLOBAL_PROJECT_NAME, -) - - -class TestGetPackageNames(unittest.TestCase): - def setUp(self): - self.root_plugin = AdapterPlugin( - adapter=mock.MagicMock(), - credentials=mock.MagicMock(), - include_path="/path/to/root/plugin", - dependencies=["childa", "childb"], - project_name="root", - ) - self.childa = AdapterPlugin( - adapter=mock.MagicMock(), - credentials=mock.MagicMock(), - include_path="/path/to/childa", - project_name="pkg_childa", - ) - self.childb = AdapterPlugin( - adapter=mock.MagicMock(), - credentials=mock.MagicMock(), - include_path="/path/to/childb", - dependencies=["childc"], - project_name="pkg_childb", - ) - self.childc = AdapterPlugin( - adapter=mock.MagicMock(), - credentials=mock.MagicMock(), - include_path="/path/to/childc", - project_name="pkg_childc", - ) - - self._mock_modules = { - "root": self.root_plugin, - "childa": self.childa, - "childb": self.childb, - "childc": self.childc, - } - - self.factory = AdapterContainer() - - self.load_patch = mock.patch.object(AdapterContainer, "load_plugin") - self.mock_load = self.load_patch.start() - - def 
mock_load_plugin(name: str): - try: - plugin = self._mock_modules[name] - except KeyError: - raise RuntimeError(f"test could not find adapter type {name}!") - self.factory.plugins[name] = plugin - self.factory.packages[plugin.project_name] = Path(plugin.include_path) - for dep in plugin.dependencies: - self.factory.load_plugin(dep) - - self.mock_load.side_effect = mock_load_plugin - - def tearDown(self): - self.load_patch.stop() - - def test_no_packages(self): - assert self.factory.get_adapter_package_names(None) == [GLOBAL_PROJECT_NAME] - - def test_one_package(self): - self.factory.load_plugin("childc") - assert self.factory.get_adapter_package_names("childc") == [ - "pkg_childc", - GLOBAL_PROJECT_NAME, - ] - - def test_simple_child_packages(self): - self.factory.load_plugin("childb") - assert self.factory.get_adapter_package_names("childb") == [ - "pkg_childb", - "pkg_childc", - GLOBAL_PROJECT_NAME, - ] - - def test_layered_child_packages(self): - self.factory.load_plugin("root") - assert self.factory.get_adapter_package_names("root") == [ - "root", - "pkg_childa", - "pkg_childb", - "pkg_childc", - GLOBAL_PROJECT_NAME, - ] diff --git a/tests/unit/test_base_adapter.py b/tests/unit/test_base_adapter.py deleted file mode 100644 index 66d8af5c5d8..00000000000 --- a/tests/unit/test_base_adapter.py +++ /dev/null @@ -1,184 +0,0 @@ -from argparse import Namespace -from unittest import mock -import pytest - -from dbt.adapters.base.impl import BaseAdapter, ConstraintSupport -import dbt.flags as flags - - -class TestBaseAdapterConstraintRendering: - @pytest.fixture(scope="class", autouse=True) - def setUp(self): - flags.set_from_args(Namespace(), None) - - @pytest.fixture(scope="class") - def connection_manager(request): - mock_connection_manager = mock.Mock() - mock_connection_manager.TYPE = "base" - return mock_connection_manager - - column_constraints = [ - ([{"type": "check"}], ["column_name integer"]), - ([{"type": "check", "name": "test_name"}], ["column_name 
integer"]), - ( - [{"type": "check", "expression": "test expression"}], - ["column_name integer check (test expression)"], - ), - ([{"type": "not_null"}], ["column_name integer not null"]), - ( - [{"type": "not_null", "expression": "test expression"}], - ["column_name integer not null test expression"], - ), - ([{"type": "unique"}], ["column_name integer unique"]), - ( - [{"type": "unique", "expression": "test expression"}], - ["column_name integer unique test expression"], - ), - ([{"type": "primary_key"}], ["column_name integer primary key"]), - ( - [{"type": "primary_key", "expression": "test expression"}], - ["column_name integer primary key test expression"], - ), - ([{"type": "foreign_key"}], ["column_name integer"]), - ( - [{"type": "foreign_key", "expression": "other_table (c1)"}], - ["column_name integer references other_table (c1)"], - ), - ([{"type": "check"}, {"type": "unique"}], ["column_name integer unique"]), - ] - - @pytest.mark.parametrize("constraints,expected_rendered_constraints", column_constraints) - def test_render_raw_columns_constraints( - self, constraints, expected_rendered_constraints, request - ): - BaseAdapter.ConnectionManager = request.getfixturevalue("connection_manager") - BaseAdapter.CONSTRAINT_SUPPORT = { - constraint: ConstraintSupport.ENFORCED for constraint in BaseAdapter.CONSTRAINT_SUPPORT - } - - rendered_constraints = BaseAdapter.render_raw_columns_constraints( - { - "column_name": { - "name": "column_name", - "data_type": "integer", - "constraints": constraints, - } - } - ) - assert rendered_constraints == expected_rendered_constraints - - column_constraints_unsupported = [ - ([{"type": "check"}], ["column_name integer"]), - ([{"type": "check", "expression": "test expression"}], ["column_name integer"]), - ([{"type": "not_null"}], ["column_name integer"]), - ([{"type": "not_null", "expression": "test expression"}], ["column_name integer"]), - ([{"type": "unique"}], ["column_name integer"]), - ([{"type": "unique", 
"expression": "test expression"}], ["column_name integer"]), - ([{"type": "primary_key"}], ["column_name integer"]), - ([{"type": "primary_key", "expression": "test expression"}], ["column_name integer"]), - ([{"type": "foreign_key"}], ["column_name integer"]), - ([{"type": "check"}, {"type": "unique"}], ["column_name integer"]), - ] - - @pytest.mark.parametrize( - "constraints,expected_rendered_constraints", column_constraints_unsupported - ) - def test_render_raw_columns_constraints_unsupported( - self, constraints, expected_rendered_constraints, request - ): - BaseAdapter.ConnectionManager = request.getfixturevalue("connection_manager") - BaseAdapter.CONSTRAINT_SUPPORT = { - constraint: ConstraintSupport.NOT_SUPPORTED - for constraint in BaseAdapter.CONSTRAINT_SUPPORT - } - - rendered_constraints = BaseAdapter.render_raw_columns_constraints( - { - "column_name": { - "name": "column_name", - "data_type": "integer", - "constraints": constraints, - } - } - ) - assert rendered_constraints == expected_rendered_constraints - - model_constraints = [ - ([{"type": "check"}], []), - ([{"type": "check", "expression": "test expression"}], ["check (test expression)"]), - ( - [{"type": "check", "expression": "test expression", "name": "test_name"}], - ["constraint test_name check (test expression)"], - ), - ([{"type": "not_null"}], []), - ([{"type": "not_null", "expression": "test expression"}], []), - ([{"type": "unique", "columns": ["c1", "c2"]}], ["unique (c1, c2)"]), - ([{"type": "unique", "columns": ["c1", "c2"]}], ["unique (c1, c2)"]), - ( - [ - { - "type": "unique", - "columns": ["c1", "c2"], - "expression": "test expression", - "name": "test_name", - } - ], - ["constraint test_name unique test expression (c1, c2)"], - ), - ([{"type": "primary_key", "columns": ["c1", "c2"]}], ["primary key (c1, c2)"]), - ( - [{"type": "primary_key", "columns": ["c1", "c2"], "expression": "test expression"}], - ["primary key test expression (c1, c2)"], - ), - ( - [ - { - "type": 
"primary_key", - "columns": ["c1", "c2"], - "expression": "test expression", - "name": "test_name", - } - ], - ["constraint test_name primary key test expression (c1, c2)"], - ), - ( - [{"type": "foreign_key", "columns": ["c1", "c2"], "expression": "other_table (c1)"}], - ["foreign key (c1, c2) references other_table (c1)"], - ), - ( - [ - { - "type": "foreign_key", - "columns": ["c1", "c2"], - "expression": "other_table (c1)", - "name": "test_name", - } - ], - ["constraint test_name foreign key (c1, c2) references other_table (c1)"], - ), - ] - - @pytest.mark.parametrize("constraints,expected_rendered_constraints", model_constraints) - def test_render_raw_model_constraints( - self, constraints, expected_rendered_constraints, request - ): - BaseAdapter.ConnectionManager = request.getfixturevalue("connection_manager") - BaseAdapter.CONSTRAINT_SUPPORT = { - constraint: ConstraintSupport.ENFORCED for constraint in BaseAdapter.CONSTRAINT_SUPPORT - } - - rendered_constraints = BaseAdapter.render_raw_model_constraints(constraints) - assert rendered_constraints == expected_rendered_constraints - - @pytest.mark.parametrize("constraints,expected_rendered_constraints", model_constraints) - def test_render_raw_model_constraints_unsupported( - self, constraints, expected_rendered_constraints, request - ): - BaseAdapter.ConnectionManager = request.getfixturevalue("connection_manager") - BaseAdapter.CONSTRAINT_SUPPORT = { - constraint: ConstraintSupport.NOT_SUPPORTED - for constraint in BaseAdapter.CONSTRAINT_SUPPORT - } - - rendered_constraints = BaseAdapter.render_raw_model_constraints(constraints) - assert rendered_constraints == [] diff --git a/tests/unit/test_postgres_adapter.py b/tests/unit/test_postgres_adapter.py deleted file mode 100644 index 211862c6a74..00000000000 --- a/tests/unit/test_postgres_adapter.py +++ /dev/null @@ -1,656 +0,0 @@ -import dataclasses - -import agate -import decimal -from multiprocessing import get_context -import unittest -from unittest import 
mock - -from dbt.adapters.base import BaseRelation -from dbt.adapters.contracts.relation import Path -from dbt.context.manifest import generate_query_header_context -from dbt.task.debug import DebugTask - -from dbt.adapters.postgres import PostgresAdapter -from dbt.adapters.postgres import Plugin as PostgresPlugin -from dbt.contracts.files import FileHash -from dbt.contracts.graph.manifest import ManifestStateCheck -from dbt_common.clients import agate_helper -from dbt.exceptions import DbtConfigError -from dbt_common.exceptions import DbtValidationError -from dbt.context.providers import generate_runtime_macro_context -from psycopg2 import extensions as psycopg2_extensions -from psycopg2 import DatabaseError - -from .utils import ( - config_from_parts_or_dicts, - inject_adapter, - mock_connection, - TestAdapterConversions, - load_internal_manifest_macros, - clear_plugin, -) - -from dbt.flags import set_from_args -from argparse import Namespace - -set_from_args(Namespace(WARN_ERROR=False), None) - - -class TestPostgresAdapter(unittest.TestCase): - def setUp(self): - project_cfg = { - "name": "X", - "version": "0.1", - "profile": "test", - "project-root": "/tmp/dbt/does-not-exist", - "config-version": 2, - } - profile_cfg = { - "outputs": { - "test": { - "type": "postgres", - "dbname": "postgres", - "user": "root", - "host": "thishostshouldnotexist", - "pass": "password", - "port": 5432, - "schema": "public", - } - }, - "target": "test", - } - - self.config = config_from_parts_or_dicts(project_cfg, profile_cfg) - self.mp_context = get_context("spawn") - self._adapter = None - - @property - def adapter(self): - if self._adapter is None: - self._adapter = PostgresAdapter(self.config, self.mp_context) - inject_adapter(self._adapter, PostgresPlugin) - return self._adapter - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_acquire_connection_validations(self, psycopg2): - try: - connection = self.adapter.acquire_connection("dummy") - except 
DbtValidationError as e: - self.fail("got DbtValidationError: {}".format(str(e))) - except BaseException as e: - self.fail("acquiring connection failed with unknown exception: {}".format(str(e))) - self.assertEqual(connection.type, "postgres") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once() - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_acquire_connection(self, psycopg2): - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - self.assertEqual(connection.state, "open") - self.assertNotEqual(connection.handle, None) - psycopg2.connect.assert_called_once() - - def test_cancel_open_connections_empty(self): - self.assertEqual(len(list(self.adapter.cancel_open_connections())), 0) - - def test_cancel_open_connections_master(self): - key = self.adapter.connections.get_thread_identifier() - self.adapter.connections.thread_connections[key] = mock_connection("master") - self.assertEqual(len(list(self.adapter.cancel_open_connections())), 0) - - def test_cancel_open_connections_single(self): - master = mock_connection("master") - model = mock_connection("model") - key = self.adapter.connections.get_thread_identifier() - model.handle.get_backend_pid.return_value = 42 - self.adapter.connections.thread_connections.update( - { - key: master, - 1: model, - } - ) - with mock.patch.object(self.adapter.connections, "add_query") as add_query: - query_result = mock.MagicMock() - add_query.return_value = (None, query_result) - - self.assertEqual(len(list(self.adapter.cancel_open_connections())), 1) - - add_query.assert_called_once_with("select pg_terminate_backend(42)") - - master.handle.get_backend_pid.assert_not_called() - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_default_connect_timeout(self, psycopg2): - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - 
connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", - host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=10, - application_name="dbt", - ) - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_changed_connect_timeout(self, psycopg2): - self.config.credentials = self.config.credentials.replace(connect_timeout=30) - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", - host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=30, - application_name="dbt", - ) - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_default_keepalive(self, psycopg2): - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", - host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=10, - application_name="dbt", - ) - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_changed_keepalive(self, psycopg2): - self.config.credentials = self.config.credentials.replace(keepalives_idle=256) - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", - host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=10, - keepalives_idle=256, - application_name="dbt", - ) - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_default_application_name(self, psycopg2): - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", 
- host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=10, - application_name="dbt", - ) - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_changed_application_name(self, psycopg2): - self.config.credentials = self.config.credentials.replace(application_name="myapp") - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", - host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=10, - application_name="myapp", - ) - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_role(self, psycopg2): - self.config.credentials = self.config.credentials.replace(role="somerole") - connection = self.adapter.acquire_connection("dummy") - - cursor = connection.handle.cursor() - - cursor.execute.assert_called_once_with("set role somerole") - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_search_path(self, psycopg2): - self.config.credentials = self.config.credentials.replace(search_path="test") - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", - host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=10, - application_name="dbt", - options="-c search_path=test", - ) - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_sslmode(self, psycopg2): - self.config.credentials = self.config.credentials.replace(sslmode="require") - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", - host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=10, - sslmode="require", - 
application_name="dbt", - ) - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_ssl_parameters(self, psycopg2): - self.config.credentials = self.config.credentials.replace(sslmode="verify-ca") - self.config.credentials = self.config.credentials.replace(sslcert="service.crt") - self.config.credentials = self.config.credentials.replace(sslkey="service.key") - self.config.credentials = self.config.credentials.replace(sslrootcert="ca.crt") - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", - host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=10, - sslmode="verify-ca", - sslcert="service.crt", - sslkey="service.key", - sslrootcert="ca.crt", - application_name="dbt", - ) - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_schema_with_space(self, psycopg2): - self.config.credentials = self.config.credentials.replace(search_path="test test") - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", - host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=10, - application_name="dbt", - options="-c search_path=test\ test", # noqa: [W605] - ) - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_set_zero_keepalive(self, psycopg2): - self.config.credentials = self.config.credentials.replace(keepalives_idle=0) - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", - host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=10, - application_name="dbt", - ) - - @mock.patch.object(PostgresAdapter, 
"execute_macro") - @mock.patch.object(PostgresAdapter, "_get_catalog_relations") - def test_get_catalog_various_schemas(self, mock_get_relations, mock_execute): - self.catalog_test(mock_get_relations, mock_execute, False) - - @mock.patch.object(PostgresAdapter, "execute_macro") - @mock.patch.object(PostgresAdapter, "_get_catalog_relations") - def test_get_filtered_catalog(self, mock_get_relations, mock_execute): - self.catalog_test(mock_get_relations, mock_execute, True) - - def catalog_test(self, mock_get_relations, mock_execute, filtered=False): - column_names = ["table_database", "table_schema", "table_name"] - relations = [ - BaseRelation(path=Path(database="dbt", schema="foo", identifier="bar")), - BaseRelation(path=Path(database="dbt", schema="FOO", identifier="baz")), - BaseRelation(path=Path(database="dbt", schema=None, identifier="bar")), - BaseRelation(path=Path(database="dbt", schema="quux", identifier="bar")), - BaseRelation(path=Path(database="dbt", schema="skip", identifier="bar")), - ] - rows = list(map(lambda x: dataclasses.astuple(x.path), relations)) - mock_execute.return_value = agate.Table(rows=rows, column_names=column_names) - - mock_get_relations.return_value = relations - - relation_configs = [] - used_schemas = {("dbt", "foo"), ("dbt", "quux")} - - if filtered: - catalog, exceptions = self.adapter.get_filtered_catalog( - relation_configs, used_schemas, set([relations[0], relations[3]]) - ) - else: - catalog, exceptions = self.adapter.get_catalog(relation_configs, used_schemas) - - tupled_catalog = set(map(tuple, catalog)) - if filtered: - self.assertEqual(tupled_catalog, {rows[0], rows[3]}) - else: - self.assertEqual(tupled_catalog, {rows[0], rows[1], rows[3]}) - - self.assertEqual(exceptions, []) - - -class TestConnectingPostgresAdapter(unittest.TestCase): - def setUp(self): - self.target_dict = { - "type": "postgres", - "dbname": "postgres", - "user": "root", - "host": "thishostshouldnotexist", - "pass": "password", - "port": 5432, - 
"schema": "public", - } - - profile_cfg = { - "outputs": { - "test": self.target_dict, - }, - "target": "test", - } - project_cfg = { - "name": "X", - "version": "0.1", - "profile": "test", - "project-root": "/tmp/dbt/does-not-exist", - "quoting": { - "identifier": False, - "schema": True, - }, - "config-version": 2, - } - - self.config = config_from_parts_or_dicts(project_cfg, profile_cfg) - self.mp_context = get_context("spawn") - - self.handle = mock.MagicMock(spec=psycopg2_extensions.connection) - self.cursor = self.handle.cursor.return_value - self.mock_execute = self.cursor.execute - self.patcher = mock.patch("dbt.adapters.postgres.connections.psycopg2") - self.psycopg2 = self.patcher.start() - - # Create the Manifest.state_check patcher - @mock.patch("dbt.parser.manifest.ManifestLoader.build_manifest_state_check") - def _mock_state_check(self): - all_projects = self.all_projects - return ManifestStateCheck( - vars_hash=FileHash.from_contents("vars"), - project_hashes={name: FileHash.from_contents(name) for name in all_projects}, - profile_hash=FileHash.from_contents("profile"), - ) - - self.load_state_check = mock.patch( - "dbt.parser.manifest.ManifestLoader.build_manifest_state_check" - ) - self.mock_state_check = self.load_state_check.start() - self.mock_state_check.side_effect = _mock_state_check - - self.psycopg2.connect.return_value = self.handle - self.adapter = PostgresAdapter(self.config, self.mp_context) - self.adapter.set_macro_resolver(load_internal_manifest_macros(self.config)) - self.adapter.set_macro_context_generator(generate_runtime_macro_context) - self.adapter.connections.set_query_header( - generate_query_header_context(self.config, self.adapter.get_macro_resolver()) - ) - self.qh_patch = mock.patch.object(self.adapter.connections.query_header, "add") - self.mock_query_header_add = self.qh_patch.start() - self.mock_query_header_add.side_effect = lambda q: "/* dbt */\n{}".format(q) - self.adapter.acquire_connection() - 
inject_adapter(self.adapter, PostgresPlugin) - - def tearDown(self): - # we want a unique self.handle every time. - self.adapter.cleanup_connections() - self.qh_patch.stop() - self.patcher.stop() - self.load_state_check.stop() - clear_plugin(PostgresPlugin) - - def test_quoting_on_drop_schema(self): - relation = self.adapter.Relation.create( - database="postgres", - schema="test_schema", - quote_policy=self.adapter.config.quoting, - ) - self.adapter.drop_schema(relation) - - self.mock_execute.assert_has_calls( - [mock.call('/* dbt */\ndrop schema if exists "test_schema" cascade', None)] - ) - - def test_quoting_on_drop(self): - relation = self.adapter.Relation.create( - database="postgres", - schema="test_schema", - identifier="test_table", - type="table", - quote_policy=self.adapter.config.quoting, - ) - self.adapter.drop_relation(relation) - self.mock_execute.assert_has_calls( - [ - mock.call( - '/* dbt */\ndrop table if exists "postgres"."test_schema".test_table cascade', - None, - ) - ] - ) - - def test_quoting_on_truncate(self): - relation = self.adapter.Relation.create( - database="postgres", - schema="test_schema", - identifier="test_table", - type="table", - quote_policy=self.adapter.config.quoting, - ) - self.adapter.truncate_relation(relation) - self.mock_execute.assert_has_calls( - [mock.call('/* dbt */\ntruncate table "postgres"."test_schema".test_table', None)] - ) - - def test_quoting_on_rename(self): - from_relation = self.adapter.Relation.create( - database="postgres", - schema="test_schema", - identifier="table_a", - type="table", - quote_policy=self.adapter.config.quoting, - ) - to_relation = self.adapter.Relation.create( - database="postgres", - schema="test_schema", - identifier="table_b", - type="table", - quote_policy=self.adapter.config.quoting, - ) - - self.adapter.rename_relation(from_relation=from_relation, to_relation=to_relation) - self.mock_execute.assert_has_calls( - [ - mock.call( - '/* dbt */\nalter table 
"postgres"."test_schema".table_a rename to table_b', - None, - ) - ] - ) - - def test_debug_connection_ok(self): - DebugTask.validate_connection(self.target_dict) - self.mock_execute.assert_has_calls([mock.call("/* dbt */\nselect 1 as id", None)]) - - def test_debug_connection_fail_nopass(self): - del self.target_dict["pass"] - with self.assertRaises(DbtConfigError): - DebugTask.validate_connection(self.target_dict) - - def test_connection_fail_select(self): - self.mock_execute.side_effect = DatabaseError() - with self.assertRaises(DbtConfigError): - DebugTask.validate_connection(self.target_dict) - self.mock_execute.assert_has_calls([mock.call("/* dbt */\nselect 1 as id", None)]) - - def test_dbname_verification_is_case_insensitive(self): - # Override adapter settings from setUp() - self.target_dict["dbname"] = "Postgres" - profile_cfg = { - "outputs": { - "test": self.target_dict, - }, - "target": "test", - } - project_cfg = { - "name": "X", - "version": "0.1", - "profile": "test", - "project-root": "/tmp/dbt/does-not-exist", - "quoting": { - "identifier": False, - "schema": True, - }, - "config-version": 2, - } - self.config = config_from_parts_or_dicts(project_cfg, profile_cfg) - self.mp_context = get_context("spawn") - self.adapter.cleanup_connections() - self._adapter = PostgresAdapter(self.config, self.mp_context) - self.adapter.verify_database("postgres") - - -class TestPostgresFilterCatalog(unittest.TestCase): - def test__catalog_filter_table(self): - used_schemas = [["a", "B"], ["a", "1234"]] - column_names = ["table_name", "table_database", "table_schema", "something"] - rows = [ - ["foo", "a", "b", "1234"], # include - ["foo", "a", "1234", "1234"], # include, w/ table schema as str - ["foo", "c", "B", "1234"], # skip - ["1234", "A", "B", "1234"], # include, w/ table name as str - ] - table = agate.Table(rows, column_names, agate_helper.DEFAULT_TYPE_TESTER) - - result = PostgresAdapter._catalog_filter_table(table, used_schemas) - assert len(result) == 3 
- for row in result.rows: - assert isinstance(row["table_schema"], str) - assert isinstance(row["table_database"], str) - assert isinstance(row["table_name"], str) - assert isinstance(row["something"], decimal.Decimal) - - -class TestPostgresAdapterConversions(TestAdapterConversions): - def test_convert_text_type(self): - rows = [ - ["", "a1", "stringval1"], - ["", "a2", "stringvalasdfasdfasdfa"], - ["", "a3", "stringval3"], - ] - agate_table = self._make_table_of(rows, agate.Text) - expected = ["text", "text", "text"] - for col_idx, expect in enumerate(expected): - assert PostgresAdapter.convert_text_type(agate_table, col_idx) == expect - - def test_convert_number_type(self): - rows = [ - ["", "23.98", "-1"], - ["", "12.78", "-2"], - ["", "79.41", "-3"], - ] - agate_table = self._make_table_of(rows, agate.Number) - expected = ["integer", "float8", "integer"] - for col_idx, expect in enumerate(expected): - assert PostgresAdapter.convert_number_type(agate_table, col_idx) == expect - - def test_convert_boolean_type(self): - rows = [ - ["", "false", "true"], - ["", "false", "false"], - ["", "false", "true"], - ] - agate_table = self._make_table_of(rows, agate.Boolean) - expected = ["boolean", "boolean", "boolean"] - for col_idx, expect in enumerate(expected): - assert PostgresAdapter.convert_boolean_type(agate_table, col_idx) == expect - - def test_convert_datetime_type(self): - rows = [ - ["", "20190101T01:01:01Z", "2019-01-01 01:01:01"], - ["", "20190102T01:01:01Z", "2019-01-01 01:01:01"], - ["", "20190103T01:01:01Z", "2019-01-01 01:01:01"], - ] - agate_table = self._make_table_of( - rows, [agate.DateTime, agate_helper.ISODateTime, agate.DateTime] - ) - expected = [ - "timestamp without time zone", - "timestamp without time zone", - "timestamp without time zone", - ] - for col_idx, expect in enumerate(expected): - assert PostgresAdapter.convert_datetime_type(agate_table, col_idx) == expect - - def test_convert_date_type(self): - rows = [ - ["", "2019-01-01", 
"2019-01-04"], - ["", "2019-01-02", "2019-01-04"], - ["", "2019-01-03", "2019-01-04"], - ] - agate_table = self._make_table_of(rows, agate.Date) - expected = ["date", "date", "date"] - for col_idx, expect in enumerate(expected): - assert PostgresAdapter.convert_date_type(agate_table, col_idx) == expect - - def test_convert_time_type(self): - # dbt's default type testers actually don't have a TimeDelta at all. - agate.TimeDelta - rows = [ - ["", "120s", "10s"], - ["", "3m", "11s"], - ["", "1h", "12s"], - ] - agate_table = self._make_table_of(rows, agate.TimeDelta) - expected = ["time", "time", "time"] - for col_idx, expect in enumerate(expected): - assert PostgresAdapter.convert_time_type(agate_table, col_idx) == expect diff --git a/tox.ini b/tox.ini index fc84668d20c..a0b0912ab6f 100644 --- a/tox.ini +++ b/tox.ini @@ -31,7 +31,6 @@ passenv = commands = {envpython} -m pytest --cov=core --cov-append --cov-report=xml {posargs} tests/functional -k "not tests/functional/graph_selection" {envpython} -m pytest --cov=core --cov-append --cov-report=xml {posargs} tests/functional/graph_selection - {envpython} -m pytest --cov=core --cov-append --cov-report=xml {posargs} tests/adapter deps = -rdev-requirements.txt