From cf0b71b69b24cbff6fe2fbef6a3fe6c4643e0b7f Mon Sep 17 00:00:00 2001 From: artc95 Date: Thu, 8 Feb 2024 10:20:33 +0100 Subject: [PATCH 01/32] bump for v1.6.0rc1 - __version__.py, setup.py, dev_requirements.txt --- dbt/adapters/synapse/__version__.py | 2 +- dev_requirements.txt | 2 +- setup.py | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/dbt/adapters/synapse/__version__.py b/dbt/adapters/synapse/__version__.py index 9b363e24..7a0130d4 100644 --- a/dbt/adapters/synapse/__version__.py +++ b/dbt/adapters/synapse/__version__.py @@ -1 +1 @@ -version = "1.4.1rc1" +version = "1.6.0rc1" diff --git a/dev_requirements.txt b/dev_requirements.txt index 02e93bd0..f21875fc 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -4,7 +4,7 @@ wheel==0.40.0 pre-commit==2.21.0;python_version<"3.8" pre-commit==3.3.1;python_version>="3.8" pytest-dotenv==0.5.2 -dbt-tests-adapter~=1.4.9 +dbt-tests-adapter~=1.6.9 pyodbc==4.0.39 --no-binary :all: aiohttp==3.8.3 azure-mgmt-synapse==2.0.0 diff --git a/setup.py b/setup.py index dec7e0c8..50ff82c5 100644 --- a/setup.py +++ b/setup.py @@ -15,8 +15,8 @@ "Anders Swanson", "Sam Debruyn", ] -dbt_version = "1.4" -dbt_fabric_requirement = "dbt-fabric~=1.4.0rc3" +dbt_version = "1.6" +dbt_fabric_requirement = "dbt-fabric~=1.6.0" description = """An Azure Synapse adapter plugin for dbt""" this_directory = os.path.abspath(os.path.dirname(__file__)) From df687124104a3fa32d5e9e98bc14ba7330dc6d32 Mon Sep 17 00:00:00 2001 From: hellafech Date: Mon, 12 Feb 2024 14:01:51 +0100 Subject: [PATCH 02/32] Added feature dbt-debug (test) --- .gitignore | 1 + CHANGELOG.md | 5 ++ dbt-synapse | 73 +++++++++++++++++++++++ tests/functional/adapter/test_debug.py | 82 ++++++++++++++++++++++++++ 4 files changed, 161 insertions(+) create mode 100644 dbt-synapse create mode 100644 tests/functional/adapter/test_debug.py diff --git a/.gitignore b/.gitignore index d8e2bbd4..18cba89c 100644 --- a/.gitignore +++ b/.gitignore @@ -98,3 +98,4 @@ ENV/ 
env.bak/ venv.bak/ /test.env +.dbtenv/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 7e3773db..78afb462 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,9 @@ # Changelog +### v1.6.0 + +## Features +* Added tests related to dbt-debug to test --connection parameter + ## v1.4.1rc1 #### Under the hood diff --git a/dbt-synapse b/dbt-synapse new file mode 100644 index 00000000..cd5ccd52 --- /dev/null +++ b/dbt-synapse @@ -0,0 +1,73 @@ +# Development of the adapter + +The Synapse adapter uses the [dbt-sqlserver](https://github.com/dbt-msft/dbt-sqlserver) adapter underneath. +This repository mostly contains a set of macros that override the behavior of dbt-sqlserver so that it works with Synapse. + +Python 3.10 is used for developing the adapter. To get started, bootstrap your environment as follows: + +Create a virtual environment, [pyenv](https://github.com/pyenv/pyenv) is used in the example: + +```shell +pyenv install 3.10.7 +pyenv virtualenv 3.10.7 dbt-synapse +pyenv activate dbt-synapse +``` + +Install the development dependencies and pre-commit and get information about possible make commands: + +```shell +make dev +make help +``` + +[Pre-commit](https://pre-commit.com/) helps us to maintain a consistent style and code quality across the entire project. +After running `make dev`, pre-commit will automatically validate your commits and fix any formatting issues whenever possible. + +## Testing + +The functional tests require a running Synapse Dedicated SQL Pool instance. +You can configure the connection to this instance with the file `test.env` in the root of the project. +You can use the provided `test.env.sample` as a base. + +```shell +cp test.env.sample test.env +``` + +You can use the following command to run the functional tests: + +```shell +make functional +``` + +## CI/CD + +We use Docker images that have all the things we need to test the adapter in the CI/CD workflows. 
+The Dockerfile and image are part of the [dbt-sqlserver](https://github.com/dbt-msft/dbt-sqlserver) repository. + +All CI/CD pipelines are using GitHub Actions. The following pipelines are available: + +* `integration-tests-azure`: runs the integration tests for Azure SQL Server. +* `release-version`: publishes the adapter to PyPI. + +There is an additional [Pre-commit](https://pre-commit.ci/) pipeline that validates the code style. + +### Azure integration tests + +The following environment variables are available: + +* `DBT_SYNAPSE_SERVER`: Name of the Synapse workspace +* `DBT_SYNAPSE_DB`: Name of the Synapse dedicated SQL pool +* `DBT_AZURE_TENANT`: Azure tenant ID +* `DBT_AZURE_SUBSCRIPTION_ID`: Azure subscription ID +* `DBT_AZURE_RESOURCE_GROUP_NAME`: Azure resource group name +* `DBT_AZURE_SP_NAME`: Client/application ID of the service principal used to connect to Azure AD +* `DBT_AZURE_SP_SECRET`: Password of the service principal used to connect to Azure AD + +## Releasing a new version + +Make sure the version number is bumped in `__version__.py`. Then, create a git tag named `v` and push it to GitHub. +A GitHub Actions workflow will be triggered to build the package and push it to PyPI. + +Make sure that the dependency to dbt-sqlserver is bumped to a compatible version in `setup.py`. + +If you're releasing support for a new version of `dbt-core`, also bump the `dbt_version` in `setup.py`. 
diff --git a/tests/functional/adapter/test_debug.py b/tests/functional/adapter/test_debug.py new file mode 100644 index 00000000..57063e6b --- /dev/null +++ b/tests/functional/adapter/test_debug.py @@ -0,0 +1,82 @@ +import os +import re + +import pytest +import yaml +from dbt.cli.exceptions import DbtUsageException +from dbt.tests.adapter.dbt_debug.test_dbt_debug import BaseDebug, BaseDebugProfileVariable +from dbt.tests.util import run_dbt, run_dbt_and_capture + + +class TestDebugSynapse(BaseDebug): + def test_ok(self, project): + run_dbt(["debug"]) + assert "ERROR" not in self.capsys.readouterr().out + + def test_nopass(self, project): + run_dbt(["debug", "--target", "nopass"], expect_pass=False) + self.assertGotValue(re.compile(r"\s+profiles\.yml file"), "ERROR invalid") + + def test_connection_flag(self, project): + """Testing the --connection flag works as expected, including that output is not lost""" + _, out = run_dbt_and_capture(["debug", "--connection"]) + assert "Skipping steps before connection verification" in out + + _, out = run_dbt_and_capture( + ["debug", "--connection", "--target", "NONE"], expect_pass=False + ) + assert "1 check failed" in out + assert "The profile 'test' does not have a target named 'NONE'." 
in out + + _, out = run_dbt_and_capture( + ["debug", "--connection", "--profiles-dir", "NONE"], expect_pass=False + ) + assert "Using profiles dir at NONE" + assert "1 check failed" in out + assert "dbt looked for a profiles.yml file in NONE" in out + + def test_wronguser(self, project): + run_dbt(["debug", "--target", "wronguser"], expect_pass=False) + self.assertGotValue(re.compile(r"\s+Connection test"), "ERROR") + + def test_empty_target(self, project): + run_dbt(["debug", "--target", "none_target"], expect_pass=False) + self.assertGotValue(re.compile(r"\s+output 'none_target'"), "misconfigured") + + +class TestDebugProfileVariableSynapse(BaseDebugProfileVariable): + pass + + +class TestDebugInvalidProjectSynapse(BaseDebug): + def test_empty_project(self, project): + with open("dbt_project.yml", "w") as f: # noqa: F841 + pass + + run_dbt(["debug", "--profile", "test"], expect_pass=False) + splitout = self.capsys.readouterr().out.split("\n") + self.check_project(splitout) + + def test_badproject(self, project): + update_project = {"invalid-key": "not a valid key so this is bad project"} + + with open("dbt_project.yml", "w") as f: + yaml.safe_dump(update_project, f) + + run_dbt(["debug", "--profile", "test"], expect_pass=False) + splitout = self.capsys.readouterr().out.split("\n") + self.check_project(splitout) + + def test_not_found_project(self, project): + with pytest.raises(DbtUsageException): + run_dbt(["debug", "--project-dir", "nopass"]) + + def test_invalid_project_outside_current_dir(self, project): + # create a dbt_project.yml + project_config = {"invalid-key": "not a valid key in this project"} + os.makedirs("custom", exist_ok=True) + with open("custom/dbt_project.yml", "w") as f: + yaml.safe_dump(project_config, f, default_flow_style=True) + run_dbt(["debug", "--project-dir", "custom"], expect_pass=False) + splitout = self.capsys.readouterr().out.split("\n") + self.check_project(splitout) From d0f974170e83d96348ae014afd3af47667786d9c Mon Sep 17 
00:00:00 2001 From: artc95 Date: Tue, 13 Feb 2024 11:26:59 +0100 Subject: [PATCH 03/32] add get_create_materialized_view_as_sql macro and tests ; undo accidental commits --- .gitignore | 1 - dbt-synapse | 73 ------------------- .../synapse/macros/adapters/relation.sql | 9 ++- .../materialized_view/materialized_view.sql | 14 ++++ .../adapter/test_materialized_views.py | 67 +++++++++++++++++ 5 files changed, 88 insertions(+), 76 deletions(-) delete mode 100644 dbt-synapse create mode 100644 dbt/include/synapse/macros/materializations/models/materialized_view/materialized_view.sql create mode 100644 tests/functional/adapter/test_materialized_views.py diff --git a/.gitignore b/.gitignore index 18cba89c..d8e2bbd4 100644 --- a/.gitignore +++ b/.gitignore @@ -98,4 +98,3 @@ ENV/ env.bak/ venv.bak/ /test.env -.dbtenv/ diff --git a/dbt-synapse b/dbt-synapse deleted file mode 100644 index cd5ccd52..00000000 --- a/dbt-synapse +++ /dev/null @@ -1,73 +0,0 @@ -# Development of the adapter - -The Synapse adapter uses the [dbt-sqlserver](https://github.com/dbt-msft/dbt-sqlserver) adapter underneath. -This repository mostly contains a set of macros that override the behavior of dbt-sqlserver so that it works with Synapse. - -Python 3.10 is used for developing the adapter. To get started, bootstrap your environment as follows: - -Create a virtual environment, [pyenv](https://github.com/pyenv/pyenv) is used in the example: - -```shell -pyenv install 3.10.7 -pyenv virtualenv 3.10.7 dbt-synapse -pyenv activate dbt-synapse -``` - -Install the development dependencies and pre-commit and get information about possible make commands: - -```shell -make dev -make help -``` - -[Pre-commit](https://pre-commit.com/) helps us to maintain a consistent style and code quality across the entire project. -After running `make dev`, pre-commit will automatically validate your commits and fix any formatting issues whenever possible. 
- -## Testing - -The functional tests require a running Synapse Dedicated SQL Pool instance. -You can configure the connection to this instance with the file `test.env` in the root of the project. -You can use the provided `test.env.sample` as a base. - -```shell -cp test.env.sample test.env -``` - -You can use the following command to run the functional tests: - -```shell -make functional -``` - -## CI/CD - -We use Docker images that have all the things we need to test the adapter in the CI/CD workflows. -The Dockerfile and image are part of the [dbt-sqlserver](https://github.com/dbt-msft/dbt-sqlserver) repository. - -All CI/CD pipelines are using GitHub Actions. The following pipelines are available: - -* `integration-tests-azure`: runs the integration tests for Azure SQL Server. -* `release-version`: publishes the adapter to PyPI. - -There is an additional [Pre-commit](https://pre-commit.ci/) pipeline that validates the code style. - -### Azure integration tests - -The following environment variables are available: - -* `DBT_SYNAPSE_SERVER`: Name of the Synapse workspace -* `DBT_SYNAPSE_DB`: Name of the Synapse dedicated SQL pool -* `DBT_AZURE_TENANT`: Azure tenant ID -* `DBT_AZURE_SUBSCRIPTION_ID`: Azure subscription ID -* `DBT_AZURE_RESOURCE_GROUP_NAME`: Azure resource group name -* `DBT_AZURE_SP_NAME`: Client/application ID of the service principal used to connect to Azure AD -* `DBT_AZURE_SP_SECRET`: Password of the service principal used to connect to Azure AD - -## Releasing a new version - -Make sure the version number is bumped in `__version__.py`. Then, create a git tag named `v` and push it to GitHub. -A GitHub Actions workflow will be triggered to build the package and push it to PyPI. - -Make sure that the dependency to dbt-sqlserver is bumped to a compatible version in `setup.py`. - -If you're releasing support for a new version of `dbt-core`, also bump the `dbt_version` in `setup.py`. 
diff --git a/dbt/include/synapse/macros/adapters/relation.sql b/dbt/include/synapse/macros/adapters/relation.sql index 8c1d3582..32b74e00 100644 --- a/dbt/include/synapse/macros/adapters/relation.sql +++ b/dbt/include/synapse/macros/adapters/relation.sql @@ -5,16 +5,21 @@ {% endmacro %} {% macro synapse__drop_relation_script(relation) -%} - {% if relation.type == 'view' -%} + {% if relation.type == 'view' or relation.type == 'materialized_view' -%} {% set object_id_type = 'V' %} {% elif relation.type == 'table'%} {% set object_id_type = 'U' %} {%- else -%} invalid target name {% endif %} + if object_id ('{{ relation.include(database=False) }}','{{ object_id_type }}') is not null + {% if relation.type == 'view' or relation.type == 'table' -%} begin drop {{ relation.type }} {{ relation.include(database=False) }} end + {% elif relation.type == 'materialized_view' %} + alter materialized view {{ relation.include(database=False)}} disable + {% endif %} {% endmacro %} @@ -37,6 +42,6 @@ {% macro synapse__truncate_relation(relation) %} {% call statement('truncate_relation') -%} - truncate table {{ relation }} + truncate table {{ relation }} {%- endcall %} {% endmacro %} diff --git a/dbt/include/synapse/macros/materializations/models/materialized_view/materialized_view.sql b/dbt/include/synapse/macros/materializations/models/materialized_view/materialized_view.sql new file mode 100644 index 00000000..4456377d --- /dev/null +++ b/dbt/include/synapse/macros/materializations/models/materialized_view/materialized_view.sql @@ -0,0 +1,14 @@ +{% macro ref(model_name) %} + + {% do return(builtins.ref(model_name).include(database=false)) %} + +{% endmacro %} + +{% macro synapse__get_create_materialized_view_as_sql(relation, sql) %} + {%- set dist = config.get('dist', default="ROUND_ROBIN") -%} + + CREATE materialized view {{ relation.include(database=False) }} + WITH ( DISTRIBUTION = {{dist}} ) + AS {{ sql }}; + +{% endmacro %} diff --git 
a/tests/functional/adapter/test_materialized_views.py b/tests/functional/adapter/test_materialized_views.py new file mode 100644 index 00000000..ca31f78e --- /dev/null +++ b/tests/functional/adapter/test_materialized_views.py @@ -0,0 +1,67 @@ +import pytest +from dbt.tests.adapter.materialized_view.basic import MaterializedViewBasic +from dbt.tests.util import check_relation_types, get_model_file, run_dbt, set_model_file + +MY_TABLE = """ +{{ config( + materialized='table', +) }} +select i.id, count(i.value) as counted +from {{ ref('my_seed') }} i +group by i.id +""" + + +MY_VIEW = """ +{{ config( + materialized='view', +) }} +select i.id, count(i.value) as counted +from {{ ref('my_seed') }} i +group by i.id +""" + + +MY_MATERIALIZED_VIEW = """ +{{ config( + materialized='materialized_view', +) }} +select i.id, count(*) as counted +from {{ ref('my_seed') }} i +group by i.id +""" + + +class TestMaterializedViewsBasicSynapse(MaterializedViewBasic): + @pytest.fixture(scope="class", autouse=True) + def models(self): + yield { + "my_table.sql": MY_TABLE, + "my_view.sql": MY_VIEW, + "my_materialized_view.sql": MY_MATERIALIZED_VIEW, + } + + @pytest.fixture(scope="class", autouse=True) + def setup(self, project, my_materialized_view): + run_dbt(["seed"]) + run_dbt(["run", "--models", my_materialized_view.identifier, "--full-refresh"]) + + # the tests touch these files, store their contents in memory + initial_model = get_model_file(project, my_materialized_view) + + yield + + # and then reset them after the test runs + set_model_file(project, my_materialized_view, initial_model) + + def test_materialized_view_create(self, project): + # check relation types + expected = { + # sys.objects has no type "materialized view", it's type "view" + "my_materialized_view": "view", + } + return check_relation_types(project.adapter, expected) + + @pytest.mark.skip(reason="rename materialized view not supported") + def test_materialized_view_create_idempotent(self, project, 
my_materialized_view): + pass From 6f31b574af9c940968402df759918d88d4c12118 Mon Sep 17 00:00:00 2001 From: artc95 Date: Thu, 8 Feb 2024 10:20:33 +0100 Subject: [PATCH 04/32] bump for v1.6.0rc1 - __version__.py, setup.py, dev_requirements.txt --- dbt/adapters/synapse/__version__.py | 2 +- dev_requirements.txt | 2 +- setup.py | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/dbt/adapters/synapse/__version__.py b/dbt/adapters/synapse/__version__.py index 9b363e24..7a0130d4 100644 --- a/dbt/adapters/synapse/__version__.py +++ b/dbt/adapters/synapse/__version__.py @@ -1 +1 @@ -version = "1.4.1rc1" +version = "1.6.0rc1" diff --git a/dev_requirements.txt b/dev_requirements.txt index 06071e8d..c3268f92 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -4,7 +4,7 @@ wheel==0.40.0 pre-commit==2.21.0;python_version<"3.8" pre-commit==3.3.1;python_version>="3.8" pytest-dotenv==0.5.2 -dbt-tests-adapter~=1.4.9 +dbt-tests-adapter~=1.6.9 pyodbc==4.0.39 --no-binary :all: # if Windows, remove aiohttp==3.8.3 azure-mgmt-synapse==2.0.0 diff --git a/setup.py b/setup.py index dec7e0c8..50ff82c5 100644 --- a/setup.py +++ b/setup.py @@ -15,8 +15,8 @@ "Anders Swanson", "Sam Debruyn", ] -dbt_version = "1.4" -dbt_fabric_requirement = "dbt-fabric~=1.4.0rc3" +dbt_version = "1.6" +dbt_fabric_requirement = "dbt-fabric~=1.6.0" description = """An Azure Synapse adapter plugin for dbt""" this_directory = os.path.abspath(os.path.dirname(__file__)) From c2023715ecf806162d80ecccffa52b2f7b59f8fa Mon Sep 17 00:00:00 2001 From: hellafech Date: Mon, 12 Feb 2024 14:01:51 +0100 Subject: [PATCH 05/32] Added feature dbt-debug (test) --- .gitignore | 1 + CHANGELOG.md | 5 ++ dbt-synapse | 73 +++++++++++++++++++++++ tests/functional/adapter/test_debug.py | 82 ++++++++++++++++++++++++++ 4 files changed, 161 insertions(+) create mode 100644 dbt-synapse create mode 100644 tests/functional/adapter/test_debug.py diff --git a/.gitignore b/.gitignore index d8e2bbd4..18cba89c 100644 --- 
a/.gitignore +++ b/.gitignore @@ -98,3 +98,4 @@ ENV/ env.bak/ venv.bak/ /test.env +.dbtenv/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 193369a6..3207f49b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,9 @@ # Changelog +### v1.6.0 + +## Features +* Added tests related to dbt-debug to test --connection parameter + ## v1.4.1rc1 #### Under the hood diff --git a/dbt-synapse b/dbt-synapse new file mode 100644 index 00000000..cd5ccd52 --- /dev/null +++ b/dbt-synapse @@ -0,0 +1,73 @@ +# Development of the adapter + +The Synapse adapter uses the [dbt-sqlserver](https://github.com/dbt-msft/dbt-sqlserver) adapter underneath. +This repository mostly contains a set of macros that override the behavior of dbt-sqlserver so that it works with Synapse. + +Python 3.10 is used for developing the adapter. To get started, bootstrap your environment as follows: + +Create a virtual environment, [pyenv](https://github.com/pyenv/pyenv) is used in the example: + +```shell +pyenv install 3.10.7 +pyenv virtualenv 3.10.7 dbt-synapse +pyenv activate dbt-synapse +``` + +Install the development dependencies and pre-commit and get information about possible make commands: + +```shell +make dev +make help +``` + +[Pre-commit](https://pre-commit.com/) helps us to maintain a consistent style and code quality across the entire project. +After running `make dev`, pre-commit will automatically validate your commits and fix any formatting issues whenever possible. + +## Testing + +The functional tests require a running Synapse Dedicated SQL Pool instance. +You can configure the connection to this instance with the file `test.env` in the root of the project. +You can use the provided `test.env.sample` as a base. + +```shell +cp test.env.sample test.env +``` + +You can use the following command to run the functional tests: + +```shell +make functional +``` + +## CI/CD + +We use Docker images that have all the things we need to test the adapter in the CI/CD workflows. 
+The Dockerfile and image are part of the [dbt-sqlserver](https://github.com/dbt-msft/dbt-sqlserver) repository. + +All CI/CD pipelines are using GitHub Actions. The following pipelines are available: + +* `integration-tests-azure`: runs the integration tests for Azure SQL Server. +* `release-version`: publishes the adapter to PyPI. + +There is an additional [Pre-commit](https://pre-commit.ci/) pipeline that validates the code style. + +### Azure integration tests + +The following environment variables are available: + +* `DBT_SYNAPSE_SERVER`: Name of the Synapse workspace +* `DBT_SYNAPSE_DB`: Name of the Synapse dedicated SQL pool +* `DBT_AZURE_TENANT`: Azure tenant ID +* `DBT_AZURE_SUBSCRIPTION_ID`: Azure subscription ID +* `DBT_AZURE_RESOURCE_GROUP_NAME`: Azure resource group name +* `DBT_AZURE_SP_NAME`: Client/application ID of the service principal used to connect to Azure AD +* `DBT_AZURE_SP_SECRET`: Password of the service principal used to connect to Azure AD + +## Releasing a new version + +Make sure the version number is bumped in `__version__.py`. Then, create a git tag named `v` and push it to GitHub. +A GitHub Actions workflow will be triggered to build the package and push it to PyPI. + +Make sure that the dependency to dbt-sqlserver is bumped to a compatible version in `setup.py`. + +If you're releasing support for a new version of `dbt-core`, also bump the `dbt_version` in `setup.py`. 
diff --git a/tests/functional/adapter/test_debug.py b/tests/functional/adapter/test_debug.py new file mode 100644 index 00000000..57063e6b --- /dev/null +++ b/tests/functional/adapter/test_debug.py @@ -0,0 +1,82 @@ +import os +import re + +import pytest +import yaml +from dbt.cli.exceptions import DbtUsageException +from dbt.tests.adapter.dbt_debug.test_dbt_debug import BaseDebug, BaseDebugProfileVariable +from dbt.tests.util import run_dbt, run_dbt_and_capture + + +class TestDebugSynapse(BaseDebug): + def test_ok(self, project): + run_dbt(["debug"]) + assert "ERROR" not in self.capsys.readouterr().out + + def test_nopass(self, project): + run_dbt(["debug", "--target", "nopass"], expect_pass=False) + self.assertGotValue(re.compile(r"\s+profiles\.yml file"), "ERROR invalid") + + def test_connection_flag(self, project): + """Testing the --connection flag works as expected, including that output is not lost""" + _, out = run_dbt_and_capture(["debug", "--connection"]) + assert "Skipping steps before connection verification" in out + + _, out = run_dbt_and_capture( + ["debug", "--connection", "--target", "NONE"], expect_pass=False + ) + assert "1 check failed" in out + assert "The profile 'test' does not have a target named 'NONE'." 
in out + + _, out = run_dbt_and_capture( + ["debug", "--connection", "--profiles-dir", "NONE"], expect_pass=False + ) + assert "Using profiles dir at NONE" + assert "1 check failed" in out + assert "dbt looked for a profiles.yml file in NONE" in out + + def test_wronguser(self, project): + run_dbt(["debug", "--target", "wronguser"], expect_pass=False) + self.assertGotValue(re.compile(r"\s+Connection test"), "ERROR") + + def test_empty_target(self, project): + run_dbt(["debug", "--target", "none_target"], expect_pass=False) + self.assertGotValue(re.compile(r"\s+output 'none_target'"), "misconfigured") + + +class TestDebugProfileVariableSynapse(BaseDebugProfileVariable): + pass + + +class TestDebugInvalidProjectSynapse(BaseDebug): + def test_empty_project(self, project): + with open("dbt_project.yml", "w") as f: # noqa: F841 + pass + + run_dbt(["debug", "--profile", "test"], expect_pass=False) + splitout = self.capsys.readouterr().out.split("\n") + self.check_project(splitout) + + def test_badproject(self, project): + update_project = {"invalid-key": "not a valid key so this is bad project"} + + with open("dbt_project.yml", "w") as f: + yaml.safe_dump(update_project, f) + + run_dbt(["debug", "--profile", "test"], expect_pass=False) + splitout = self.capsys.readouterr().out.split("\n") + self.check_project(splitout) + + def test_not_found_project(self, project): + with pytest.raises(DbtUsageException): + run_dbt(["debug", "--project-dir", "nopass"]) + + def test_invalid_project_outside_current_dir(self, project): + # create a dbt_project.yml + project_config = {"invalid-key": "not a valid key in this project"} + os.makedirs("custom", exist_ok=True) + with open("custom/dbt_project.yml", "w") as f: + yaml.safe_dump(project_config, f, default_flow_style=True) + run_dbt(["debug", "--project-dir", "custom"], expect_pass=False) + splitout = self.capsys.readouterr().out.split("\n") + self.check_project(splitout) From df48b275b6f38c251f5b950881c4e812c6c961fa Mon Sep 17 
00:00:00 2001 From: artc95 Date: Tue, 13 Feb 2024 11:26:59 +0100 Subject: [PATCH 06/32] add get_create_materialized_view_as_sql macro and tests ; undo accidental commits --- .gitignore | 1 - dbt-synapse | 73 ------------------- .../synapse/macros/adapters/relation.sql | 9 ++- .../materialized_view/materialized_view.sql | 14 ++++ .../adapter/test_materialized_views.py | 67 +++++++++++++++++ 5 files changed, 88 insertions(+), 76 deletions(-) delete mode 100644 dbt-synapse create mode 100644 dbt/include/synapse/macros/materializations/models/materialized_view/materialized_view.sql create mode 100644 tests/functional/adapter/test_materialized_views.py diff --git a/.gitignore b/.gitignore index 18cba89c..d8e2bbd4 100644 --- a/.gitignore +++ b/.gitignore @@ -98,4 +98,3 @@ ENV/ env.bak/ venv.bak/ /test.env -.dbtenv/ diff --git a/dbt-synapse b/dbt-synapse deleted file mode 100644 index cd5ccd52..00000000 --- a/dbt-synapse +++ /dev/null @@ -1,73 +0,0 @@ -# Development of the adapter - -The Synapse adapter uses the [dbt-sqlserver](https://github.com/dbt-msft/dbt-sqlserver) adapter underneath. -This repository mostly contains a set of macros that override the behavior of dbt-sqlserver so that it works with Synapse. - -Python 3.10 is used for developing the adapter. To get started, bootstrap your environment as follows: - -Create a virtual environment, [pyenv](https://github.com/pyenv/pyenv) is used in the example: - -```shell -pyenv install 3.10.7 -pyenv virtualenv 3.10.7 dbt-synapse -pyenv activate dbt-synapse -``` - -Install the development dependencies and pre-commit and get information about possible make commands: - -```shell -make dev -make help -``` - -[Pre-commit](https://pre-commit.com/) helps us to maintain a consistent style and code quality across the entire project. -After running `make dev`, pre-commit will automatically validate your commits and fix any formatting issues whenever possible. 
- -## Testing - -The functional tests require a running Synapse Dedicated SQL Pool instance. -You can configure the connection to this instance with the file `test.env` in the root of the project. -You can use the provided `test.env.sample` as a base. - -```shell -cp test.env.sample test.env -``` - -You can use the following command to run the functional tests: - -```shell -make functional -``` - -## CI/CD - -We use Docker images that have all the things we need to test the adapter in the CI/CD workflows. -The Dockerfile and image are part of the [dbt-sqlserver](https://github.com/dbt-msft/dbt-sqlserver) repository. - -All CI/CD pipelines are using GitHub Actions. The following pipelines are available: - -* `integration-tests-azure`: runs the integration tests for Azure SQL Server. -* `release-version`: publishes the adapter to PyPI. - -There is an additional [Pre-commit](https://pre-commit.ci/) pipeline that validates the code style. - -### Azure integration tests - -The following environment variables are available: - -* `DBT_SYNAPSE_SERVER`: Name of the Synapse workspace -* `DBT_SYNAPSE_DB`: Name of the Synapse dedicated SQL pool -* `DBT_AZURE_TENANT`: Azure tenant ID -* `DBT_AZURE_SUBSCRIPTION_ID`: Azure subscription ID -* `DBT_AZURE_RESOURCE_GROUP_NAME`: Azure resource group name -* `DBT_AZURE_SP_NAME`: Client/application ID of the service principal used to connect to Azure AD -* `DBT_AZURE_SP_SECRET`: Password of the service principal used to connect to Azure AD - -## Releasing a new version - -Make sure the version number is bumped in `__version__.py`. Then, create a git tag named `v` and push it to GitHub. -A GitHub Actions workflow will be triggered to build the package and push it to PyPI. - -Make sure that the dependency to dbt-sqlserver is bumped to a compatible version in `setup.py`. - -If you're releasing support for a new version of `dbt-core`, also bump the `dbt_version` in `setup.py`. 
diff --git a/dbt/include/synapse/macros/adapters/relation.sql b/dbt/include/synapse/macros/adapters/relation.sql index 8c1d3582..32b74e00 100644 --- a/dbt/include/synapse/macros/adapters/relation.sql +++ b/dbt/include/synapse/macros/adapters/relation.sql @@ -5,16 +5,21 @@ {% endmacro %} {% macro synapse__drop_relation_script(relation) -%} - {% if relation.type == 'view' -%} + {% if relation.type == 'view' or relation.type == 'materialized_view' -%} {% set object_id_type = 'V' %} {% elif relation.type == 'table'%} {% set object_id_type = 'U' %} {%- else -%} invalid target name {% endif %} + if object_id ('{{ relation.include(database=False) }}','{{ object_id_type }}') is not null + {% if relation.type == 'view' or relation.type == 'table' -%} begin drop {{ relation.type }} {{ relation.include(database=False) }} end + {% elif relation.type == 'materialized_view' %} + alter materialized view {{ relation.include(database=False)}} disable + {% endif %} {% endmacro %} @@ -37,6 +42,6 @@ {% macro synapse__truncate_relation(relation) %} {% call statement('truncate_relation') -%} - truncate table {{ relation }} + truncate table {{ relation }} {%- endcall %} {% endmacro %} diff --git a/dbt/include/synapse/macros/materializations/models/materialized_view/materialized_view.sql b/dbt/include/synapse/macros/materializations/models/materialized_view/materialized_view.sql new file mode 100644 index 00000000..4456377d --- /dev/null +++ b/dbt/include/synapse/macros/materializations/models/materialized_view/materialized_view.sql @@ -0,0 +1,14 @@ +{% macro ref(model_name) %} + + {% do return(builtins.ref(model_name).include(database=false)) %} + +{% endmacro %} + +{% macro synapse__get_create_materialized_view_as_sql(relation, sql) %} + {%- set dist = config.get('dist', default="ROUND_ROBIN") -%} + + CREATE materialized view {{ relation.include(database=False) }} + WITH ( DISTRIBUTION = {{dist}} ) + AS {{ sql }}; + +{% endmacro %} diff --git 
a/tests/functional/adapter/test_materialized_views.py b/tests/functional/adapter/test_materialized_views.py new file mode 100644 index 00000000..ca31f78e --- /dev/null +++ b/tests/functional/adapter/test_materialized_views.py @@ -0,0 +1,67 @@ +import pytest +from dbt.tests.adapter.materialized_view.basic import MaterializedViewBasic +from dbt.tests.util import check_relation_types, get_model_file, run_dbt, set_model_file + +MY_TABLE = """ +{{ config( + materialized='table', +) }} +select i.id, count(i.value) as counted +from {{ ref('my_seed') }} i +group by i.id +""" + + +MY_VIEW = """ +{{ config( + materialized='view', +) }} +select i.id, count(i.value) as counted +from {{ ref('my_seed') }} i +group by i.id +""" + + +MY_MATERIALIZED_VIEW = """ +{{ config( + materialized='materialized_view', +) }} +select i.id, count(*) as counted +from {{ ref('my_seed') }} i +group by i.id +""" + + +class TestMaterializedViewsBasicSynapse(MaterializedViewBasic): + @pytest.fixture(scope="class", autouse=True) + def models(self): + yield { + "my_table.sql": MY_TABLE, + "my_view.sql": MY_VIEW, + "my_materialized_view.sql": MY_MATERIALIZED_VIEW, + } + + @pytest.fixture(scope="class", autouse=True) + def setup(self, project, my_materialized_view): + run_dbt(["seed"]) + run_dbt(["run", "--models", my_materialized_view.identifier, "--full-refresh"]) + + # the tests touch these files, store their contents in memory + initial_model = get_model_file(project, my_materialized_view) + + yield + + # and then reset them after the test runs + set_model_file(project, my_materialized_view, initial_model) + + def test_materialized_view_create(self, project): + # check relation types + expected = { + # sys.objects has no type "materialized view", it's type "view" + "my_materialized_view": "view", + } + return check_relation_types(project.adapter, expected) + + @pytest.mark.skip(reason="rename materialized view not supported") + def test_materialized_view_create_idempotent(self, project, 
my_materialized_view): + pass From 1a8e585127474456e9bde24aa7cabd1afa4d0ff5 Mon Sep 17 00:00:00 2001 From: artc95 Date: Tue, 13 Feb 2024 11:43:36 +0100 Subject: [PATCH 07/32] notify to remove pyodbc for Windows in dev_requirements.txt (rebase onto latest dependency_dbtsqlserver_to_dbtfabric branch) --- dev_requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev_requirements.txt b/dev_requirements.txt index f21875fc..c3268f92 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -5,7 +5,7 @@ pre-commit==2.21.0;python_version<"3.8" pre-commit==3.3.1;python_version>="3.8" pytest-dotenv==0.5.2 dbt-tests-adapter~=1.6.9 -pyodbc==4.0.39 --no-binary :all: +pyodbc==4.0.39 --no-binary :all: # if Windows, remove aiohttp==3.8.3 azure-mgmt-synapse==2.0.0 flaky==3.7.0 From 36d9c54a441abac65ff09bf7f18f73fdd4d06964 Mon Sep 17 00:00:00 2001 From: artc95 Date: Tue, 13 Feb 2024 16:39:20 +0100 Subject: [PATCH 08/32] update to use _make_ref_key_dict (instead of _make_ref_key_msg from dbt-core v1.4.9) --- dbt/adapters/synapse/synapse_adapter.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dbt/adapters/synapse/synapse_adapter.py b/dbt/adapters/synapse/synapse_adapter.py index 9e6dfddb..5c02b579 100644 --- a/dbt/adapters/synapse/synapse_adapter.py +++ b/dbt/adapters/synapse/synapse_adapter.py @@ -1,5 +1,5 @@ from dbt.adapters.base.relation import BaseRelation -from dbt.adapters.cache import _make_ref_key_msg +from dbt.adapters.cache import _make_ref_key_dict from dbt.adapters.fabric import FabricAdapter from dbt.adapters.sql.impl import CREATE_SCHEMA_MACRO_NAME from dbt.events.functions import fire_event @@ -13,7 +13,7 @@ class SynapseAdapter(FabricAdapter): def create_schema(self, relation: BaseRelation) -> None: relation = relation.without_identifier() - fire_event(SchemaCreation(relation=_make_ref_key_msg(relation))) + fire_event(SchemaCreation(relation=_make_ref_key_dict(relation))) macro_name = CREATE_SCHEMA_MACRO_NAME 
kwargs = { "relation": relation, From fc9c500009e6ff4574eaaabf0c57343fe3bfa8ae Mon Sep 17 00:00:00 2001 From: artc95 Date: Thu, 8 Feb 2024 10:20:33 +0100 Subject: [PATCH 09/32] bump for v1.6.0rc1 - __version__.py, setup.py, dev_requirements.txt --- dbt/adapters/synapse/__version__.py | 2 +- dev_requirements.txt | 2 +- setup.py | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/dbt/adapters/synapse/__version__.py b/dbt/adapters/synapse/__version__.py index 9b363e24..7a0130d4 100644 --- a/dbt/adapters/synapse/__version__.py +++ b/dbt/adapters/synapse/__version__.py @@ -1 +1 @@ -version = "1.4.1rc1" +version = "1.6.0rc1" diff --git a/dev_requirements.txt b/dev_requirements.txt index 7e77f77b..0f3a14da 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -4,7 +4,7 @@ wheel==0.40.0 pre-commit==2.21.0;python_version<"3.8" pre-commit==3.3.1;python_version>="3.8" pytest-dotenv==0.5.2 -dbt-tests-adapter~=1.4.9 +dbt-tests-adapter~=1.6.9 aiohttp==3.8.3 azure-mgmt-synapse==2.0.0 flaky==3.7.0 diff --git a/setup.py b/setup.py index dec7e0c8..50ff82c5 100644 --- a/setup.py +++ b/setup.py @@ -15,8 +15,8 @@ "Anders Swanson", "Sam Debruyn", ] -dbt_version = "1.4" -dbt_fabric_requirement = "dbt-fabric~=1.4.0rc3" +dbt_version = "1.6" +dbt_fabric_requirement = "dbt-fabric~=1.6.0" description = """An Azure Synapse adapter plugin for dbt""" this_directory = os.path.abspath(os.path.dirname(__file__)) From c89f2e6528aca3e5f56e5daede15f7850a5b994a Mon Sep 17 00:00:00 2001 From: hellafech Date: Mon, 12 Feb 2024 14:01:51 +0100 Subject: [PATCH 10/32] Added feature dbt-debug (test) --- .gitignore | 1 + CHANGELOG.md | 5 ++ dbt-synapse | 73 +++++++++++++++++++++++ tests/functional/adapter/test_debug.py | 82 ++++++++++++++++++++++++++ 4 files changed, 161 insertions(+) create mode 100644 dbt-synapse create mode 100644 tests/functional/adapter/test_debug.py diff --git a/.gitignore b/.gitignore index d8e2bbd4..18cba89c 100644 --- a/.gitignore +++ b/.gitignore @@ -98,3 
+98,4 @@ ENV/ env.bak/ venv.bak/ /test.env +.dbtenv/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 6e1b2e67..0e872b0c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,9 @@ # Changelog +### v1.6.0 + +## Features +* Added tests related to dbt-debug to test --connection parameter + ## v1.4.1rc1 #### Under the hood diff --git a/dbt-synapse b/dbt-synapse new file mode 100644 index 00000000..cd5ccd52 --- /dev/null +++ b/dbt-synapse @@ -0,0 +1,73 @@ +# Development of the adapter + +The Synapse adapter uses the [dbt-sqlserver](https://github.com/dbt-msft/dbt-sqlserver) adapter underneath. +This repository mostly contains a set of macros that override the behavior of dbt-sqlserver so that it works with Synapse. + +Python 3.10 is used for developing the adapter. To get started, bootstrap your environment as follows: + +Create a virtual environment, [pyenv](https://github.com/pyenv/pyenv) is used in the example: + +```shell +pyenv install 3.10.7 +pyenv virtualenv 3.10.7 dbt-synapse +pyenv activate dbt-synapse +``` + +Install the development dependencies and pre-commit and get information about possible make commands: + +```shell +make dev +make help +``` + +[Pre-commit](https://pre-commit.com/) helps us to maintain a consistent style and code quality across the entire project. +After running `make dev`, pre-commit will automatically validate your commits and fix any formatting issues whenever possible. + +## Testing + +The functional tests require a running Synapse Dedicated SQL Pool instance. +You can configure the connection to this instance with the file `test.env` in the root of the project. +You can use the provided `test.env.sample` as a base. + +```shell +cp test.env.sample test.env +``` + +You can use the following command to run the functional tests: + +```shell +make functional +``` + +## CI/CD + +We use Docker images that have all the things we need to test the adapter in the CI/CD workflows. 
+The Dockerfile and image are part of the [dbt-sqlserver](https://github.com/dbt-msft/dbt-sqlserver) repository. + +All CI/CD pipelines are using GitHub Actions. The following pipelines are available: + +* `integration-tests-azure`: runs the integration tests for Azure SQL Server. +* `release-version`: publishes the adapter to PyPI. + +There is an additional [Pre-commit](https://pre-commit.ci/) pipeline that validates the code style. + +### Azure integration tests + +The following environment variables are available: + +* `DBT_SYNAPSE_SERVER`: Name of the Synapse workspace +* `DBT_SYNAPSE_DB`: Name of the Synapse dedicated SQL pool +* `DBT_AZURE_TENANT`: Azure tenant ID +* `DBT_AZURE_SUBSCRIPTION_ID`: Azure subscription ID +* `DBT_AZURE_RESOURCE_GROUP_NAME`: Azure resource group name +* `DBT_AZURE_SP_NAME`: Client/application ID of the service principal used to connect to Azure AD +* `DBT_AZURE_SP_SECRET`: Password of the service principal used to connect to Azure AD + +## Releasing a new version + +Make sure the version number is bumped in `__version__.py`. Then, create a git tag named `v` and push it to GitHub. +A GitHub Actions workflow will be triggered to build the package and push it to PyPI. + +Make sure that the dependency to dbt-sqlserver is bumped to a compatible version in `setup.py`. + +If you're releasing support for a new version of `dbt-core`, also bump the `dbt_version` in `setup.py`. 
diff --git a/tests/functional/adapter/test_debug.py b/tests/functional/adapter/test_debug.py new file mode 100644 index 00000000..57063e6b --- /dev/null +++ b/tests/functional/adapter/test_debug.py @@ -0,0 +1,82 @@ +import os +import re + +import pytest +import yaml +from dbt.cli.exceptions import DbtUsageException +from dbt.tests.adapter.dbt_debug.test_dbt_debug import BaseDebug, BaseDebugProfileVariable +from dbt.tests.util import run_dbt, run_dbt_and_capture + + +class TestDebugSynapse(BaseDebug): + def test_ok(self, project): + run_dbt(["debug"]) + assert "ERROR" not in self.capsys.readouterr().out + + def test_nopass(self, project): + run_dbt(["debug", "--target", "nopass"], expect_pass=False) + self.assertGotValue(re.compile(r"\s+profiles\.yml file"), "ERROR invalid") + + def test_connection_flag(self, project): + """Testing the --connection flag works as expected, including that output is not lost""" + _, out = run_dbt_and_capture(["debug", "--connection"]) + assert "Skipping steps before connection verification" in out + + _, out = run_dbt_and_capture( + ["debug", "--connection", "--target", "NONE"], expect_pass=False + ) + assert "1 check failed" in out + assert "The profile 'test' does not have a target named 'NONE'." 
in out + + _, out = run_dbt_and_capture( + ["debug", "--connection", "--profiles-dir", "NONE"], expect_pass=False + ) + assert "Using profiles dir at NONE" + assert "1 check failed" in out + assert "dbt looked for a profiles.yml file in NONE" in out + + def test_wronguser(self, project): + run_dbt(["debug", "--target", "wronguser"], expect_pass=False) + self.assertGotValue(re.compile(r"\s+Connection test"), "ERROR") + + def test_empty_target(self, project): + run_dbt(["debug", "--target", "none_target"], expect_pass=False) + self.assertGotValue(re.compile(r"\s+output 'none_target'"), "misconfigured") + + +class TestDebugProfileVariableSynapse(BaseDebugProfileVariable): + pass + + +class TestDebugInvalidProjectSynapse(BaseDebug): + def test_empty_project(self, project): + with open("dbt_project.yml", "w") as f: # noqa: F841 + pass + + run_dbt(["debug", "--profile", "test"], expect_pass=False) + splitout = self.capsys.readouterr().out.split("\n") + self.check_project(splitout) + + def test_badproject(self, project): + update_project = {"invalid-key": "not a valid key so this is bad project"} + + with open("dbt_project.yml", "w") as f: + yaml.safe_dump(update_project, f) + + run_dbt(["debug", "--profile", "test"], expect_pass=False) + splitout = self.capsys.readouterr().out.split("\n") + self.check_project(splitout) + + def test_not_found_project(self, project): + with pytest.raises(DbtUsageException): + run_dbt(["debug", "--project-dir", "nopass"]) + + def test_invalid_project_outside_current_dir(self, project): + # create a dbt_project.yml + project_config = {"invalid-key": "not a valid key in this project"} + os.makedirs("custom", exist_ok=True) + with open("custom/dbt_project.yml", "w") as f: + yaml.safe_dump(project_config, f, default_flow_style=True) + run_dbt(["debug", "--project-dir", "custom"], expect_pass=False) + splitout = self.capsys.readouterr().out.split("\n") + self.check_project(splitout) From c76052a26d929abb9f5977ae5ee9dc05d6a064b4 Mon Sep 17 
00:00:00 2001 From: artc95 Date: Tue, 13 Feb 2024 11:26:59 +0100 Subject: [PATCH 11/32] add get_create_materialized_view_as_sql macro and tests ; undo accidental commits --- .gitignore | 1 - dbt-synapse | 73 ------------------- .../synapse/macros/adapters/relation.sql | 9 ++- .../materialized_view/materialized_view.sql | 14 ++++ .../adapter/test_materialized_views.py | 67 +++++++++++++++++ 5 files changed, 88 insertions(+), 76 deletions(-) delete mode 100644 dbt-synapse create mode 100644 dbt/include/synapse/macros/materializations/models/materialized_view/materialized_view.sql create mode 100644 tests/functional/adapter/test_materialized_views.py diff --git a/.gitignore b/.gitignore index 18cba89c..d8e2bbd4 100644 --- a/.gitignore +++ b/.gitignore @@ -98,4 +98,3 @@ ENV/ env.bak/ venv.bak/ /test.env -.dbtenv/ diff --git a/dbt-synapse b/dbt-synapse deleted file mode 100644 index cd5ccd52..00000000 --- a/dbt-synapse +++ /dev/null @@ -1,73 +0,0 @@ -# Development of the adapter - -The Synapse adapter uses the [dbt-sqlserver](https://github.com/dbt-msft/dbt-sqlserver) adapter underneath. -This repository mostly contains a set of macros that override the behavior of dbt-sqlserver so that it works with Synapse. - -Python 3.10 is used for developing the adapter. To get started, bootstrap your environment as follows: - -Create a virtual environment, [pyenv](https://github.com/pyenv/pyenv) is used in the example: - -```shell -pyenv install 3.10.7 -pyenv virtualenv 3.10.7 dbt-synapse -pyenv activate dbt-synapse -``` - -Install the development dependencies and pre-commit and get information about possible make commands: - -```shell -make dev -make help -``` - -[Pre-commit](https://pre-commit.com/) helps us to maintain a consistent style and code quality across the entire project. -After running `make dev`, pre-commit will automatically validate your commits and fix any formatting issues whenever possible. 
- -## Testing - -The functional tests require a running Synapse Dedicated SQL Pool instance. -You can configure the connection to this instance with the file `test.env` in the root of the project. -You can use the provided `test.env.sample` as a base. - -```shell -cp test.env.sample test.env -``` - -You can use the following command to run the functional tests: - -```shell -make functional -``` - -## CI/CD - -We use Docker images that have all the things we need to test the adapter in the CI/CD workflows. -The Dockerfile and image are part of the [dbt-sqlserver](https://github.com/dbt-msft/dbt-sqlserver) repository. - -All CI/CD pipelines are using GitHub Actions. The following pipelines are available: - -* `integration-tests-azure`: runs the integration tests for Azure SQL Server. -* `release-version`: publishes the adapter to PyPI. - -There is an additional [Pre-commit](https://pre-commit.ci/) pipeline that validates the code style. - -### Azure integration tests - -The following environment variables are available: - -* `DBT_SYNAPSE_SERVER`: Name of the Synapse workspace -* `DBT_SYNAPSE_DB`: Name of the Synapse dedicated SQL pool -* `DBT_AZURE_TENANT`: Azure tenant ID -* `DBT_AZURE_SUBSCRIPTION_ID`: Azure subscription ID -* `DBT_AZURE_RESOURCE_GROUP_NAME`: Azure resource group name -* `DBT_AZURE_SP_NAME`: Client/application ID of the service principal used to connect to Azure AD -* `DBT_AZURE_SP_SECRET`: Password of the service principal used to connect to Azure AD - -## Releasing a new version - -Make sure the version number is bumped in `__version__.py`. Then, create a git tag named `v` and push it to GitHub. -A GitHub Actions workflow will be triggered to build the package and push it to PyPI. - -Make sure that the dependency to dbt-sqlserver is bumped to a compatible version in `setup.py`. - -If you're releasing support for a new version of `dbt-core`, also bump the `dbt_version` in `setup.py`. 
diff --git a/dbt/include/synapse/macros/adapters/relation.sql b/dbt/include/synapse/macros/adapters/relation.sql index 8c1d3582..32b74e00 100644 --- a/dbt/include/synapse/macros/adapters/relation.sql +++ b/dbt/include/synapse/macros/adapters/relation.sql @@ -5,16 +5,21 @@ {% endmacro %} {% macro synapse__drop_relation_script(relation) -%} - {% if relation.type == 'view' -%} + {% if relation.type == 'view' or relation.type == 'materialized_view' -%} {% set object_id_type = 'V' %} {% elif relation.type == 'table'%} {% set object_id_type = 'U' %} {%- else -%} invalid target name {% endif %} + if object_id ('{{ relation.include(database=False) }}','{{ object_id_type }}') is not null + {% if relation.type == 'view' or relation.type == 'table' -%} begin drop {{ relation.type }} {{ relation.include(database=False) }} end + {% elif relation.type == 'materialized_view' %} + alter materialized view {{ relation.include(database=False)}} disable + {% endif %} {% endmacro %} @@ -37,6 +42,6 @@ {% macro synapse__truncate_relation(relation) %} {% call statement('truncate_relation') -%} - truncate table {{ relation }} + truncate table {{ relation }} {%- endcall %} {% endmacro %} diff --git a/dbt/include/synapse/macros/materializations/models/materialized_view/materialized_view.sql b/dbt/include/synapse/macros/materializations/models/materialized_view/materialized_view.sql new file mode 100644 index 00000000..4456377d --- /dev/null +++ b/dbt/include/synapse/macros/materializations/models/materialized_view/materialized_view.sql @@ -0,0 +1,14 @@ +{% macro ref(model_name) %} + + {% do return(builtins.ref(model_name).include(database=false)) %} + +{% endmacro %} + +{% macro synapse__get_create_materialized_view_as_sql(relation, sql) %} + {%- set dist = config.get('dist', default="ROUND_ROBIN") -%} + + CREATE materialized view {{ relation.include(database=False) }} + WITH ( DISTRIBUTION = {{dist}} ) + AS {{ sql }}; + +{% endmacro %} diff --git 
a/tests/functional/adapter/test_materialized_views.py b/tests/functional/adapter/test_materialized_views.py new file mode 100644 index 00000000..ca31f78e --- /dev/null +++ b/tests/functional/adapter/test_materialized_views.py @@ -0,0 +1,67 @@ +import pytest +from dbt.tests.adapter.materialized_view.basic import MaterializedViewBasic +from dbt.tests.util import check_relation_types, get_model_file, run_dbt, set_model_file + +MY_TABLE = """ +{{ config( + materialized='table', +) }} +select i.id, count(i.value) as counted +from {{ ref('my_seed') }} i +group by i.id +""" + + +MY_VIEW = """ +{{ config( + materialized='view', +) }} +select i.id, count(i.value) as counted +from {{ ref('my_seed') }} i +group by i.id +""" + + +MY_MATERIALIZED_VIEW = """ +{{ config( + materialized='materialized_view', +) }} +select i.id, count(*) as counted +from {{ ref('my_seed') }} i +group by i.id +""" + + +class TestMaterializedViewsBasicSynapse(MaterializedViewBasic): + @pytest.fixture(scope="class", autouse=True) + def models(self): + yield { + "my_table.sql": MY_TABLE, + "my_view.sql": MY_VIEW, + "my_materialized_view.sql": MY_MATERIALIZED_VIEW, + } + + @pytest.fixture(scope="class", autouse=True) + def setup(self, project, my_materialized_view): + run_dbt(["seed"]) + run_dbt(["run", "--models", my_materialized_view.identifier, "--full-refresh"]) + + # the tests touch these files, store their contents in memory + initial_model = get_model_file(project, my_materialized_view) + + yield + + # and then reset them after the test runs + set_model_file(project, my_materialized_view, initial_model) + + def test_materialized_view_create(self, project): + # check relation types + expected = { + # sys.objects has no type "materialized view", it's type "view" + "my_materialized_view": "view", + } + return check_relation_types(project.adapter, expected) + + @pytest.mark.skip(reason="rename materialized view not supported") + def test_materialized_view_create_idempotent(self, project, 
my_materialized_view): + pass From 44763ef8528ba7250dda8aa9ec9ad901b7c96a55 Mon Sep 17 00:00:00 2001 From: artc95 Date: Thu, 8 Feb 2024 10:20:33 +0100 Subject: [PATCH 12/32] bump for v1.6.0rc1 - __version__.py, setup.py, dev_requirements.txt --- dev_requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/dev_requirements.txt b/dev_requirements.txt index 0f3a14da..f21875fc 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -5,6 +5,7 @@ pre-commit==2.21.0;python_version<"3.8" pre-commit==3.3.1;python_version>="3.8" pytest-dotenv==0.5.2 dbt-tests-adapter~=1.6.9 +pyodbc==4.0.39 --no-binary :all: aiohttp==3.8.3 azure-mgmt-synapse==2.0.0 flaky==3.7.0 From 47fe55598279c967240f0d723fb444fada50a453 Mon Sep 17 00:00:00 2001 From: hellafech Date: Mon, 12 Feb 2024 14:01:51 +0100 Subject: [PATCH 13/32] Added feature dbt-debug (test) --- .gitignore | 1 + dbt-synapse | 73 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 74 insertions(+) create mode 100644 dbt-synapse diff --git a/.gitignore b/.gitignore index d8e2bbd4..18cba89c 100644 --- a/.gitignore +++ b/.gitignore @@ -98,3 +98,4 @@ ENV/ env.bak/ venv.bak/ /test.env +.dbtenv/ diff --git a/dbt-synapse b/dbt-synapse new file mode 100644 index 00000000..cd5ccd52 --- /dev/null +++ b/dbt-synapse @@ -0,0 +1,73 @@ +# Development of the adapter + +The Synapse adapter uses the [dbt-sqlserver](https://github.com/dbt-msft/dbt-sqlserver) adapter underneath. +This repository mostly contains a set of macros that override the behavior of dbt-sqlserver so that it works with Synapse. + +Python 3.10 is used for developing the adapter. 
To get started, bootstrap your environment as follows: + +Create a virtual environment, [pyenv](https://github.com/pyenv/pyenv) is used in the example: + +```shell +pyenv install 3.10.7 +pyenv virtualenv 3.10.7 dbt-synapse +pyenv activate dbt-synapse +``` + +Install the development dependencies and pre-commit and get information about possible make commands: + +```shell +make dev +make help +``` + +[Pre-commit](https://pre-commit.com/) helps us to maintain a consistent style and code quality across the entire project. +After running `make dev`, pre-commit will automatically validate your commits and fix any formatting issues whenever possible. + +## Testing + +The functional tests require a running Synapse Dedicated SQL Pool instance. +You can configure the connection to this instance with the file `test.env` in the root of the project. +You can use the provided `test.env.sample` as a base. + +```shell +cp test.env.sample test.env +``` + +You can use the following command to run the functional tests: + +```shell +make functional +``` + +## CI/CD + +We use Docker images that have all the things we need to test the adapter in the CI/CD workflows. +The Dockerfile and image are part of the [dbt-sqlserver](https://github.com/dbt-msft/dbt-sqlserver) repository. + +All CI/CD pipelines are using GitHub Actions. The following pipelines are available: + +* `integration-tests-azure`: runs the integration tests for Azure SQL Server. +* `release-version`: publishes the adapter to PyPI. + +There is an additional [Pre-commit](https://pre-commit.ci/) pipeline that validates the code style. 
+ +### Azure integration tests + +The following environment variables are available: + +* `DBT_SYNAPSE_SERVER`: Name of the Synapse workspace +* `DBT_SYNAPSE_DB`: Name of the Synapse dedicated SQL pool +* `DBT_AZURE_TENANT`: Azure tenant ID +* `DBT_AZURE_SUBSCRIPTION_ID`: Azure subscription ID +* `DBT_AZURE_RESOURCE_GROUP_NAME`: Azure resource group name +* `DBT_AZURE_SP_NAME`: Client/application ID of the service principal used to connect to Azure AD +* `DBT_AZURE_SP_SECRET`: Password of the service principal used to connect to Azure AD + +## Releasing a new version + +Make sure the version number is bumped in `__version__.py`. Then, create a git tag named `v` and push it to GitHub. +A GitHub Actions workflow will be triggered to build the package and push it to PyPI. + +Make sure that the dependency to dbt-sqlserver is bumped to a compatible version in `setup.py`. + +If you're releasing support for a new version of `dbt-core`, also bump the `dbt_version` in `setup.py`. From d72f2482b4fbe814045bcee75252b4532993be19 Mon Sep 17 00:00:00 2001 From: artc95 Date: Tue, 13 Feb 2024 11:26:59 +0100 Subject: [PATCH 14/32] add get_create_materialized_view_as_sql macro and tests ; undo accidental commits --- .gitignore | 1 - dbt-synapse | 73 ----------------------------------------------------- 2 files changed, 74 deletions(-) delete mode 100644 dbt-synapse diff --git a/.gitignore b/.gitignore index 18cba89c..d8e2bbd4 100644 --- a/.gitignore +++ b/.gitignore @@ -98,4 +98,3 @@ ENV/ env.bak/ venv.bak/ /test.env -.dbtenv/ diff --git a/dbt-synapse b/dbt-synapse deleted file mode 100644 index cd5ccd52..00000000 --- a/dbt-synapse +++ /dev/null @@ -1,73 +0,0 @@ -# Development of the adapter - -The Synapse adapter uses the [dbt-sqlserver](https://github.com/dbt-msft/dbt-sqlserver) adapter underneath. -This repository mostly contains a set of macros that override the behavior of dbt-sqlserver so that it works with Synapse. - -Python 3.10 is used for developing the adapter. 
To get started, bootstrap your environment as follows: - -Create a virtual environment, [pyenv](https://github.com/pyenv/pyenv) is used in the example: - -```shell -pyenv install 3.10.7 -pyenv virtualenv 3.10.7 dbt-synapse -pyenv activate dbt-synapse -``` - -Install the development dependencies and pre-commit and get information about possible make commands: - -```shell -make dev -make help -``` - -[Pre-commit](https://pre-commit.com/) helps us to maintain a consistent style and code quality across the entire project. -After running `make dev`, pre-commit will automatically validate your commits and fix any formatting issues whenever possible. - -## Testing - -The functional tests require a running Synapse Dedicated SQL Pool instance. -You can configure the connection to this instance with the file `test.env` in the root of the project. -You can use the provided `test.env.sample` as a base. - -```shell -cp test.env.sample test.env -``` - -You can use the following command to run the functional tests: - -```shell -make functional -``` - -## CI/CD - -We use Docker images that have all the things we need to test the adapter in the CI/CD workflows. -The Dockerfile and image are part of the [dbt-sqlserver](https://github.com/dbt-msft/dbt-sqlserver) repository. - -All CI/CD pipelines are using GitHub Actions. The following pipelines are available: - -* `integration-tests-azure`: runs the integration tests for Azure SQL Server. -* `release-version`: publishes the adapter to PyPI. - -There is an additional [Pre-commit](https://pre-commit.ci/) pipeline that validates the code style. 
- -### Azure integration tests - -The following environment variables are available: - -* `DBT_SYNAPSE_SERVER`: Name of the Synapse workspace -* `DBT_SYNAPSE_DB`: Name of the Synapse dedicated SQL pool -* `DBT_AZURE_TENANT`: Azure tenant ID -* `DBT_AZURE_SUBSCRIPTION_ID`: Azure subscription ID -* `DBT_AZURE_RESOURCE_GROUP_NAME`: Azure resource group name -* `DBT_AZURE_SP_NAME`: Client/application ID of the service principal used to connect to Azure AD -* `DBT_AZURE_SP_SECRET`: Password of the service principal used to connect to Azure AD - -## Releasing a new version - -Make sure the version number is bumped in `__version__.py`. Then, create a git tag named `v` and push it to GitHub. -A GitHub Actions workflow will be triggered to build the package and push it to PyPI. - -Make sure that the dependency to dbt-sqlserver is bumped to a compatible version in `setup.py`. - -If you're releasing support for a new version of `dbt-core`, also bump the `dbt_version` in `setup.py`. From 8694d6bca1b1fdadc17faf63847665437345199e Mon Sep 17 00:00:00 2001 From: artc95 Date: Tue, 13 Feb 2024 11:43:36 +0100 Subject: [PATCH 15/32] notify to remove pyodbc for Windows in dev_requirements.txt (rebase onto latest dependency_dbtsqlserver_to_dbtfabric branch) --- dev_requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev_requirements.txt b/dev_requirements.txt index f21875fc..c3268f92 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -5,7 +5,7 @@ pre-commit==2.21.0;python_version<"3.8" pre-commit==3.3.1;python_version>="3.8" pytest-dotenv==0.5.2 dbt-tests-adapter~=1.6.9 -pyodbc==4.0.39 --no-binary :all: +pyodbc==4.0.39 --no-binary :all: # if Windows, remove aiohttp==3.8.3 azure-mgmt-synapse==2.0.0 flaky==3.7.0 From ad7ef7fa52f4d38c78481999f23170a0e7b7d2dc Mon Sep 17 00:00:00 2001 From: artc95 Date: Tue, 13 Feb 2024 16:39:20 +0100 Subject: [PATCH 16/32] update to use _make_ref_key_dict (instead of _make_ref_key_msg from dbt-core v1.4.9) --- 
dbt/adapters/synapse/synapse_adapter.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dbt/adapters/synapse/synapse_adapter.py b/dbt/adapters/synapse/synapse_adapter.py index 9e6dfddb..5c02b579 100644 --- a/dbt/adapters/synapse/synapse_adapter.py +++ b/dbt/adapters/synapse/synapse_adapter.py @@ -1,5 +1,5 @@ from dbt.adapters.base.relation import BaseRelation -from dbt.adapters.cache import _make_ref_key_msg +from dbt.adapters.cache import _make_ref_key_dict from dbt.adapters.fabric import FabricAdapter from dbt.adapters.sql.impl import CREATE_SCHEMA_MACRO_NAME from dbt.events.functions import fire_event @@ -13,7 +13,7 @@ class SynapseAdapter(FabricAdapter): def create_schema(self, relation: BaseRelation) -> None: relation = relation.without_identifier() - fire_event(SchemaCreation(relation=_make_ref_key_msg(relation))) + fire_event(SchemaCreation(relation=_make_ref_key_dict(relation))) macro_name = CREATE_SCHEMA_MACRO_NAME kwargs = { "relation": relation, From 4a0c21e291a4506698ecaae1bf641e29b7f546b0 Mon Sep 17 00:00:00 2001 From: artc95 Date: Wed, 14 Feb 2024 11:53:51 +0100 Subject: [PATCH 17/32] remove pyodbc requirement --- dev_requirements.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/dev_requirements.txt b/dev_requirements.txt index c3268f92..0f3a14da 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -5,7 +5,6 @@ pre-commit==2.21.0;python_version<"3.8" pre-commit==3.3.1;python_version>="3.8" pytest-dotenv==0.5.2 dbt-tests-adapter~=1.6.9 -pyodbc==4.0.39 --no-binary :all: # if Windows, remove aiohttp==3.8.3 azure-mgmt-synapse==2.0.0 flaky==3.7.0 From 440e4a97a8bca982ce1492211fb426037813dda9 Mon Sep 17 00:00:00 2001 From: artc95 Date: Wed, 14 Feb 2024 14:04:22 +0100 Subject: [PATCH 18/32] fix macro synapse__split_part to handle negative ordinal (failing test_utils.py::TestSplitPartSynapse) --- dbt/include/synapse/macros/utils/split_part.sql | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git 
a/dbt/include/synapse/macros/utils/split_part.sql b/dbt/include/synapse/macros/utils/split_part.sql index ee01de1d..0e7055d1 100644 --- a/dbt/include/synapse/macros/utils/split_part.sql +++ b/dbt/include/synapse/macros/utils/split_part.sql @@ -2,8 +2,18 @@ We have to replace the macro from dbt-sqlserver since that one uses XML which is an unsupported data type in Synapse. The function below is not supported in Synapse Dedicated SQL according to the documentation, but it seems to work. #} + {% macro synapse__split_part(string_text, delimiter_text, part_number) %} - (select value from string_split({{ string_text }}, {{ delimiter_text }}, 1) where ordinal = {{ part_number }}) + {% if part_number >= 0 %} + + (select value from string_split({{ string_text }}, {{ delimiter_text }}, 1) where ordinal = {{ part_number }}) + + {% else %} + + (select value from string_split({{ string_text }}, {{ delimiter_text }}, 1) + where ordinal = len(replace({{ string_text }}, {{delimiter_text}}, '')) + 1 + {{ part_number }}) + + {% endif %} {% endmacro %} From 399fb8f30f4004e4f551b1a50e355584fcc44a27 Mon Sep 17 00:00:00 2001 From: artc95 Date: Wed, 14 Feb 2024 21:05:13 +0100 Subject: [PATCH 19/32] drop materialized view (which stops reference to underlying table), instead of disable; to-be-fixed macro get_replace_materialized_view_as_sql, for test_materialized_views.py test_materialized_view_create_idempotent --- .../synapse/macros/adapters/relation.sql | 8 ++++--- .../materialized_view/materialized_view.sql | 23 ++++++++++++++++++- .../adapter/test_materialized_views.py | 15 ++++++++++-- 3 files changed, 40 insertions(+), 6 deletions(-) diff --git a/dbt/include/synapse/macros/adapters/relation.sql b/dbt/include/synapse/macros/adapters/relation.sql index 32b74e00..29ad42b3 100644 --- a/dbt/include/synapse/macros/adapters/relation.sql +++ b/dbt/include/synapse/macros/adapters/relation.sql @@ -13,12 +13,14 @@ {% endif %} if object_id ('{{ relation.include(database=False) }}','{{ 
object_id_type }}') is not null - {% if relation.type == 'view' or relation.type == 'table' -%} + {% if relation.type == 'view' or relation.type == 'materialized_view' -%} + begin + drop view {{ relation.include(database=False) }} + end + {% elif relation.type == 'table' %} begin drop {{ relation.type }} {{ relation.include(database=False) }} end - {% elif relation.type == 'materialized_view' %} - alter materialized view {{ relation.include(database=False)}} disable {% endif %} {% endmacro %} diff --git a/dbt/include/synapse/macros/materializations/models/materialized_view/materialized_view.sql b/dbt/include/synapse/macros/materializations/models/materialized_view/materialized_view.sql index 4456377d..5c43226a 100644 --- a/dbt/include/synapse/macros/materializations/models/materialized_view/materialized_view.sql +++ b/dbt/include/synapse/macros/materializations/models/materialized_view/materialized_view.sql @@ -4,11 +4,32 @@ {% endmacro %} +{% macro synapse__get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %} + {# Synapse does not have ALTER...RENAME function, so use existing macro synapse__rename_relation #} + + {{- synapse__get_create_materialized_view_as_sql(intermediate_relation, sql) -}} GO + + if object_id ('{{ backup_relation.include(database=False) }}','V') is not null + begin + drop view {{ backup_relation.include(database=False) }} + end + + if object_id ('{{ backup_relation.include(database=False) }}','U') is not null + begin + drop table {{ backup_relation.include(database=False) }} + end + + rename object {{ existing_relation.include(database=False) }} to {{ backup_relation.identifier }} + + rename object {{ intermediate_relation.include(database=False) }} to {{ existing_relation.identifier }} + +{% endmacro %} + {% macro synapse__get_create_materialized_view_as_sql(relation, sql) %} {%- set dist = config.get('dist', default="ROUND_ROBIN") -%} CREATE materialized view {{ 
relation.include(database=False) }} WITH ( DISTRIBUTION = {{dist}} ) - AS {{ sql }}; + AS {{ sql }} {% endmacro %} diff --git a/tests/functional/adapter/test_materialized_views.py b/tests/functional/adapter/test_materialized_views.py index ca31f78e..8abbdb3f 100644 --- a/tests/functional/adapter/test_materialized_views.py +++ b/tests/functional/adapter/test_materialized_views.py @@ -62,6 +62,17 @@ def test_materialized_view_create(self, project): } return check_relation_types(project.adapter, expected) - @pytest.mark.skip(reason="rename materialized view not supported") def test_materialized_view_create_idempotent(self, project, my_materialized_view): - pass + # setup creates it once; verify it's there and run once + expected = { + # sys.objects has no type "materialized view", it's type "view" + "my_materialized_view": "view", + } + check_relation_types(project.adapter, expected) + + run_dbt(["run", "--models", my_materialized_view.identifier]) + expected = { + # sys.objects has no type "materialized view", it's type "view" + "my_materialized_view": "view", + } + check_relation_types(project.adapter, expected) From ac3412229c7b2ae4a8acd293c2f2c43a7f3b6499 Mon Sep 17 00:00:00 2001 From: artc95 Date: Thu, 15 Feb 2024 12:13:49 +0100 Subject: [PATCH 20/32] split rename_relation macro into rename_relation_script macro, for reuseability ; update get_replace_materialized_view_as_sql macro to pass test_materialized_view_create_idempotent --- .../synapse/macros/adapters/relation.sql | 12 +++++++--- .../materialized_view/materialized_view.sql | 24 +++++++------------ 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/dbt/include/synapse/macros/adapters/relation.sql b/dbt/include/synapse/macros/adapters/relation.sql index 29ad42b3..e5136e56 100644 --- a/dbt/include/synapse/macros/adapters/relation.sql +++ b/dbt/include/synapse/macros/adapters/relation.sql @@ -24,9 +24,16 @@ {% endif %} {% endmacro %} - {% macro synapse__rename_relation(from_relation, 
to_relation) -%} - {% call statement('rename_relation') -%} + {# dbt needs this 'call' macro, but it overwrites other SQL when reused in other macros #} + {# so '_script' macro is reuseable script, for other macros to combine with more SQL #} + + {% call statement('rename_relation') %} + {{ synapse__rename_relation_script(from_relation, to_relation) }} + {%- endcall %} +{% endmacro %} + +{% macro synapse__rename_relation_script(from_relation, to_relation) -%} -- drop all object types with to_relation.identifier name, to avoid error "new name already in use...duplicate...not permitted" if object_id ('{{ to_relation.include(database=False) }}','V') is not null begin @@ -39,7 +46,6 @@ end rename object {{ from_relation.include(database=False) }} to {{ to_relation.identifier }} - {%- endcall %} {% endmacro %} {% macro synapse__truncate_relation(relation) %} diff --git a/dbt/include/synapse/macros/materializations/models/materialized_view/materialized_view.sql b/dbt/include/synapse/macros/materializations/models/materialized_view/materialized_view.sql index 5c43226a..2dff1b4b 100644 --- a/dbt/include/synapse/macros/materializations/models/materialized_view/materialized_view.sql +++ b/dbt/include/synapse/macros/materializations/models/materialized_view/materialized_view.sql @@ -5,23 +5,17 @@ {% endmacro %} {% macro synapse__get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %} - {# Synapse does not have ALTER...RENAME function, so use existing macro synapse__rename_relation #} + {# Synapse does not have ALTER...RENAME function, so use synapse__rename_relation_script #} - {{- synapse__get_create_materialized_view_as_sql(intermediate_relation, sql) -}} GO - - if object_id ('{{ backup_relation.include(database=False) }}','V') is not null - begin - drop view {{ backup_relation.include(database=False) }} - end - - if object_id ('{{ backup_relation.include(database=False) }}','U') is not null - begin - drop table {{ 
backup_relation.include(database=False) }} - end - - rename object {{ existing_relation.include(database=False) }} to {{ backup_relation.identifier }} + {%- set dist = config.get('dist', default="ROUND_ROBIN") -%} + EXEC(' + CREATE materialized view {{ intermediate_relation.include(database=False) }} + WITH ( DISTRIBUTION = {{dist}} ) + AS {{ sql }} + '); - rename object {{ intermediate_relation.include(database=False) }} to {{ existing_relation.identifier }} + {{ synapse__rename_relation_script(existing_relation, backup_relation) }} + {{ synapse__rename_relation_script(intermediate_relation, relation) }} {% endmacro %} From 8f6e5ab4caa0c32d5b20b564bb14b2dfc2fe9910 Mon Sep 17 00:00:00 2001 From: artc95 Date: Thu, 15 Feb 2024 14:43:12 +0100 Subject: [PATCH 21/32] to-be-fixed added rest of TestMaterializedViewsBasicSynapse tests (skipped unsupported updates_after_refresh), test_view_replaces_materialized_view passes individually but not when with other tests --- .../adapter/test_materialized_views.py | 90 ++++++++++++++++++- 1 file changed, 89 insertions(+), 1 deletion(-) diff --git a/tests/functional/adapter/test_materialized_views.py b/tests/functional/adapter/test_materialized_views.py index 8abbdb3f..67fb6235 100644 --- a/tests/functional/adapter/test_materialized_views.py +++ b/tests/functional/adapter/test_materialized_views.py @@ -1,6 +1,13 @@ import pytest from dbt.tests.adapter.materialized_view.basic import MaterializedViewBasic -from dbt.tests.util import check_relation_types, get_model_file, run_dbt, set_model_file +from dbt.tests.util import ( + assert_message_in_logs, + check_relation_types, + get_model_file, + run_dbt, + run_dbt_and_capture, + set_model_file, +) MY_TABLE = """ {{ config( @@ -76,3 +83,84 @@ def test_materialized_view_create_idempotent(self, project, my_materialized_view "my_materialized_view": "view", } check_relation_types(project.adapter, expected) + + def test_materialized_view_full_refresh(self, project, my_materialized_view): + _, 
logs = run_dbt_and_capture( + ["--debug", "run", "--models", my_materialized_view.identifier, "--full-refresh"] + ) + expected = { + # sys.objects has no type "materialized view", it's type "view" + "my_materialized_view": "view", + } + check_relation_types(project.adapter, expected) + assert_message_in_logs(f"Applying REPLACE to: {my_materialized_view}", logs) + + def test_materialized_view_replaces_table(self, project, my_table): + run_dbt(["run", "--models", my_table.identifier]) + expected = { + "my_table": "table", + } + check_relation_types(project.adapter, expected) + + self.swap_table_to_materialized_view(project, my_table) + + run_dbt(["run", "--models", my_table.identifier]) + expected = { + # sys.objects has no type "materialized view", it's type "view" + "my_table": "view", + } + check_relation_types(project.adapter, expected) + + def test_materialized_view_replaces_view(self, project, my_view): + run_dbt(["run", "--models", my_view.identifier]) + expected = { + "my_view": "view", + } + check_relation_types(project.adapter, expected) + + self.swap_view_to_materialized_view(project, my_view) + + run_dbt(["run", "--models", my_view.identifier]) + expected = { + # sys.objects has no type "materialized view", it's type "view" + "my_view": "view", + } + check_relation_types(project.adapter, expected) + + def test_table_replaces_materialized_view(self, project, my_materialized_view): + run_dbt(["run", "--models", my_materialized_view.identifier]) + expected = { + # sys.objects has no type "materialized view", it's type "view" + "my_materialized_view": "view", + } + check_relation_types(project.adapter, expected) + + self.swap_materialized_view_to_table(project, my_materialized_view) + + run_dbt(["run", "--models", my_materialized_view.identifier]) + expected = { + "my_materialized_view": "table", + } + check_relation_types(project.adapter, expected) + + def test_view_replaces_materialized_view(self, project, my_materialized_view): + run_dbt(["run", 
"--models", my_materialized_view.identifier]) + expected = { + # sys.objects has no type "materialized view", it's type "view" + "my_materialized_view": "view", + } + check_relation_types(project.adapter, expected) + + self.swap_materialized_view_to_view(project, my_materialized_view) + + run_dbt(["run", "--models", my_materialized_view.identifier]) + expected = { + "my_materialized_view": "view", + } + check_relation_types(project.adapter, expected) + + @pytest.mark.skip(reason="Synapse materialized view is always updated") + def test_materialized_view_only_updates_after_refresh( + self, project, my_materialized_view, my_seed + ): + pass From 1f51b8537a9a75c4251e3331d2000f99b177e5f3 Mon Sep 17 00:00:00 2001 From: artc95 Date: Thu, 15 Feb 2024 15:57:11 +0100 Subject: [PATCH 22/32] to-be-fixed hotfix that resets my_materialized_view .sql --- tests/functional/adapter/test_materialized_views.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/functional/adapter/test_materialized_views.py b/tests/functional/adapter/test_materialized_views.py index 67fb6235..955ea640 100644 --- a/tests/functional/adapter/test_materialized_views.py +++ b/tests/functional/adapter/test_materialized_views.py @@ -144,6 +144,7 @@ def test_table_replaces_materialized_view(self, project, my_materialized_view): check_relation_types(project.adapter, expected) def test_view_replaces_materialized_view(self, project, my_materialized_view): + self.swap_table_to_materialized_view(project, my_materialized_view) # hotfix run_dbt(["run", "--models", my_materialized_view.identifier]) expected = { # sys.objects has no type "materialized view", it's type "view" From a0130825f229917689a4a753f3dd56442e4972d6 Mon Sep 17 00:00:00 2001 From: artc95 Date: Thu, 15 Feb 2024 16:43:15 +0100 Subject: [PATCH 23/32] drop python 3.7 support ; update CHANGELOG --- .github/workflows/integration-tests-azure.yml | 2 +- CHANGELOG.md | 15 +++++++++++---- setup.py | 1 - 3 files changed, 12 insertions(+), 6 deletions(-) 
diff --git a/.github/workflows/integration-tests-azure.yml b/.github/workflows/integration-tests-azure.yml
index 1e3e76b4..f870dc84 100644
--- a/.github/workflows/integration-tests-azure.yml
+++ b/.github/workflows/integration-tests-azure.yml
@@ -14,7 +14,7 @@ jobs:
     name: Integration tests on Azure
     strategy:
       matrix:
-        python_version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
+        python_version: ["3.8", "3.9", "3.10", "3.11"]
         msodbc_version: ["17", "18"]
     runs-on: ubuntu-latest
     container:
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0e872b0c..60ab2086 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,15 +1,22 @@
 # Changelog
 
-### v1.6.0
+## v1.6.0rc1
+
+* Support for [dbt-core 1.6](https://github.com/dbt-labs/dbt-core/releases/tag/v1.6.0)
+
+#### Breaking Changes
+* Dropped support for Python 3.7 ([#7082](https://github.com/dbt-labs/dbt-core/issues/7082))
 
 ## Features
-* Added tests related to dbt-debug to test --connection parameter
+* Add support for materialized views ([#6911](https://github.com/dbt-labs/dbt-core/issues/6911))
+* ~~dbt clone ([#7258](https://github.com/dbt-labs/dbt-core/issues/7258))~~ Synapse does not support CLONE
+* Revamp dbt debug ([#7104](https://github.com/dbt-labs/dbt-core/issues/7104))
 
 ## v1.4.1rc1
 
 #### Under the hood
-* Switch dependency from dbt-sqlserver to dbt-fabric (per https://github.com/dbt-msft/dbt-sqlserver/issues/441)
+* Switch dependency from dbt-sqlserver to dbt-fabric ([dbt-msft/dbt-sqlserver/#441](https://github.com/dbt-msft/dbt-sqlserver/issues/441))
 * for Mac users, before running `make dev`, add `pyodbc==4.0.39 --no-binary :all:` in dev_requirements.txt
-    * about pyodbc "Symbol not found: _SQLAllocHandle" error https://stackoverflow.com/questions/66731036/unable-to-import-pyodbc-on-apple-silicon-symbol-not-found-sqlallochandle
+    * [StackOverflow](https://stackoverflow.com/questions/66731036/unable-to-import-pyodbc-on-apple-silicon-symbol-not-found-sqlallochandle) about pyodbc 
"Symbol not found: _SQLAllocHandle" error ## v1.4.0 diff --git a/setup.py b/setup.py index 50ff82c5..f8d3e3b3 100644 --- a/setup.py +++ b/setup.py @@ -83,7 +83,6 @@ def run(self): "Operating System :: Microsoft :: Windows", "Operating System :: MacOS :: MacOS X", "Operating System :: POSIX :: Linux", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", From 9d2ed8f672827795d9355dd026aa80e60af3903c Mon Sep 17 00:00:00 2001 From: nszoni Date: Fri, 16 Feb 2024 10:11:57 +0100 Subject: [PATCH 24/32] refactor model and run hooks for threadid --- tests/functional/adapter/data/seed_model.sql | 3 +- tests/functional/adapter/data/seed_run.sql | 3 +- tests/functional/adapter/test_model_hooks.py | 138 ++----------------- tests/functional/adapter/test_run_hooks.py | 106 +------------- 4 files changed, 18 insertions(+), 232 deletions(-) diff --git a/tests/functional/adapter/data/seed_model.sql b/tests/functional/adapter/data/seed_model.sql index acf53349..52c2ce4e 100644 --- a/tests/functional/adapter/data/seed_model.sql +++ b/tests/functional/adapter/data/seed_model.sql @@ -10,7 +10,8 @@ create table {schema}.on_model_hook target_pass VARCHAR(100), target_threads INTEGER, run_started_at VARCHAR(100), - invocation_id VARCHAR(100) + invocation_id VARCHAR(100), + thread_id VARCHAR(100) ) WITH( DISTRIBUTION = ROUND_ROBIN, diff --git a/tests/functional/adapter/data/seed_run.sql b/tests/functional/adapter/data/seed_run.sql index 85a02e26..2c0d23b9 100644 --- a/tests/functional/adapter/data/seed_run.sql +++ b/tests/functional/adapter/data/seed_run.sql @@ -13,7 +13,8 @@ create table {schema}.on_run_hook target_pass VARCHAR(100), target_threads INTEGER, run_started_at VARCHAR(100), - invocation_id VARCHAR(100) + invocation_id VARCHAR(100), + thread_id VARCHAR(100) ) WITH( DISTRIBUTION = ROUND_ROBIN, diff --git a/tests/functional/adapter/test_model_hooks.py 
b/tests/functional/adapter/test_model_hooks.py index 05a757d3..d765705c 100644 --- a/tests/functional/adapter/test_model_hooks.py +++ b/tests/functional/adapter/test_model_hooks.py @@ -1,15 +1,13 @@ -from pathlib import Path - import pytest from dbt.tests.adapter.hooks.fixtures import ( - models__hooked, models__hooks, models__hooks_configured, models__hooks_kwargs, - models__post, - models__pre, ) from dbt.tests.adapter.hooks.test_model_hooks import ( + MODEL_POST_HOOK, + MODEL_PRE_HOOK, + BaseTestPrePost, TestDuplicateHooksInConfigs, TestHooksRefsOnSeeds, TestPrePostModelHooksOnSeeds, @@ -20,93 +18,8 @@ ) from dbt.tests.util import run_dbt -MODEL_PRE_HOOK = """ - insert into {{this.schema}}.on_model_hook ( - test_state, - target_dbname, - target_host, - target_name, - target_schema, - target_type, - target_user, - target_pass, - target_threads, - run_started_at, - invocation_id - ) VALUES ( - 'start', - '{{ target.dbname }}', - '{{ target.host }}', - '{{ target.name }}', - '{{ target.schema }}', - '{{ target.type }}', - '{{ target.user }}', - '{{ target.get("pass", "") }}', - {{ target.threads }}, - '{{ run_started_at }}', - '{{ invocation_id }}' - ) -""" - -MODEL_POST_HOOK = """ - insert into {{this.schema}}.on_model_hook ( - test_state, - target_dbname, - target_host, - target_name, - target_schema, - target_type, - target_user, - target_pass, - target_threads, - run_started_at, - invocation_id - ) VALUES ( - 'end', - '{{ target.dbname }}', - '{{ target.host }}', - '{{ target.name }}', - '{{ target.schema }}', - '{{ target.type }}', - '{{ target.user }}', - '{{ target.get("pass", "") }}', - {{ target.threads }}, - '{{ run_started_at }}', - '{{ invocation_id }}' - ) -""" - - -class BaseTestPrePost(object): - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project): - project.run_sql_file(project.test_data_dir / Path("seed_model.sql")) - - def get_ctx_vars(self, state, count, project): - fields = [ - "test_state", - "target_dbname", - 
"target_host", - "target_name", - "target_schema", - "target_threads", - "target_type", - "target_user", - "target_pass", - "run_started_at", - "invocation_id", - ] - field_list = ", ".join(['"{}"'.format(f) for f in fields]) - query = f""" - select {field_list} from {project.test_schema}.on_model_hook where test_state = '{state}' - """ - - vals = project.run_sql(query, fetch="all") - assert len(vals) != 0, "nothing inserted into hooks table" - assert len(vals) >= count, "too few rows in hooks table" - assert len(vals) <= count, "too many rows in hooks table" - return [{k: v for k, v in zip(fields, val)} for val in vals] +class BaseTestPrePostSynapse(BaseTestPrePost): def check_hooks(self, state, project, host, count=1): ctxs = self.get_ctx_vars(state, count=count, project=project) for ctx in ctxs: @@ -126,9 +39,10 @@ def check_hooks(self, state, project, host, count=1): assert ( ctx["invocation_id"] is not None and len(ctx["invocation_id"]) > 0 ), "invocation_id was not set" + assert ctx["thread_id"].startswith("Thread-") -class PrePostModelHooksInConfigSetup(BaseTestPrePost): +class PrePostModelHooksInConfigSetup(BaseTestPrePostSynapse): @pytest.fixture(scope="class") def project_config_update(self): return { @@ -140,42 +54,8 @@ def models(self): return {"hooks.sql": models__hooks_configured} -class TestHookRefs(BaseTestPrePost): - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "models": { - "test": { - "hooked": { - "post-hook": [ - """ - insert into {{this.schema}}.on_model_hook select - test_state, - '{{ target.dbname }}' as target_dbname, - '{{ target.host }}' as target_host, - '{{ target.name }}' as target_name, - '{{ target.schema }}' as target_schema, - '{{ target.type }}' as target_type, - '{{ target.user }}' as target_user, - '{{ target.get(pass, "") }}' as target_pass, - {{ target.threads }} as target_threads, - '{{ run_started_at }}' as run_started_at, - '{{ invocation_id }}' as invocation_id - from {{ ref('post') 
}}""".strip() - ], - } - }, - } - } - - @pytest.fixture(scope="class") - def models(self): - return {"hooked.sql": models__hooked, "post.sql": models__post, "pre.sql": models__pre} - - def test_pre_post_model_hooks_refed(self, project, dbt_profile_target): - run_dbt() - self.check_hooks("start", project, dbt_profile_target.get("host", None)) - self.check_hooks("end", project, dbt_profile_target.get("host", None)) +class TestHookRefs(BaseTestPrePostSynapse): + pass class TestPrePostModelHooksOnSeeds(TestPrePostModelHooksOnSeeds): @@ -293,7 +173,7 @@ class TestDuplicateHooksInConfigs(TestDuplicateHooksInConfigs): # vacuum command is removed because not supported in synapse -class TestPrePostModelHooks(BaseTestPrePost): +class TestPrePostModelHooks(BaseTestPrePostSynapse): @pytest.fixture(scope="class") def project_config_update(self): return { diff --git a/tests/functional/adapter/test_run_hooks.py b/tests/functional/adapter/test_run_hooks.py index e8baad8d..cf2a32ee 100644 --- a/tests/functional/adapter/test_run_hooks.py +++ b/tests/functional/adapter/test_run_hooks.py @@ -2,18 +2,10 @@ from pathlib import Path import pytest -from dbt.tests.adapter.hooks.fixtures import ( - macros__before_and_after, - macros__hook, - macros_missing_column, - models__hooks, - models__missing_column, - seeds__example_seed_csv, -) -from dbt.tests.util import check_table_does_not_exist, run_dbt +from dbt.tests.adapter.hooks.test_run_hooks import TestAfterRunHooks, TestPrePostRunHooks -class TestPrePostRunHooks(object): +class TestPrePostRunHooks(TestPrePostRunHooks): @pytest.fixture(scope="function") def setUp(self, project): project.run_sql_file(project.test_data_dir / Path("seed_run.sql")) @@ -31,18 +23,6 @@ def setUp(self, project): ) os.environ["TERM_TEST"] = "TESTING" - @pytest.fixture(scope="class") - def macros(self): - return {"hook.sql": macros__hook, "before-and-after.sql": macros__before_and_after} - - @pytest.fixture(scope="class") - def models(self): - return {"hooks.sql": 
models__hooks} - - @pytest.fixture(scope="class") - def seeds(self): - return {"example_seed.csv": seeds__example_seed_csv} - @pytest.fixture(scope="class") def project_config_update(self): return { @@ -74,45 +54,6 @@ def project_config_update(self): }, } - def get_ctx_vars(self, state, project): - fields = [ - "test_state", - "target_dbname", - "target_host", - "target_name", - "target_schema", - "target_threads", - "target_type", - "target_user", - "target_pass", - "run_started_at", - "invocation_id", - ] - field_list = ", ".join(['"{}"'.format(f) for f in fields]) - query = f""" - select {field_list} from {project.test_schema}.on_run_hook - where test_state = '{state}' - """ - - vals = project.run_sql(query, fetch="all") - assert len(vals) != 0, "nothing inserted into on_run_hook table" - assert len(vals) == 1, "too many rows in hooks table" - ctx = dict([(k, v) for (k, v) in zip(fields, vals[0])]) - - return ctx - - def assert_used_schemas(self, project): - schemas_query = "select * from {}.schemas".format(project.test_schema) - results = project.run_sql(schemas_query, fetch="all") - assert len(results) == 1 - assert results[0][0] == project.test_schema - - db_schemas_query = "select * from {}.db_schemas".format(project.test_schema) - results = project.run_sql(db_schemas_query, fetch="all") - assert len(results) == 1 - assert results[0][0] == project.database - assert results[0][1] == project.test_schema - def check_hooks(self, state, project, host): ctx = self.get_ctx_vars(state, project) @@ -132,45 +73,8 @@ def check_hooks(self, state, project, host): assert ( ctx["invocation_id"] is not None and len(ctx["invocation_id"]) > 0 ), "invocation_id was not set" + assert ctx["thread_id"].startswith("Thread-") or ctx["thread_id"] == "MainThread" - def test_pre_and_post_run_hooks(self, setUp, project, dbt_profile_target): - run_dbt(["run"]) - - self.check_hooks("start", project, dbt_profile_target.get("host", None)) - self.check_hooks("end", project, 
dbt_profile_target.get("host", None)) - - check_table_does_not_exist(project.adapter, "start_hook_order_test") - check_table_does_not_exist(project.adapter, "end_hook_order_test") - self.assert_used_schemas(project) - - def test_pre_and_post_seed_hooks(self, setUp, project, dbt_profile_target): - run_dbt(["seed"]) - - self.check_hooks("start", project, dbt_profile_target.get("host", None)) - self.check_hooks("end", project, dbt_profile_target.get("host", None)) - - check_table_does_not_exist(project.adapter, "start_hook_order_test") - check_table_does_not_exist(project.adapter, "end_hook_order_test") - self.assert_used_schemas(project) - - -class TestAfterRunHooks(object): - @pytest.fixture(scope="class") - def macros(self): - return {"temp_macro.sql": macros_missing_column} - - @pytest.fixture(scope="class") - def models(self): - return {"test_column.sql": models__missing_column} - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - # The create and drop table statements here validate that these hooks run - # in the same order that they are defined. Drop before create is an error. - # Also check that the table does not exist below. 
- "on-run-start": "- {{ export_table_check() }}" - } - def test_missing_column_pre_hook(self, project): - run_dbt(["run"], expect_pass=False) +class TestAfterRunHooks(TestAfterRunHooks): + pass From 0307e6dcb597e5f7ec7d8d0d82fb6ed204b0f63a Mon Sep 17 00:00:00 2001 From: nszoni Date: Fri, 16 Feb 2024 11:43:28 +0100 Subject: [PATCH 25/32] add new contraint test zones --- dbt/adapters/synapse/synapse_adapter.py | 2 +- tests/functional/adapter/test_constraints.py | 129 ++++++++++++++++++- 2 files changed, 128 insertions(+), 3 deletions(-) diff --git a/dbt/adapters/synapse/synapse_adapter.py b/dbt/adapters/synapse/synapse_adapter.py index c628d02a..664ea133 100644 --- a/dbt/adapters/synapse/synapse_adapter.py +++ b/dbt/adapters/synapse/synapse_adapter.py @@ -70,7 +70,7 @@ def render_raw_columns_constraints(cls, raw_columns: Dict[str, Dict[str, Any]]) rendered_column_constraints = [] for v in raw_columns.values(): - rendered_column_constraint = [f"{v['name']} {v['data_type']}"] + rendered_column_constraint = [f"[{v['name']}] {v['data_type']}"] for con in v.get("constraints", None): constraint = cls._parse_column_constraint(con) c = cls.process_parsed_constraint(constraint, cls.render_column_constraint) diff --git a/tests/functional/adapter/test_constraints.py b/tests/functional/adapter/test_constraints.py index 1e9e4d08..fe93747b 100644 --- a/tests/functional/adapter/test_constraints.py +++ b/tests/functional/adapter/test_constraints.py @@ -12,12 +12,14 @@ my_model_view_wrong_name_sql, my_model_view_wrong_order_sql, my_model_with_nulls_sql, + my_model_with_quoted_column_name_sql, my_model_wrong_name_sql, my_model_wrong_order_depends_on_fk_sql, my_model_wrong_order_sql, ) from dbt.tests.adapter.constraints.test_constraints import ( BaseConstraintsRuntimeDdlEnforcement, + BaseContractSqlHeader, BaseModelConstraintsRuntimeEnforcement, ) from dbt.tests.util import ( @@ -246,6 +248,77 @@ - type: not_null """ +model_contract_header_schema_yml = """ +version: 2 +models: + - 
name: my_model_contract_sql_header + config: + contract: + enforced: true + columns: + - name: column_name + data_type: int +""" + + +# no current_timezone() in Synapse +my_model_contract_sql_header_sql = """ +{{ + config( + materialized = "table" + ) +}} + +{% call set_sql_header(config) %} +set session time zone 'Asia/Kolkata'; +{%- endcall %} +select datepart(tzoffset, sysdatetimeoffset()) as column_name +""" + +my_model_incremental_contract_sql_header_sql = """ +{{ + config( + materialized = "incremental", + on_schema_change="append_new_columns" + ) +}} + +{% call set_sql_header(config) %} +set session time zone 'Asia/Kolkata'; +{%- endcall %} +select datepart(tzoffset, sysdatetimeoffset()) as column_name +""" + +model_quoted_column_schema_yml = """ +version: 2 +models: + - name: my_model + config: + contract: + enforced: true + materialized: table + constraints: + - type: check + # this one is the on the user + expression: ("from" = 'blue') + columns: [ '"from"' ] + columns: + - name: id + data_type: integer + description: hello + constraints: + - type: not_null + tests: + - unique + - name: from # reserved word + quote: true + data_type: varchar(100) + constraints: + - type: not_null + - name: date_day + data_type: varchar(100) +""" + class BaseConstraintsColumnsEqual: """ @@ -395,7 +468,7 @@ def expected_sql(self): if object_id is not null begin drop table end exec('create view as -- depends_on: select ''blue'' as color,1 as id,''2019-01-01'' as date_day;'); - create table (id int not null,color varchar(100),date_day varchar(100)) + create table ([id] int not null,[color] varchar(100),[date_day] varchar(100)) with(distribution = round_robin,heap) insert into ([id],[color],[date_day]) select [id],[color],[date_day] from @@ -435,7 +508,7 @@ def expected_sql(self): if object_id is not null begin drop table end exec('create view as -- depends_on: select ''blue'' as color,1 as id,''2019-01-01'' as date_day;'); - create table (id int not null,color 
varchar(100),date_day varchar(100)) + create table ([id] int not null,[color] varchar(100),[date_day] varchar(100)) with(distribution = round_robin,heap) alter table add constraint primary key nonclustered(id)not enforced; @@ -548,6 +621,46 @@ def null_model_sql(self): return my_model_incremental_with_nulls_sql +class BaseTableContractSqlHeader(BaseContractSqlHeader): + @pytest.fixture(scope="class") + def models(self): + return { + "my_model_contract_sql_header.sql": my_model_contract_sql_header_sql, + "constraints_schema.yml": model_contract_header_schema_yml, + } + + +class BaseIncrementalContractSqlHeader(BaseContractSqlHeader): + @pytest.fixture(scope="class") + def models(self): + return { + "my_model_contract_sql_header.sql": my_model_incremental_contract_sql_header_sql, + "constraints_schema.yml": model_contract_header_schema_yml, + } + + +class BaseConstraintQuotedColumn(BaseConstraintsRuntimeDdlEnforcement): + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_with_quoted_column_name_sql, + "constraints_schema.yml": model_quoted_column_schema_yml, + } + + @pytest.fixture(scope="class") + def expected_sql(self): + return """ + if object_id is not null begin drop view end + if object_id is not null begin drop table end + exec(\'create view as select \'\'blue\'\' as "from",1 as id,\'\'2019-01-01\'\' as date_day;\'); + create table ([id] integer not null,[from] varchar(100)not null,[date_day] varchar(100)) + with(distribution = round_robin,heap) + insert into ([id],[from],[date_day]) + select [id],[from],[date_day] from + if object_id is not null begin drop view end + """ + + class TestTableConstraintsRuntimeDdlEnforcementSynapse(BaseConstraintsRuntimeDdlEnforcement): pass @@ -580,3 +693,15 @@ class TestTableConstraintsRollbackSynapse(BaseConstraintsRollback): class TestIncrementalConstraintsRollbackSynapse(BaseIncrementalConstraintsRollback): pass + + +class TestTableContractSqlHeaderSynapse(BaseTableContractSqlHeader): 
+ pass + + +class TestIncrementalContractSqlHeaderSynapse(BaseIncrementalContractSqlHeader): + pass + + +class TestConstraintQuotedColumnSynapse(BaseConstraintQuotedColumn): + pass From 7061004e8e6df3ebfca23d6050f01f03341e96fc Mon Sep 17 00:00:00 2001 From: nszoni Date: Fri, 16 Feb 2024 11:55:46 +0100 Subject: [PATCH 26/32] add equals test zone --- tests/functional/adapter/test_equals.py | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 tests/functional/adapter/test_equals.py diff --git a/tests/functional/adapter/test_equals.py b/tests/functional/adapter/test_equals.py new file mode 100644 index 00000000..f2f496b4 --- /dev/null +++ b/tests/functional/adapter/test_equals.py @@ -0,0 +1,5 @@ +from dbt.tests.adapter.utils.test_equals import BaseEquals + + +class TestEqualsSynapse(BaseEquals): + pass From 79bee0c1c96f256bb40ffc7ce512125fc4ce7700 Mon Sep 17 00:00:00 2001 From: nszoni Date: Fri, 16 Feb 2024 11:56:33 +0100 Subject: [PATCH 27/32] add null compare test zone --- tests/functional/adapter/test_null_compare.py | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 tests/functional/adapter/test_null_compare.py diff --git a/tests/functional/adapter/test_null_compare.py b/tests/functional/adapter/test_null_compare.py new file mode 100644 index 00000000..e4f51020 --- /dev/null +++ b/tests/functional/adapter/test_null_compare.py @@ -0,0 +1,9 @@ +from dbt.tests.adapter.utils.test_null_compare import BaseMixedNullCompare, BaseNullCompare + + +class TestMixedNullCompareSynapse(BaseMixedNullCompare): + pass + + +class TestNullCompareSynapse(BaseNullCompare): + pass From 3453e6b6db6d32cf7f7c906b7ae5dab9fd534a65 Mon Sep 17 00:00:00 2001 From: nszoni Date: Fri, 16 Feb 2024 11:57:22 +0100 Subject: [PATCH 28/32] add validate sql test zone --- tests/functional/adapter/test_validate_sql.py | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 tests/functional/adapter/test_validate_sql.py diff --git a/tests/functional/adapter/test_validate_sql.py 
b/tests/functional/adapter/test_validate_sql.py new file mode 100644 index 00000000..b370b01e --- /dev/null +++ b/tests/functional/adapter/test_validate_sql.py @@ -0,0 +1,5 @@ +from dbt.tests.adapter.utils.test_validate_sql import BaseValidateSqlMethod + + +class TestValidateSqlMethodSynapse(BaseValidateSqlMethod): + pass From feebccf580d6c2f0dfbf9a58a8aa3042ddae4c7c Mon Sep 17 00:00:00 2001 From: nszoni Date: Fri, 16 Feb 2024 13:19:03 +0100 Subject: [PATCH 29/32] add dbt clone test zone --- tests/functional/adapter/test_dbt_clone.py | 238 +++++++++++++++++++++ 1 file changed, 238 insertions(+) create mode 100644 tests/functional/adapter/test_dbt_clone.py diff --git a/tests/functional/adapter/test_dbt_clone.py b/tests/functional/adapter/test_dbt_clone.py new file mode 100644 index 00000000..d2b72abb --- /dev/null +++ b/tests/functional/adapter/test_dbt_clone.py @@ -0,0 +1,238 @@ +import os +import shutil +from collections import Counter +from copy import deepcopy + +import pytest +from dbt.exceptions import DbtRuntimeError +from dbt.tests.adapter.dbt_clone.fixtures import ( + custom_can_clone_tables_false_macros_sql, + ephemeral_model_sql, + exposures_yml, + get_schema_name_sql, + infinite_macros_sql, + macros_sql, + schema_yml, + seed_csv, + snapshot_sql, + table_model_sql, + view_model_sql, +) +from dbt.tests.util import run_dbt + + +class BaseClone: + @pytest.fixture(scope="class") + def models(self): + return { + "table_model.sql": table_model_sql, + "view_model.sql": view_model_sql, + "ephemeral_model.sql": ephemeral_model_sql, + "schema.yml": schema_yml, + "exposures.yml": exposures_yml, + } + + @pytest.fixture(scope="class") + def macros(self): + return { + "macros.sql": macros_sql, + "infinite_macros.sql": infinite_macros_sql, + "get_schema_name.sql": get_schema_name_sql, + } + + @pytest.fixture(scope="class") + def seeds(self): + return { + "seed.csv": seed_csv, + } + + @pytest.fixture(scope="class") + def snapshots(self): + return { + "snapshot.sql": 
snapshot_sql, + } + + @pytest.fixture(scope="class") + def other_schema(self, unique_schema): + return unique_schema + "_other" + + @property + def project_config_update(self): + return { + "seeds": { + "test": { + "quote_columns": False, + } + } + } + + @pytest.fixture(scope="class") + def profiles_config_update(self, dbt_profile_target, unique_schema, other_schema): + outputs = {"default": dbt_profile_target, "otherschema": deepcopy(dbt_profile_target)} + outputs["default"]["schema"] = unique_schema + outputs["otherschema"]["schema"] = other_schema + return {"test": {"outputs": outputs, "target": "default"}} + + def copy_state(self, project_root): + state_path = os.path.join(project_root, "state") + if not os.path.exists(state_path): + os.makedirs(state_path) + shutil.copyfile( + f"{project_root}/target/manifest.json", f"{project_root}/state/manifest.json" + ) + + def run_and_save_state(self, project_root, with_snapshot=False): + results = run_dbt(["seed"]) + assert len(results) == 1 + assert not any(r.node.deferred for r in results) + results = run_dbt(["run"]) + assert len(results) == 2 + assert not any(r.node.deferred for r in results) + results = run_dbt(["test"]) + assert len(results) == 2 + + if with_snapshot: + results = run_dbt(["snapshot"]) + assert len(results) == 1 + assert not any(r.node.deferred for r in results) + + # copy files + self.copy_state(project_root) + + +# -- Below we define base classes for tests you import the one based on +# -- if your adapter uses dbt clone or not -- +class BaseClonePossible(BaseClone): + @pytest.mark.skip(reason="Synapse does not support cloning from different state") + def test_can_clone_true(self, project, unique_schema, other_schema): + project.create_test_schema(other_schema) + self.run_and_save_state(project.project_root, with_snapshot=True) + + clone_args = [ + "clone", + "--state", + "state", + "--target", + "otherschema", + ] + + results = run_dbt(clone_args) + assert len(results) == 4 + + schema_relations = 
project.adapter.list_relations( + database=project.database, schema=other_schema + ) + types = [r.type for r in schema_relations] + count_types = Counter(types) + assert count_types == Counter({"table": 3, "view": 1}) + + # objects already exist, so this is a no-op + results = run_dbt(clone_args) + assert len(results) == 4 + assert all("ok" in r.message.lower() for r in results) + + # recreate all objects + results = run_dbt([*clone_args, "--full-refresh"]) + assert len(results) == 4 + + # select only models this time + results = run_dbt([*clone_args, "--resource-type", "model"]) + assert len(results) == 2 + assert all("ok" in r.message.lower() for r in results) + + def test_clone_no_state(self, project, unique_schema, other_schema): + project.create_test_schema(other_schema) + self.run_and_save_state(project.project_root, with_snapshot=True) + + clone_args = [ + "clone", + "--target", + "otherschema", + ] + + with pytest.raises( + DbtRuntimeError, + match="--state or --defer-state are required for deferral, but neither was provided", + ): + run_dbt(clone_args) + + +class BaseCloneNotPossible(BaseClone): + @pytest.fixture(scope="class") + def macros(self): + return { + "macros.sql": macros_sql, + "my_can_clone_tables.sql": custom_can_clone_tables_false_macros_sql, + "infinite_macros.sql": infinite_macros_sql, + "get_schema_name.sql": get_schema_name_sql, + } + + def test_can_clone_false(self, project, unique_schema, other_schema): + project.create_test_schema(other_schema) + self.run_and_save_state(project.project_root, with_snapshot=True) + + clone_args = [ + "clone", + "--state", + "state", + "--target", + "otherschema", + ] + + results = run_dbt(clone_args) + assert len(results) == 4 + + schema_relations = project.adapter.list_relations( + database=project.database, schema=other_schema + ) + assert all(r.type == "view" for r in schema_relations) + + # objects already exist, so this is a no-op + results = run_dbt(clone_args) + assert len(results) == 4 + assert 
all("ok" in r.message.lower() for r in results) + + # recreate all objects + results = run_dbt([*clone_args, "--full-refresh"]) + assert len(results) == 4 + + # select only models this time + results = run_dbt([*clone_args, "--resource-type", "model"]) + assert len(results) == 2 + assert all("ok" in r.message.lower() for r in results) + + +class TestCloneNotPossibleSynapse(BaseCloneNotPossible): + @pytest.fixture(autouse=True) + def clean_up(self, project): + yield + with project.adapter.connection_named("__test"): + relation = project.adapter.Relation.create( + database=project.database, schema=f"{project.test_schema}_seeds" + ) + project.adapter.drop_schema(relation) + + relation = project.adapter.Relation.create( + database=project.database, schema=project.test_schema + ) + project.adapter.drop_schema(relation) + + pass + + +class TestClonePossibleSynapse(BaseClonePossible): + @pytest.fixture(autouse=True) + def clean_up(self, project): + yield + with project.adapter.connection_named("__test"): + relation = project.adapter.Relation.create( + database=project.database, schema=f"{project.test_schema}_seeds" + ) + project.adapter.drop_schema(relation) + + relation = project.adapter.Relation.create( + database=project.database, schema=project.test_schema + ) + project.adapter.drop_schema(relation) + + pass From 1b65540b650ff6acdc30d474b1e653d64ec3bf4b Mon Sep 17 00:00:00 2001 From: artc95 Date: Fri, 16 Feb 2024 15:49:50 +0100 Subject: [PATCH 30/32] replace hotfix with drop_cascade to properly teardown each test in test_materialized_views --- CHANGELOG.md | 1 + .../adapter/test_materialized_views.py | 71 +++++++++++++++---- 2 files changed, 59 insertions(+), 13 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index dd97cdc1..b5b30d10 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,7 @@ ## Features * Add support for materialized views ([#6911](https://github.com/dbt-labs/dbt-core/issues/6911)) + * important note! 
unlike [dbt's materialized view](https://docs.getdbt.com/docs/build/materializations), [Synapse's materialized view](https://learn.microsoft.com/en-us/sql/t-sql/statements/create-materialized-view-as-select-transact-sql?view=azure-sqldw-latest&context=%2Fazure%2Fsynapse-analytics%2Fcontext%2Fcontext) must be created using aggregation and/or "GROUP BY"! * ~~dbt clone ([#7258](https://github.com/dbt-labs/dbt-core/issues/7258)~~ Synapse does not support CLONE) * Revamp dbt debug ([#7104](https://github.com/dbt-labs/dbt-core/issues/7104)) diff --git a/tests/functional/adapter/test_materialized_views.py b/tests/functional/adapter/test_materialized_views.py index 955ea640..06e86220 100644 --- a/tests/functional/adapter/test_materialized_views.py +++ b/tests/functional/adapter/test_materialized_views.py @@ -39,6 +39,36 @@ """ +def drop_cascade(project, test_model_identifier): + # SYNAPSE HAS NO "DROP SCHEMA...CASCADE" + # so drop all test materializations, to allow drop my_seed + # "my_materialized_view" always created in setup(), so always need to be dropped before my_seed + for identifier in ["my_materialized_view", test_model_identifier]: + project.run_sql( + f""" + if object_id ('"{project.test_schema}"."{identifier}"','V') is not null + begin + drop view "{project.test_schema}"."{identifier}" + end + + if object_id ('"{project.test_schema}"."{identifier}"','U') is not null + begin + drop table "{project.test_schema}"."{identifier}" + end + """ + ) + # then drop object my_seed, to allow drop schema + project.run_sql( + f""" + if object_id ('"{project.test_schema}"."my_seed"','U') is not null + begin + drop table "{project.test_schema}"."my_seed" + end + """ + ) + # finally drop schema can proceed in setup function + + class TestMaterializedViewsBasicSynapse(MaterializedViewBasic): @pytest.fixture(scope="class", autouse=True) def models(self): @@ -48,7 +78,7 @@ def models(self): "my_materialized_view.sql": MY_MATERIALIZED_VIEW, } - @pytest.fixture(scope="class", 
autouse=True) + @pytest.fixture(scope="function", autouse=True) def setup(self, project, my_materialized_view): run_dbt(["seed"]) run_dbt(["run", "--models", my_materialized_view.identifier, "--full-refresh"]) @@ -60,6 +90,8 @@ def setup(self, project, my_materialized_view): # and then reset them after the test runs set_model_file(project, my_materialized_view, initial_model) + # Synapse no support "if exists" and "cascade" + project.run_sql(f"drop schema {project.test_schema}") def test_materialized_view_create(self, project): # check relation types @@ -67,7 +99,9 @@ def test_materialized_view_create(self, project): # sys.objects has no type "materialized view", it's type "view" "my_materialized_view": "view", } - return check_relation_types(project.adapter, expected) + check_relation_types(project.adapter, expected) + + drop_cascade(project, "my_materialized_view") def test_materialized_view_create_idempotent(self, project, my_materialized_view): # setup creates it once; verify it's there and run once @@ -80,25 +114,29 @@ def test_materialized_view_create_idempotent(self, project, my_materialized_view run_dbt(["run", "--models", my_materialized_view.identifier]) expected = { # sys.objects has no type "materialized view", it's type "view" - "my_materialized_view": "view", + my_materialized_view.identifier: "view", } check_relation_types(project.adapter, expected) + drop_cascade(project, my_materialized_view.identifier) + def test_materialized_view_full_refresh(self, project, my_materialized_view): _, logs = run_dbt_and_capture( ["--debug", "run", "--models", my_materialized_view.identifier, "--full-refresh"] ) expected = { # sys.objects has no type "materialized view", it's type "view" - "my_materialized_view": "view", + my_materialized_view.identifier: "view", } check_relation_types(project.adapter, expected) assert_message_in_logs(f"Applying REPLACE to: {my_materialized_view}", logs) + drop_cascade(project, my_materialized_view.identifier) + def 
test_materialized_view_replaces_table(self, project, my_table): run_dbt(["run", "--models", my_table.identifier]) expected = { - "my_table": "table", + my_table.identifier: "table", } check_relation_types(project.adapter, expected) @@ -107,14 +145,16 @@ def test_materialized_view_replaces_table(self, project, my_table): run_dbt(["run", "--models", my_table.identifier]) expected = { # sys.objects has no type "materialized view", it's type "view" - "my_table": "view", + my_table.identifier: "view", } check_relation_types(project.adapter, expected) + drop_cascade(project, my_table.identifier) + def test_materialized_view_replaces_view(self, project, my_view): run_dbt(["run", "--models", my_view.identifier]) expected = { - "my_view": "view", + my_view.identifier: "view", } check_relation_types(project.adapter, expected) @@ -123,15 +163,17 @@ def test_materialized_view_replaces_view(self, project, my_view): run_dbt(["run", "--models", my_view.identifier]) expected = { # sys.objects has no type "materialized view", it's type "view" - "my_view": "view", + my_view.identifier: "view", } check_relation_types(project.adapter, expected) + drop_cascade(project, my_view.identifier) + def test_table_replaces_materialized_view(self, project, my_materialized_view): run_dbt(["run", "--models", my_materialized_view.identifier]) expected = { # sys.objects has no type "materialized view", it's type "view" - "my_materialized_view": "view", + my_materialized_view.identifier: "view", } check_relation_types(project.adapter, expected) @@ -139,16 +181,17 @@ def test_table_replaces_materialized_view(self, project, my_materialized_view): run_dbt(["run", "--models", my_materialized_view.identifier]) expected = { - "my_materialized_view": "table", + my_materialized_view.identifier: "table", } check_relation_types(project.adapter, expected) + drop_cascade(project, my_materialized_view.identifier) + def test_view_replaces_materialized_view(self, project, my_materialized_view): - 
self.swap_table_to_materialized_view(project, my_materialized_view) # hotfix run_dbt(["run", "--models", my_materialized_view.identifier]) expected = { # sys.objects has no type "materialized view", it's type "view" - "my_materialized_view": "view", + my_materialized_view.identifier: "view", } check_relation_types(project.adapter, expected) @@ -156,10 +199,12 @@ def test_view_replaces_materialized_view(self, project, my_materialized_view): run_dbt(["run", "--models", my_materialized_view.identifier]) expected = { - "my_materialized_view": "view", + my_materialized_view.identifier: "view", } check_relation_types(project.adapter, expected) + drop_cascade(project, my_materialized_view.identifier) + @pytest.mark.skip(reason="Synapse materialized view is always updated") def test_materialized_view_only_updates_after_refresh( self, project, my_materialized_view, my_seed From 1746c91719c8934df7520388b5fb221393896065 Mon Sep 17 00:00:00 2001 From: nszoni Date: Tue, 20 Feb 2024 14:41:06 +0100 Subject: [PATCH 31/32] allow materialized simple copy test --- tests/functional/adapter/test_simple_copy.py | 87 ++++++++++++++++++-- 1 file changed, 78 insertions(+), 9 deletions(-) diff --git a/tests/functional/adapter/test_simple_copy.py b/tests/functional/adapter/test_simple_copy.py index 57cae1d7..bda9fcfb 100644 --- a/tests/functional/adapter/test_simple_copy.py +++ b/tests/functional/adapter/test_simple_copy.py @@ -1,13 +1,81 @@ from pathlib import Path import pytest +from dbt.adapters.factory import get_adapter_by_type from dbt.tests.adapter.simple_copy.fixtures import _SEEDS__SEED_UPDATE from dbt.tests.adapter.simple_copy.test_simple_copy import SimpleCopySetup -from dbt.tests.fixtures.project import TestProjInfo -from dbt.tests.util import check_relations_equal, rm_file, run_dbt, write_file +from dbt.tests.util import ( + check_relations_equal, + get_connection, + rm_file, + run_dbt, + run_sql_with_adapter, + write_file, +) + + +class TestProjInfoSynapse: + __test__ = False + 
+ def __init__( + self, + project_root, + profiles_dir, + adapter_type, + test_dir, + shared_data_dir, + test_data_dir, + test_schema, + database, + test_config, + ): + self.project_root = project_root + self.profiles_dir = profiles_dir + self.adapter_type = adapter_type + self.test_dir = test_dir + self.shared_data_dir = shared_data_dir + self.test_data_dir = test_data_dir + self.test_schema = test_schema + self.database = database + self.test_config = test_config + self.created_schemas = [] + + @property + def adapter(self): + # This returns the last created "adapter" from the adapter factory. Each + # dbt command will create a new one. This allows us to avoid patching the + # providers 'get_adapter' function. + return get_adapter_by_type(self.adapter_type) + + # Run sql from a path + def run_sql_file(self, sql_path, fetch=None): + with open(sql_path, "r") as f: + statements = f.read().split(";") + for statement in statements: + self.run_sql(statement, fetch) + + # Run sql from a string, using adapter saved at test startup + def run_sql(self, sql, fetch=None): + return run_sql_with_adapter(self.adapter, sql, fetch=fetch) + + # Create the unique test schema. Used in test setup, so that we're + # ready for initial sql prior to a run_dbt command. 
+ def create_test_schema(self, schema_name=None): + if schema_name is None: + schema_name = self.test_schema + with get_connection(self.adapter): + relation = self.adapter.Relation.create(database=self.database, schema=schema_name) + self.adapter.create_schema(relation) + self.created_schemas.append(schema_name) + + # Drop the unique test schema, usually called in test cleanup + def drop_test_schema(self): + with get_connection(self.adapter): + for schema_name in self.created_schemas: + relation = self.adapter.Relation.create(database=self.database, schema=schema_name) + self.adapter.drop_schema(relation) + self.created_schemas = [] - -class SynapseTestProjInfo(TestProjInfo): # This return a dictionary of table names to 'view' or 'table' values. # Override class because Synapse doesnt have 'ILIKE' def synapse_get_tables_in_schema(self): @@ -30,7 +98,7 @@ def synapse_get_tables_in_schema(self): @pytest.fixture def synapse_project(project): # Replace the original class with the new one - project.__class__ = SynapseTestProjInfo + project.__class__ = TestProjInfoSynapse return project @@ -63,20 +131,21 @@ def test_simple_copy(self, synapse_project): ["seed", "view_model", "incremental", "materialized", "get_and_ref"], ) - @pytest.mark.skip(reason="We are not supporting materialized views yet") + # in Synapse materialized views must be created with aggregation and distribution option def test_simple_copy_with_materialized_views(self, synapse_project): synapse_project.run_sql( f"create table {synapse_project.test_schema}.unrelated_table (id int)" ) sql = f""" - create materialized view {synapse_project.test_schema}.unrelated_materialized_view as ( - select * from {synapse_project.test_schema}.unrelated_table + create materialized view {synapse_project.test_schema}.unrelated_materialized_view + with ( distribution = round_robin ) as ( + select id from {synapse_project.test_schema}.unrelated_table group by id ) """ synapse_project.run_sql(sql) sql = f""" create view 
{synapse_project.test_schema}.unrelated_view as ( - select * from {synapse_project.test_schema}.unrelated_materialized_view + select id from {synapse_project.test_schema}.unrelated_materialized_view ) """ synapse_project.run_sql(sql) From 1ae430dd303767635fce349959bafa5296575578 Mon Sep 17 00:00:00 2001 From: nszoni Date: Tue, 20 Feb 2024 14:44:12 +0100 Subject: [PATCH 32/32] update changelog --- CHANGELOG.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e72a0e31..1a76fd61 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,12 @@ * important note! unlike [dbt's materialized view](https://docs.getdbt.com/docs/build/materializations), [Synapse's materialized view](https://learn.microsoft.com/en-us/sql/t-sql/statements/create-materialized-view-as-select-transact-sql?view=azure-sqldw-latest&context=%2Fazure%2Fsynapse-analytics%2Fcontext%2Fcontext) must be created using aggregation and/or "GROUP BY"! * ~~dbt clone ([#7258](https://github.com/dbt-labs/dbt-core/issues/7258)~~ Synapse does not support CLONE) * Revamp dbt debug ([#7104](https://github.com/dbt-labs/dbt-core/issues/7104)) +* Added new adapter zone tests + - constraints + - null_compare + - validate_sql + - equals + - dbt_clone ## v.1.5.0rc1