diff --git a/CHANGELOG.md b/CHANGELOG.md index 33e708e7..3b3b4404 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,21 @@ # Changelog +### v1.6.1 + +## Features + +* Fabric DW now supports sp_rename. Starting v1.6.1 sp_rename is a metadata operation +* Enabled table clone feature + +## Enhancements + +* Addressed [Issue 53](https://github.com/microsoft/dbt-fabric/issues/53) +* Added explicit support for [Issue 74 - ActiveDirectoryServicePrincipal authentication](https://github.com/microsoft/dbt-fabric/issues/74) +* Removed port number support in connection string as it is no longer required in Microsoft Fabric DW +* Removed MSI authentication as it does not make sense for Microsoft Fabric. +* Table lock hints are not supported by Fabric DW +* Supported authentication modes are ActiveDirectory* and AZ CLI + ### v1.6.0 ## Features diff --git a/dbt/adapters/fabric/__version__.py b/dbt/adapters/fabric/__version__.py index 38ec8ede..cead7e89 100644 --- a/dbt/adapters/fabric/__version__.py +++ b/dbt/adapters/fabric/__version__.py @@ -1 +1 @@ -version = "1.6.0" +version = "1.6.1" diff --git a/dbt/adapters/fabric/fabric_connection_manager.py b/dbt/adapters/fabric/fabric_connection_manager.py index d045e874..6b1349e5 100644 --- a/dbt/adapters/fabric/fabric_connection_manager.py +++ b/dbt/adapters/fabric/fabric_connection_manager.py @@ -9,13 +9,7 @@ import dbt.exceptions import pyodbc from azure.core.credentials import AccessToken -from azure.identity import ( - AzureCliCredential, - ClientSecretCredential, - DefaultAzureCredential, - EnvironmentCredential, - ManagedIdentityCredential, -) +from azure.identity import AzureCliCredential, DefaultAzureCredential, EnvironmentCredential from dbt.adapters.sql import SQLConnectionManager from dbt.clients.agate_helper import empty_table from dbt.contracts.connection import AdapterResponse, Connection, ConnectionState @@ -113,24 +107,6 @@ def get_cli_access_token(credentials: FabricCredentials) -> AccessToken: return token -def 
get_msi_access_token(credentials: FabricCredentials) -> AccessToken: - """ - Get an Azure access token from the system's managed identity - - Parameters - ----------- - credentials: FabricCredentials - Credentials. - - Returns - ------- - out : AccessToken - The access token. - """ - token = ManagedIdentityCredential().get_token(AZURE_CREDENTIAL_SCOPE) - return token - - def get_auto_access_token(credentials: FabricCredentials) -> AccessToken: """ Get an Azure access token automatically through azure-identity @@ -167,30 +143,8 @@ def get_environment_access_token(credentials: FabricCredentials) -> AccessToken: return token -def get_sp_access_token(credentials: FabricCredentials) -> AccessToken: - """ - Get an Azure access token using the SP credentials. - - Parameters - ---------- - credentials : FabricCredentials - Credentials. - - Returns - ------- - out : AccessToken - The access token. - """ - token = ClientSecretCredential( - str(credentials.tenant_id), str(credentials.client_id), str(credentials.client_secret) - ).get_token(AZURE_CREDENTIAL_SCOPE) - return token - - AZURE_AUTH_FUNCTIONS: Mapping[str, AZURE_AUTH_FUNCTION_TYPE] = { - "serviceprincipal": get_sp_access_token, "cli": get_cli_access_token, - "msi": get_msi_access_token, "auto": get_auto_access_token, "environment": get_environment_access_token, } @@ -335,7 +289,7 @@ def open(cls, connection: Connection) -> Connection: # SQL Server named instance. In this case then port number has to be omitted. 
con_str.append(f"SERVER={credentials.host}") else: - con_str.append(f"SERVER={credentials.host},{credentials.port}") + con_str.append(f"SERVER={credentials.host}") con_str.append(f"Database={credentials.database}") @@ -347,14 +301,16 @@ def open(cls, connection: Connection) -> Connection: if credentials.authentication == "ActiveDirectoryPassword": con_str.append(f"UID={{{credentials.UID}}}") con_str.append(f"PWD={{{credentials.PWD}}}") + if credentials.authentication == "ActiveDirectoryServicePrincipal": + con_str.append(f"UID={{{credentials.client_id}}}") + con_str.append(f"PWD={{{credentials.client_secret}}}") elif credentials.authentication == "ActiveDirectoryInteractive": con_str.append(f"UID={{{credentials.UID}}}") elif credentials.windows_login: con_str.append("trusted_connection=Yes") elif credentials.authentication == "sql": - con_str.append(f"UID={{{credentials.UID}}}") - con_str.append(f"PWD={{{credentials.PWD}}}") + raise pyodbc.DatabaseError("SQL Authentication is not supported by Microsoft Fabric") # https://docs.microsoft.com/en-us/sql/relational-databases/native-client/features/using-encryption-without-validation?view=sql-server-ver15 assert credentials.encrypt is not None diff --git a/dbt/adapters/fabric/fabric_credentials.py b/dbt/adapters/fabric/fabric_credentials.py index 7e3a79e3..60da8b4f 100644 --- a/dbt/adapters/fabric/fabric_credentials.py +++ b/dbt/adapters/fabric/fabric_credentials.py @@ -10,14 +10,13 @@ class FabricCredentials(Credentials): host: str database: str schema: str - port: Optional[int] = 1433 UID: Optional[str] = None PWD: Optional[str] = None windows_login: Optional[bool] = False tenant_id: Optional[str] = None client_id: Optional[str] = None client_secret: Optional[str] = None - authentication: Optional[str] = "sql" + authentication: Optional[str] = "ActiveDirectoryServicePrincipal" encrypt: Optional[bool] = True # default value in MS ODBC Driver 18 as well trust_cert: Optional[bool] = False # default value in MS ODBC Driver 
18 as well retries: int = 1 @@ -53,7 +52,6 @@ def _connection_keys(self): "server", "database", "schema", - "port", "UID", "client_id", "authentication", diff --git a/dbt/include/fabric/macros/adapters/metadata.sql b/dbt/include/fabric/macros/adapters/metadata.sql index ea4db857..47ceba34 100644 --- a/dbt/include/fabric/macros/adapters/metadata.sql +++ b/dbt/include/fabric/macros/adapters/metadata.sql @@ -1,12 +1,3 @@ -{% macro use_database_hint() %} - {{ return(adapter.dispatch('use_database_hint')()) }} -{% endmacro %} - -{% macro default__use_database_hint() %}{% endmacro %} -{% macro fabric__use_database_hint() %} - {# USE [{{ relation.database }}]; #} -{% endmacro %} - {% macro information_schema_hints() %} {{ return(adapter.dispatch('information_schema_hints')()) }} {% endmacro %} @@ -24,7 +15,7 @@ name as principal_name, principal_id as principal_id from - sys.database_principals + sys.database_principals {{ information_schema_hints() }} ), schemas as ( @@ -33,7 +24,7 @@ schema_id as schema_id, principal_id as principal_id from - sys.schemas + sys.schemas {{ information_schema_hints() }} ), tables as ( @@ -43,7 +34,7 @@ principal_id as principal_id, 'BASE TABLE' as table_type from - sys.tables + sys.tables {{ information_schema_hints() }} ), tables_with_metadata as ( @@ -64,7 +55,7 @@ principal_id as principal_id, 'VIEW' as table_type from - sys.views + sys.views {{ information_schema_hints() }} ), views_with_metadata as ( @@ -107,7 +98,7 @@ column_name, ordinal_position as column_index, data_type as column_type - from INFORMATION_SCHEMA.COLUMNS + from INFORMATION_SCHEMA.COLUMNS {{ information_schema_hints() }} ) @@ -138,9 +129,9 @@ {% macro fabric__list_schemas(database) %} {% call statement('list_schemas', fetch_result=True, auto_begin=False) -%} - {{ use_database_hint() }} + select name as [schema] - from sys.schemas + from sys.schemas {{ information_schema_hints() }} {% endcall %} {{ return(load_result('list_schemas').table) }} {% endmacro %} @@ -164,7 
+155,7 @@ else table_type end as table_type - from [{{ schema_relation.database }}].INFORMATION_SCHEMA.TABLES + from [{{ schema_relation.database }}].INFORMATION_SCHEMA.TABLES {{ information_schema_hints() }} where table_schema like '{{ schema_relation.schema }}' {% endcall %} {{ return(load_result('list_relations_without_caching').table) }} diff --git a/dbt/include/fabric/macros/adapters/relation.sql b/dbt/include/fabric/macros/adapters/relation.sql index 09c2533d..585fb83b 100644 --- a/dbt/include/fabric/macros/adapters/relation.sql +++ b/dbt/include/fabric/macros/adapters/relation.sql @@ -16,7 +16,7 @@ {% if relation.type == 'view' -%} {% call statement('find_references', fetch_result=true) %} - {{ use_database_hint() }} + USE [{{ relation.database }}]; select sch.name as schema_name, obj.name as view_name @@ -38,118 +38,20 @@ type="view", path={"schema": reference[0], "identifier": reference[1]})) }} {% endfor %} - {% elif relation.type == 'table'%} - {%- else -%} - {{ exceptions.raise_not_implemented('Invalid relation being dropped: ' ~ relation) }} - {% endif %} - - {{ use_database_hint() }} + {% elif relation.type == 'table'%} + {% set object_id_type = 'U' %} + {%- else -%} + {{ exceptions.raise_not_implemented('Invalid relation being dropped: ' ~ relation) }} + {% endif %} + USE [{{ relation.database }}]; EXEC('DROP {{ relation.type }} IF EXISTS {{ relation.include(database=False) }};'); - {% endmacro %} {% macro fabric__rename_relation(from_relation, to_relation) -%} - {% if to_relation.type == 'view' %} - {% call statement('get_view_definition', fetch_result=True) %} - SELECT m.[definition] AS VIEW_DEFINITION - FROM sys.objects o - INNER JOIN sys.sql_modules m - ON m.[object_id] = o.[object_id] - INNER JOIN sys.views v - ON o.[object_id] = v.[object_id] - INNER JOIN sys.schemas s - ON o.schema_id = s.schema_id - AND s.schema_id = v.schema_id - WHERE s.name = '{{ from_relation.schema }}' - AND v.name = '{{ from_relation.identifier }}' - AND o.[type] = 'V'; 
- {% endcall %} - - {% set view_def_full = load_result('get_view_definition')['data'][0][0] %} - {# Jinja does not allow bitwise operators and we need re.I | re.M here. So calculated manually this becomes 10. #} - {% set final_view_sql = modules.re.sub("create\s+view\s+.*?\s+as\s+","",view_def_full, 10) %} - - {% call statement('create_new_view') %} - {{ create_view_as(to_relation, final_view_sql) }} - {% endcall %} - {% call statement('drop_old_view') %} - EXEC('DROP VIEW IF EXISTS {{ from_relation.include(database=False) }};'); - {% endcall %} - {% endif %} - {% if to_relation.type == 'table' %} - {% call statement('rename_relation') %} - EXEC('create table {{ to_relation.include(database=False) }} as select * from {{ from_relation.include(database=False) }}'); - {%- endcall %} - -- Getting constraints from the old table - {% call statement('get_table_constraints', fetch_result=True) %} - SELECT DISTINCT Contraint_statement FROM - ( - SELECT DISTINCT - CASE - WHEN tc.CONSTRAINT_TYPE = 'PRIMARY KEY' - THEN 'ALTER TABLE <> ADD CONSTRAINT PK_<>_'+ccu.COLUMN_NAME+' PRIMARY KEY NONCLUSTERED('+ccu.COLUMN_NAME+') NOT ENFORCED' - WHEN tc.CONSTRAINT_TYPE = 'UNIQUE' - THEN 'ALTER TABLE <> ADD CONSTRAINT UK_<>_'+ccu.COLUMN_NAME+' UNIQUE NONCLUSTERED('+ccu.COLUMN_NAME+') NOT ENFORCED' - END AS Contraint_statement - FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS tc INNER JOIN - INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE ccu - ON tc.CONSTRAINT_NAME = ccu.CONSTRAINT_NAME - WHERE tc.TABLE_NAME = '{{ from_relation.identifier }}' and tc.TABLE_SCHEMA = '{{ from_relation.schema }}' - UNION ALL - SELECT - 'ALTER TABLE <> ADD CONSTRAINT FK_<>_'+CU.COLUMN_NAME+' FOREIGN KEY('+CU.COLUMN_NAME+') references '+PK.TABLE_SCHEMA+'.'+PK.TABLE_NAME+' ('+PT.COLUMN_NAME+') not enforced' AS Contraint_statement - FROM INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS C - INNER JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS FK ON C.CONSTRAINT_NAME = FK.CONSTRAINT_NAME - INNER JOIN 
INFORMATION_SCHEMA.TABLE_CONSTRAINTS PK ON C.UNIQUE_CONSTRAINT_NAME=PK.CONSTRAINT_NAME - INNER JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE CU ON C.CONSTRAINT_NAME = CU.CONSTRAINT_NAME - INNER JOIN ( - SELECT i1.TABLE_NAME, i2.COLUMN_NAME, i1.TABLE_SCHEMA, i2.TABLE_SCHEMA AS CU_TableSchema - FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS i1 - INNER JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE i2 ON i1.CONSTRAINT_NAME =i2.CONSTRAINT_NAME - WHERE i1.CONSTRAINT_TYPE = 'PRIMARY KEY' - ) PT ON PT.TABLE_NAME = PK.TABLE_NAME AND PT.TABLE_SCHEMA = PK.TABLE_SCHEMA AND PT.CU_TableSchema = PK.TABLE_SCHEMA - WHERE FK.TABLE_NAME = '{{ from_relation.identifier }}' and FK.TABLE_SCHEMA = '{{ from_relation.schema }}' - and PK.TABLE_SCHEMA = '{{ from_relation.schema }}' and PT.TABLE_SCHEMA = '{{ from_relation.schema }}' - ) T WHERE Contraint_statement IS NOT NULL - {% endcall %} - - {%call statement('drop_table_constraints', fetch_result= True)%} - SELECT drop_constraint_statement FROM - ( - SELECT 'ALTER TABLE ['+TABLE_SCHEMA+'].['+TABLE_NAME+'] DROP CONSTRAINT ' + CONSTRAINT_NAME AS drop_constraint_statement - FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS - WHERE TABLE_NAME = '{{ from_relation.identifier }}' and TABLE_SCHEMA = '{{ from_relation.schema }}' - ) T WHERE drop_constraint_statement IS NOT NULL - - {% endcall %} - - {% set references = load_result('get_table_constraints')['data'] %} - {% set drop_references = load_result('drop_table_constraints')['data'] %} - - {% for reference in drop_references -%} - {% set drop_constraint = reference[0]%} - - {% call statement('Drop_Constraints') %} - {{ log("Constraints to drop: "~reference[0], info=True) }} - EXEC('{{drop_constraint}}'); - {% endcall %} - {% endfor %} - - {% set targetTableNameConstraint = to_relation.include(database=False)%} - {% set targetTableNameConstraint = (targetTableNameConstraint|string).strip().replace("\"","").replace(".","_")%} - {% set targetTableName = to_relation.include(database=False) %} - - {% for reference in 
references -%} - {% set constraint_name = reference[0].replace("<>",targetTableNameConstraint)%} - {% set alter_create_table_constraint_script = constraint_name.replace("<>", (targetTableName|string).strip()) %} - {{ log("Constraints to create: "~alter_create_table_constraint_script, info=True) }} - {% call statement('Drop_Create_Constraints') %} - EXEC('{{alter_create_table_constraint_script}}'); - {% endcall %} - {% endfor %} - - {{ fabric__drop_relation(from_relation) }} - {% endif %} + {% call statement('rename_relation') -%} + USE [{{ from_relation.database }}]; + EXEC sp_rename '{{ from_relation.schema }}.{{ from_relation.identifier }}', '{{ to_relation.identifier }}' + {%- endcall %} {% endmacro %} -- DROP fabric__truncate_relation when TRUNCATE TABLE is supported diff --git a/dbt/include/fabric/macros/adapters/schema.sql b/dbt/include/fabric/macros/adapters/schema.sql index 298d585c..7fa10837 100644 --- a/dbt/include/fabric/macros/adapters/schema.sql +++ b/dbt/include/fabric/macros/adapters/schema.sql @@ -1,6 +1,6 @@ {% macro fabric__create_schema(relation) -%} {% call statement('create_schema') -%} - {{ use_database_hint() }} + USE [{{ relation.database }}]; IF NOT EXISTS (SELECT * FROM sys.schemas WHERE name = '{{ relation.schema }}') BEGIN EXEC('CREATE SCHEMA [{{ relation.schema }}]') @@ -10,7 +10,7 @@ {% macro fabric__create_schema_with_authorization(relation, schema_authorization) -%} {% call statement('create_schema') -%} - {{ use_database_hint() }} + USE [{{ relation.database }}]; IF NOT EXISTS (SELECT * FROM sys.schemas WHERE name = '{{ relation.schema }}') BEGIN EXEC('CREATE SCHEMA [{{ relation.schema }}] AUTHORIZATION [{{ schema_authorization }}]') diff --git a/dbt/include/fabric/macros/materializations/models/table/clone.sql b/dbt/include/fabric/macros/materializations/models/table/clone.sql new file mode 100644 index 00000000..110dd03f --- /dev/null +++ b/dbt/include/fabric/macros/materializations/models/table/clone.sql @@ -0,0 +1,48 @@ +{% 
macro fabric__can_clone_table() %} + {{ return(True) }} +{% endmacro %} + +{% macro fabric__create_or_replace_clone(target_relation, defer_relation) %} + CREATE TABLE {{target_relation}} + AS CLONE OF {{defer_relation}} +{% endmacro %} + + +{%- materialization clone, adapter='fabric' -%} + + {%- set relations = {'relations': []} -%} + {%- if not defer_relation -%} + -- nothing to do + {{ log("No relation found in state manifest for " ~ model.unique_id, info=True) }} + {{ return(relations) }} + {%- endif -%} + + {%- set other_existing_relation = load_cached_relation(defer_relation) -%} + {% set can_clone_table = can_clone_table() %} + + {%- if other_existing_relation and other_existing_relation.type == 'table' and can_clone_table -%} + {%- set target_relation = this.incorporate(type='table') -%} + + {% call statement('main') %} + {{ fabric__drop_relation_script(target_relation) }} + {{ create_or_replace_clone(target_relation, defer_relation) }} + {% endcall %} + {{ return({'relations': [target_relation]}) }} + {%- else -%} + + {%- set target_relation = this.incorporate(type='view') -%} + + -- reuse the view materialization + -- TODO: support actual dispatch for materialization macros + -- Tracking ticket: https://github.com/dbt-labs/dbt-core/issues/7799 + {% set search_name = "materialization_view_" ~ adapter.type() %} + {% if not search_name in context %} + {% set search_name = "materialization_view_default" %} + {% endif %} + {% set materialization_macro = context[search_name] %} + {% set relations = materialization_macro() %} + {{ return(relations) }} + {%- endif -%} + + +{%- endmaterialization -%} diff --git a/dbt/include/fabric/macros/materializations/models/table/create_table_as.sql b/dbt/include/fabric/macros/materializations/models/table/create_table_as.sql index e45c9a9f..4ac51460 100644 --- a/dbt/include/fabric/macros/materializations/models/table/create_table_as.sql +++ b/dbt/include/fabric/macros/materializations/models/table/create_table_as.sql @@ -11,8 
+11,8 @@ {{ fabric__create_view_as(tmp_relation, sql) }} {% if contract_config.enforced %} - CREATE TABLE {{ relation.include(database=False) }} - {{ fabric__table_columns_and_constraints(relation.include(database=False)) }} + CREATE TABLE [{{relation.database}}].[{{relation.schema}}].[{{relation.identifier}}] + {{ fabric__table_columns_and_constraints(relation) }} {{ get_assert_columns_equivalent(sql) }} {% set listColumns %} @@ -21,11 +21,11 @@ {% endfor %} {%endset%} - INSERT INTO {{ relation.include(database=False) }} - ({{listColumns}}) SELECT {{listColumns}} FROM {{ tmp_relation.include(database=False) }}; + INSERT INTO [{{relation.database}}].[{{relation.schema}}].[{{relation.identifier}}] + ({{listColumns}}) SELECT {{listColumns}} FROM [{{tmp_relation.database}}].[{{tmp_relation.schema}}].[{{tmp_relation.identifier}}]; {%- else %} - EXEC('CREATE TABLE {{ relation.include(database=False) }} AS (SELECT * FROM {{ tmp_relation.include(database=False) }});'); + EXEC('CREATE TABLE [{{relation.database}}].[{{relation.schema}}].[{{relation.identifier}}] AS (SELECT * FROM [{{tmp_relation.database}}].[{{tmp_relation.schema}}].[{{tmp_relation.identifier}}]);'); {% endif %} {{ fabric__drop_relation_script(tmp_relation) }} diff --git a/dbt/include/fabric/macros/materializations/models/view/create_view_as.sql b/dbt/include/fabric/macros/materializations/models/view/create_view_as.sql index 7ae9cee6..0cfdd061 100644 --- a/dbt/include/fabric/macros/materializations/models/view/create_view_as.sql +++ b/dbt/include/fabric/macros/materializations/models/view/create_view_as.sql @@ -5,8 +5,7 @@ {% macro fabric__create_view_exec(relation, sql) -%} {%- set temp_view_sql = sql.replace("'", "''") -%} - {{ use_database_hint() }} - + USE [{{ relation.database }}]; {% set contract_config = config.get('contract') %} {% if contract_config.enforced %} {{ get_assert_columns_equivalent(sql) }} diff --git a/dbt/include/fabric/macros/materializations/snapshots/snapshot.sql 
b/dbt/include/fabric/macros/materializations/snapshots/snapshot.sql index a8719c86..3b5ddc73 100644 --- a/dbt/include/fabric/macros/materializations/snapshots/snapshot.sql +++ b/dbt/include/fabric/macros/materializations/snapshots/snapshot.sql @@ -15,12 +15,12 @@ {% endset %} {% set tempTableName %} - {{ relation.schema }}.{{ relation.identifier }}_{{ range(1300, 19000) | random }} + [{{relation.database}}].[{{ relation.schema }}].[{{ relation.identifier }}_{{ range(1300, 19000) | random }}] {% endset %} {% set tempTable %} CREATE TABLE {{tempTableName}} - AS SELECT * {{columns}} FROM {{ relation.schema }}.{{ relation.identifier }} + AS SELECT * {{columns}} FROM [{{relation.database}}].[{{ relation.schema }}].[{{ relation.identifier }}] {{ information_schema_hints() }} {% endset %} {% call statement('create_temp_table') -%} @@ -28,7 +28,7 @@ {%- endcall %} {% set dropTable %} - DROP TABLE {{ relation.schema }}.{{ relation.identifier }} + DROP TABLE [{{relation.database}}].[{{ relation.schema }}].[{{ relation.identifier }}] {% endset %} {% call statement('drop_table') -%} @@ -36,8 +36,8 @@ {%- endcall %} {% set createTable %} - CREATE TABLE {{ relation.schema }}.{{ relation.identifier }} - AS SELECT * FROM {{tempTableName}} + CREATE TABLE {{ relation }} + AS SELECT * FROM {{tempTableName}} {{ information_schema_hints() }} {% endset %} {% call statement('create_Table') -%} diff --git a/tests/conftest.py b/tests/conftest.py index 828b13e2..1e3670cb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -43,7 +43,6 @@ def _all_profiles_base(): return { "type": "fabric", "driver": os.getenv("FABRIC_TEST_DRIVER", "ODBC Driver 18 for SQL Server"), - "port": int(os.getenv("FABRIC_TEST_PORT", "1433")), "retries": 2, } diff --git a/tests/functional/adapter/test_constraints.py b/tests/functional/adapter/test_constraints.py index cb38a76a..4a94c640 100644 --- a/tests/functional/adapter/test_constraints.py +++ b/tests/functional/adapter/test_constraints.py @@ -502,8 +502,8 
@@ def test__constraints_ddl(self, project, expected_sql): generated_sql_generic = _find_and_replace( generated_sql_generic, "foreign_key_model", "" ) - - assert _normalize_whitespace(expected_sql) == _normalize_whitespace(generated_sql_generic) + generated_sql_wodb = generated_sql_generic.replace("USE [" + project.database + "];", "") + assert _normalize_whitespace(expected_sql) == _normalize_whitespace(generated_sql_wodb) class TestTableConstraintsRuntimeDdlEnforcement(BaseConstraintsRuntimeDdlEnforcement): @@ -563,8 +563,8 @@ def test__model_constraints_ddl(self, project, expected_sql): generated_sql_generic = _find_and_replace( generated_sql_generic, "foreign_key_model", "" ) - - assert _normalize_whitespace(expected_sql) == _normalize_whitespace(generated_sql_generic) + generated_sql_wodb = generated_sql_generic.replace("USE [" + project.database + "];", "") + assert _normalize_whitespace(expected_sql) == _normalize_whitespace(generated_sql_wodb) class TestModelConstraintsRuntimeEnforcement(BaseModelConstraintsRuntimeEnforcement): diff --git a/tests/functional/adapter/test_dbt_clone.py b/tests/functional/adapter/test_dbt_clone.py new file mode 100644 index 00000000..ba3d6064 --- /dev/null +++ b/tests/functional/adapter/test_dbt_clone.py @@ -0,0 +1,236 @@ +import os +import shutil +from collections import Counter +from copy import deepcopy + +import pytest +from dbt.exceptions import DbtRuntimeError +from dbt.tests.adapter.dbt_clone.fixtures import ( + custom_can_clone_tables_false_macros_sql, + ephemeral_model_sql, + exposures_yml, + get_schema_name_sql, + infinite_macros_sql, + macros_sql, + schema_yml, + seed_csv, + snapshot_sql, + table_model_sql, + view_model_sql, +) +from dbt.tests.util import run_dbt + + +class BaseClone: + @pytest.fixture(scope="class") + def models(self): + return { + "table_model.sql": table_model_sql, + "view_model.sql": view_model_sql, + "ephemeral_model.sql": ephemeral_model_sql, + "schema.yml": schema_yml, + "exposures.yml": 
exposures_yml, + } + + @pytest.fixture(scope="class") + def macros(self): + return { + "macros.sql": macros_sql, + "infinite_macros.sql": infinite_macros_sql, + "get_schema_name.sql": get_schema_name_sql, + } + + @pytest.fixture(scope="class") + def seeds(self): + return { + "seed.csv": seed_csv, + } + + @pytest.fixture(scope="class") + def snapshots(self): + return { + "snapshot.sql": snapshot_sql, + } + + @pytest.fixture(scope="class") + def other_schema(self, unique_schema): + return unique_schema + "_other" + + @property + def project_config_update(self): + return { + "seeds": { + "test": { + "quote_columns": False, + } + } + } + + @pytest.fixture(scope="class") + def profiles_config_update(self, dbt_profile_target, unique_schema, other_schema): + outputs = {"default": dbt_profile_target, "otherschema": deepcopy(dbt_profile_target)} + outputs["default"]["schema"] = unique_schema + outputs["otherschema"]["schema"] = other_schema + return {"test": {"outputs": outputs, "target": "default"}} + + def copy_state(self, project_root): + state_path = os.path.join(project_root, "state") + if not os.path.exists(state_path): + os.makedirs(state_path) + shutil.copyfile( + f"{project_root}/target/manifest.json", f"{project_root}/state/manifest.json" + ) + + def run_and_save_state(self, project_root, with_snapshot=False): + results = run_dbt(["seed"]) + assert len(results) == 1 + assert not any(r.node.deferred for r in results) + results = run_dbt(["run"]) + assert len(results) == 2 + assert not any(r.node.deferred for r in results) + results = run_dbt(["test"]) + assert len(results) == 2 + + if with_snapshot: + results = run_dbt(["snapshot"]) + assert len(results) == 1 + assert not any(r.node.deferred for r in results) + + # copy files + self.copy_state(project_root) + + +# -- Below we define base classes for tests you import the one based on if your adapter uses dbt clone or not -- +class BaseClonePossible(BaseClone): + def test_can_clone_true(self, project, unique_schema, 
other_schema): + project.create_test_schema(other_schema) + self.run_and_save_state(project.project_root, with_snapshot=True) + + clone_args = [ + "clone", + "--state", + "state", + "--target", + "otherschema", + ] + + results = run_dbt(clone_args) + assert len(results) == 4 + + schema_relations = project.adapter.list_relations( + database=project.database, schema=other_schema + ) + types = [r.type for r in schema_relations] + count_types = Counter(types) + assert count_types == Counter({"table": 3, "view": 1}) + + # objects already exist, so this is a no-op + results = run_dbt(clone_args) + assert len(results) == 4 + assert all("ok" in r.message.lower() for r in results) + + # recreate all objects + results = run_dbt([*clone_args, "--full-refresh"]) + assert len(results) == 4 + + # select only models this time + results = run_dbt([*clone_args, "--resource-type", "model"]) + assert len(results) == 2 + assert all("ok" in r.message.lower() for r in results) + + def test_clone_no_state(self, project, unique_schema, other_schema): + project.create_test_schema(other_schema) + self.run_and_save_state(project.project_root, with_snapshot=True) + + clone_args = [ + "clone", + "--target", + "otherschema", + ] + + with pytest.raises( + DbtRuntimeError, + match="--state or --defer-state are required for deferral, but neither was provided", + ): + run_dbt(clone_args) + + +class BaseCloneNotPossible(BaseClone): + @pytest.fixture(scope="class") + def macros(self): + return { + "macros.sql": macros_sql, + "my_can_clone_tables.sql": custom_can_clone_tables_false_macros_sql, + "infinite_macros.sql": infinite_macros_sql, + "get_schema_name.sql": get_schema_name_sql, + } + + def test_can_clone_false(self, project, unique_schema, other_schema): + project.create_test_schema(other_schema) + self.run_and_save_state(project.project_root, with_snapshot=True) + + clone_args = [ + "clone", + "--state", + "state", + "--target", + "otherschema", + ] + + results = run_dbt(clone_args) + assert 
len(results) == 4 + + schema_relations = project.adapter.list_relations( + database=project.database, schema=other_schema + ) + assert all(r.type == "view" for r in schema_relations) + + # objects already exist, so this is a no-op + results = run_dbt(clone_args) + assert len(results) == 4 + assert all("ok" in r.message.lower() for r in results) + + # recreate all objects + results = run_dbt([*clone_args, "--full-refresh"]) + assert len(results) == 4 + + # select only models this time + results = run_dbt([*clone_args, "--resource-type", "model"]) + assert len(results) == 2 + assert all("ok" in r.message.lower() for r in results) + + +class TestFabricCloneNotPossible(BaseCloneNotPossible): + @pytest.fixture(autouse=True) + def clean_up(self, project): + yield + with project.adapter.connection_named("__test"): + relation = project.adapter.Relation.create( + database=project.database, schema=f"{project.test_schema}_seeds" + ) + project.adapter.drop_schema(relation) + + relation = project.adapter.Relation.create( + database=project.database, schema=project.test_schema + ) + project.adapter.drop_schema(relation) + + pass + + +class TestFabricClonePossible(BaseClonePossible): + @pytest.fixture(autouse=True) + def clean_up(self, project): + yield + with project.adapter.connection_named("__test"): + relation = project.adapter.Relation.create( + database=project.database, schema=f"{project.test_schema}_seeds" + ) + project.adapter.drop_schema(relation) + + relation = project.adapter.Relation.create( + database=project.database, schema=project.test_schema + ) + project.adapter.drop_schema(relation) + + pass diff --git a/tests/functional/adapter/test_query_comment.py b/tests/functional/adapter/test_query_comment.py index 8bf02bfe..19605f04 100644 --- a/tests/functional/adapter/test_query_comment.py +++ b/tests/functional/adapter/test_query_comment.py @@ -1,32 +1,158 @@ -from dbt.tests.adapter.query_comment.test_query_comment import ( - BaseEmptyQueryComments, - 
BaseMacroArgsQueryComments, - BaseMacroInvalidQueryComments, - BaseMacroQueryComments, - BaseNullQueryComments, - BaseQueryComments, -) +import json +import pytest +from dbt.exceptions import DbtRuntimeError +from dbt.tests.util import run_dbt_and_capture +from dbt.version import __version__ as dbt_version -class TestQueryCommentsFabric(BaseQueryComments): +MACROS__MACRO_SQL = """ +{%- macro query_header_no_args() -%} +{%- set x = "are pretty cool" -%} +{{ "dbt macros" }} +{{ x }} +{%- endmacro -%} + + +{%- macro query_header_args(message) -%} + {%- set comment_dict = dict( + app='dbt++', + macro_version='0.1.0', + dbt_version=dbt_version, + message='blah: '~ message) -%} + {{ return(comment_dict) }} +{%- endmacro -%} + + +{%- macro ordered_to_json(dct) -%} +{{ tojson(dct, sort_keys=True) }} +{%- endmacro %} + + +{% macro invalid_query_header() -%} +{{ "Here is an invalid character for you: */" }} +{% endmacro %} + +""" + +MODELS__X_SQL = """ +{% do run_query('select 2 as inner_id') %} +select 1 as outer_id +""" + + +class BaseDefaultQueryComments: + @pytest.fixture(scope="class") + def models(self): + return { + "x.sql": MODELS__X_SQL, + } + + @pytest.fixture(scope="class") + def macros(self): + return { + "macro.sql": MACROS__MACRO_SQL, + } + + def run_get_json(self, expect_pass=True): + res, raw_logs = run_dbt_and_capture( + ["--debug", "--log-format=json", "run"], expect_pass=expect_pass + ) + + # empty lists evaluate as False + assert len(res) > 0 + return raw_logs + + +# Base setup to be inherited # +class BaseQueryComments(BaseDefaultQueryComments): + @pytest.fixture(scope="class") + def project_config_update(self): + return {"query-comment": "pradeep"} + + def test_matches_comment(self, project) -> bool: + logs = self.run_get_json() + assert r"pradeep" in logs + + +class BaseMacroQueryComments(BaseDefaultQueryComments): + @pytest.fixture(scope="class") + def project_config_update(self): + return {"query-comment": "{{ query_header_no_args() }}"} + + def 
test_matches_comment(self, project) -> bool: + logs = self.run_get_json() + assert r"/* dbt macros\nare pretty cool */\n" in logs + + +class BaseMacroArgsQueryComments(BaseDefaultQueryComments): + @pytest.fixture(scope="class") + def project_config_update(self): + return {"query-comment": "{{ return(ordered_to_json(query_header_args(target.name))) }}"} + + def test_matches_comment(self, project) -> bool: + logs = self.run_get_json() + expected_dct = { + "app": "dbt++", + "dbt_version": dbt_version, + "macro_version": "0.1.0", + "message": f"blah: {project.adapter.config.target_name}", + } + expected = r"/* {} */\n".format(json.dumps(expected_dct, sort_keys=True)).replace( + '"', r"\"" + ) + assert expected in logs + + +class BaseMacroInvalidQueryComments(BaseDefaultQueryComments): + @pytest.fixture(scope="class") + def project_config_update(self): + return {"query-comment": "{{ invalid_query_header() }}"} + + def test_run_assert_comments(self, project): + with pytest.raises(DbtRuntimeError): + self.run_get_json(expect_pass=False) + + +class BaseNullQueryComments(BaseDefaultQueryComments): + @pytest.fixture(scope="class") + def project_config_update(self): + return {"query-comment": None} + + def test_matches_comment(self, project) -> bool: + logs = self.run_get_json() + assert "/*" not in logs or "*/" not in logs + + +class BaseEmptyQueryComments(BaseDefaultQueryComments): + @pytest.fixture(scope="class") + def project_config_update(self): + return {"query-comment": ""} + + def test_matches_comment(self, project) -> bool: + logs = self.run_get_json() + assert "/*" not in logs or "*/" not in logs + + +# Tests # +class TestQueryComments(BaseQueryComments): pass -class TestMacroQueryCommentsFabric(BaseMacroQueryComments): +class TestMacroQueryComments(BaseMacroQueryComments): pass -class TestMacroArgsQueryCommentsFabric(BaseMacroArgsQueryComments): +class TestMacroArgsQueryComments(BaseMacroArgsQueryComments): pass -class 
TestMacroInvalidQueryCommentsFabric(BaseMacroInvalidQueryComments): +class TestMacroInvalidQueryComments(BaseMacroInvalidQueryComments): pass -class TestNullQueryCommentsFabric(BaseNullQueryComments): +class TestNullQueryComments(BaseNullQueryComments): pass -class TestEmptyQueryCommentsFabric(BaseEmptyQueryComments): +class TestEmptyQueryComments(BaseEmptyQueryComments): pass