
Commit

Merge pull request #202 from microsoft/dbtsynapse1.6_dataroots
v1.6.0rc1
prdpsvs authored Feb 22, 2024
2 parents df19a7a + 1ae430d commit 8c117ef
Showing 21 changed files with 857 additions and 256 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/integration-tests-azure.yml
@@ -14,7 +14,7 @@ jobs:
name: Integration tests on Azure
strategy:
matrix:
python_version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
python_version: ["3.8", "3.9", "3.10", "3.11"]
msodbc_version: ["17", "18"]
runs-on: ubuntu-latest
permissions:
19 changes: 19 additions & 0 deletions CHANGELOG.md
@@ -1,4 +1,23 @@
# Changelog
## v1.6.0rc1

* Support for [dbt-core 1.6](https://github.com/dbt-labs/dbt-core/releases/tag/v1.6.0)

#### Breaking Changes
* Dropped support for Python 3.7 ([#7082](https://github.com/dbt-labs/dbt-core/issues/7082))

#### Features
* Add support for materialized views ([#6911](https://github.com/dbt-labs/dbt-core/issues/6911))
* Important note: unlike [dbt's materialized views](https://docs.getdbt.com/docs/build/materializations), [Synapse's materialized views](https://learn.microsoft.com/en-us/sql/t-sql/statements/create-materialized-view-as-select-transact-sql?view=azure-sqldw-latest&context=%2Fazure%2Fsynapse-analytics%2Fcontext%2Fcontext) must be defined with an aggregation and/or a `GROUP BY` clause; a minimal example model is sketched after this changelog diff.
* ~~dbt clone ([#7258](https://github.com/dbt-labs/dbt-core/issues/7258))~~ (Synapse does not support `CLONE`)
* Revamp dbt debug ([#7104](https://github.com/dbt-labs/dbt-core/issues/7104))
* Added new adapter zone tests
- constraints
- null_compare
- validate_sql
- equals
- dbt_clone

## v1.5.0rc1

* Support for [dbt-core 1.5](https://github.com/dbt-labs/dbt-core/releases/tag/v1.5.0)
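For illustration, a minimal sketch (hypothetical model, source, and column names) of a dbt model that satisfies Synapse's aggregation/`GROUP BY` requirement for materialized views; the `dist` config matches the option read by the new macros, which default to `ROUND_ROBIN`:

```sql
-- models/customer_order_counts.sql (hypothetical model name)
{{
    config(
        materialized = 'materialized_view',
        dist = 'HASH(customer_id)'   -- optional; the macros default to ROUND_ROBIN
    )
}}

-- Synapse materialized views must aggregate, hence COUNT_BIG(*) and GROUP BY.
select
    customer_id,
    count_big(*) as order_count
from {{ ref('stg_orders') }}
group by customer_id
```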
2 changes: 1 addition & 1 deletion dbt/adapters/synapse/__version__.py
@@ -1 +1 @@
version = "1.5.0rc1"
version = "1.6.0rc1"
2 changes: 1 addition & 1 deletion dbt/adapters/synapse/synapse_adapter.py
@@ -70,7 +70,7 @@ def render_raw_columns_constraints(cls, raw_columns: Dict[str, Dict[str, Any]])
rendered_column_constraints = []

for v in raw_columns.values():
rendered_column_constraint = [f"{v['name']} {v['data_type']}"]
rendered_column_constraint = [f"[{v['name']}] {v['data_type']}"]
for con in v.get("constraints", None):
constraint = cls._parse_column_constraint(con)
c = cls.process_parsed_constraint(constraint, cls.render_column_constraint)
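For context, a rough sketch (hypothetical model and column names) of the contracted-table DDL this change affects: with the added brackets, reserved words such as `from` no longer break the generated `create table`:

```sql
create table [dbo].[my_contracted_model]
(
    [id] int not null,
    [from] varchar(100),       -- reserved word; only valid because of the brackets
    [date_day] varchar(100)
)
with (distribution = round_robin, heap)
```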
23 changes: 18 additions & 5 deletions dbt/include/synapse/macros/adapters/relation.sql
@@ -5,21 +5,35 @@
{% endmacro %}

{% macro synapse__drop_relation_script(relation) -%}
{% if relation.type == 'view' -%}
{% if relation.type == 'view' or relation.type == 'materialized_view' -%}
{% set object_id_type = 'V' %}
{% elif relation.type == 'table'%}
{% set object_id_type = 'U' %}
{%- else -%} invalid target name
{% endif %}

if object_id ('{{ relation.include(database=False) }}','{{ object_id_type }}') is not null
{% if relation.type == 'view' or relation.type == 'materialized_view' -%}
begin
drop view {{ relation.include(database=False) }}
end
{% elif relation.type == 'table' %}
begin
drop {{ relation.type }} {{ relation.include(database=False) }}
end
{% endif %}
{% endmacro %}


{% macro synapse__rename_relation(from_relation, to_relation) -%}
{% call statement('rename_relation') -%}
{# dbt needs this 'call' macro, but it overwrites other SQL when reused in other macros #}
{# so the '_script' macro is a reusable script, for other macros to combine with more SQL #}

{% call statement('rename_relation') %}
{{ synapse__rename_relation_script(from_relation, to_relation) }}
{%- endcall %}
{% endmacro %}

{% macro synapse__rename_relation_script(from_relation, to_relation) -%}
-- drop all object types with to_relation.identifier name, to avoid error "new name already in use...duplicate...not permitted"
if object_id ('{{ to_relation.include(database=False) }}','V') is not null
begin
@@ -32,11 +46,10 @@
end

rename object {{ from_relation.include(database=False) }} to {{ to_relation.identifier }}
{%- endcall %}
{% endmacro %}

{% macro synapse__truncate_relation(relation) %}
{% call statement('truncate_relation') -%}
truncate table {{ relation }}
truncate table {{ relation }}
{%- endcall %}
{% endmacro %}
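Roughly what these macros render, assuming a hypothetical materialized view `analytics.my_mv` and an intermediate relation `analytics.my_mv__dbt_tmp`:

```sql
-- synapse__drop_relation_script: materialized views resolve to object type 'V'
-- and are dropped with DROP VIEW.
if object_id ('analytics.my_mv','V') is not null
begin
    drop view analytics.my_mv
end

-- synapse__rename_relation_script: clear any object already holding the target
-- name, then rename; the new name uses only the identifier (no schema).
if object_id ('analytics.my_mv','V') is not null
begin
    drop view analytics.my_mv
end
if object_id ('analytics.my_mv','U') is not null
begin
    drop table analytics.my_mv
end
rename object analytics.my_mv__dbt_tmp to my_mv
```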
29 changes: 29 additions & 0 deletions (new file; path not shown in this view)
@@ -0,0 +1,29 @@
{% macro ref(model_name) %}

{% do return(builtins.ref(model_name).include(database=false)) %}

{% endmacro %}

{% macro synapse__get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %}
{# Synapse does not have ALTER...RENAME function, so use synapse__rename_relation_script #}

{%- set dist = config.get('dist', default="ROUND_ROBIN") -%}
EXEC('
CREATE materialized view {{ intermediate_relation.include(database=False) }}
WITH ( DISTRIBUTION = {{dist}} )
AS {{ sql }}
');

{{ synapse__rename_relation_script(existing_relation, backup_relation) }}
{{ synapse__rename_relation_script(intermediate_relation, relation) }}

{% endmacro %}

{% macro synapse__get_create_materialized_view_as_sql(relation, sql) %}
{%- set dist = config.get('dist', default="ROUND_ROBIN") -%}

CREATE materialized view {{ relation.include(database=False) }}
WITH ( DISTRIBUTION = {{dist}} )
AS {{ sql }}

{% endmacro %}
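Putting this together, the replace path renders roughly the SQL below for a hypothetical target `analytics.my_mv` (the `__dbt_tmp`/`__dbt_backup` names follow dbt's usual suffix convention, and each rename step expands to the full `synapse__rename_relation_script` shown in relation.sql):

```sql
-- 1. Build the new materialized view under the intermediate name.
EXEC('
    CREATE materialized view analytics.my_mv__dbt_tmp
    WITH ( DISTRIBUTION = ROUND_ROBIN )
    AS select customer_id, count_big(*) as order_count
       from analytics.stg_orders
       group by customer_id
');

-- 2. Move the existing materialized view out of the way.
rename object analytics.my_mv to my_mv__dbt_backup

-- 3. Promote the intermediate view to the target name.
rename object analytics.my_mv__dbt_tmp to my_mv
```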
12 changes: 11 additions & 1 deletion dbt/include/synapse/macros/utils/split_part.sql
@@ -2,8 +2,18 @@
We have to replace the macro from dbt-sqlserver since that one uses XML which is an unsupported data type in Synapse.
The function below is not supported in Synapse Dedicated SQL according to the documentation, but it seems to work.
#}

{% macro synapse__split_part(string_text, delimiter_text, part_number) %}

(select value from string_split({{ string_text }}, {{ delimiter_text }}, 1) where ordinal = {{ part_number }})
{% if part_number >= 0 %}

(select value from string_split({{ string_text }}, {{ delimiter_text }}, 1) where ordinal = {{ part_number }})

{% else %}

(select value from string_split({{ string_text }}, {{ delimiter_text }}, 1)
where ordinal = len(replace({{ string_text }}, {{delimiter_text}}, '')) + 1 + {{ part_number }})

{% endif %}

{% endmacro %}
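As a rough illustration of the positive-index branch, a call such as `{{ dbt.split_part("'alpha|beta|gamma'", "'|'", 2) }}` (literal values are hypothetical) compiles to roughly the subquery below; it relies on STRING_SPLIT's ordinal output, which, per the comment above, is not documented for Synapse dedicated SQL pools but appears to work:

```sql
(select value
 from string_split('alpha|beta|gamma', '|', 1)   -- third argument enables the ordinal column
 where ordinal = 2)                               -- returns 'beta'
```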
2 changes: 1 addition & 1 deletion dev_requirements.txt
@@ -4,7 +4,7 @@ wheel==0.41.1
pre-commit==2.21.0;python_version<"3.8"
pre-commit==3.3.3;python_version>="3.8"
pytest-dotenv==0.5.2
dbt-tests-adapter~=1.5.9
dbt-tests-adapter~=1.6.9
aiohttp==3.8.3
azure-mgmt-synapse==2.0.0
flaky==3.7.0
5 changes: 2 additions & 3 deletions setup.py
@@ -15,8 +15,8 @@
"Anders Swanson",
"Sam Debruyn",
]
dbt_version = "1.5"
dbt_fabric_requirement = "dbt-fabric~=1.5.0"
dbt_version = "1.6"
dbt_fabric_requirement = "dbt-fabric~=1.6.0"
description = """An Azure Synapse adapter plugin for dbt"""

this_directory = os.path.abspath(os.path.dirname(__file__))
@@ -83,7 +83,6 @@ def run(self):
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
3 changes: 2 additions & 1 deletion tests/functional/adapter/data/seed_model.sql
@@ -10,7 +10,8 @@ create table {schema}.on_model_hook
target_pass VARCHAR(100),
target_threads INTEGER,
run_started_at VARCHAR(100),
invocation_id VARCHAR(100)
invocation_id VARCHAR(100),
thread_id VARCHAR(100)
)
WITH(
DISTRIBUTION = ROUND_ROBIN,
3 changes: 2 additions & 1 deletion tests/functional/adapter/data/seed_run.sql
@@ -13,7 +13,8 @@ create table {schema}.on_run_hook
target_pass VARCHAR(100),
target_threads INTEGER,
run_started_at VARCHAR(100),
invocation_id VARCHAR(100)
invocation_id VARCHAR(100),
thread_id VARCHAR(100)
)
WITH(
DISTRIBUTION = ROUND_ROBIN,
129 changes: 127 additions & 2 deletions tests/functional/adapter/test_constraints.py
@@ -12,12 +12,14 @@
my_model_view_wrong_name_sql,
my_model_view_wrong_order_sql,
my_model_with_nulls_sql,
my_model_with_quoted_column_name_sql,
my_model_wrong_name_sql,
my_model_wrong_order_depends_on_fk_sql,
my_model_wrong_order_sql,
)
from dbt.tests.adapter.constraints.test_constraints import (
BaseConstraintsRuntimeDdlEnforcement,
BaseContractSqlHeader,
BaseModelConstraintsRuntimeEnforcement,
)
from dbt.tests.util import (
@@ -246,6 +248,77 @@
- type: not_null
"""

model_contract_header_schema_yml = """
version: 2
models:
- name: my_model_contract_sql_header
config:
contract:
enforced: true
columns:
- name: column_name
data_type: int
"""


# no current_timezone() in Synapse
my_model_contract_sql_header_sql = """
{{
config(
materialized = "table"
)
}}
{% call set_sql_header(config) %}
set session time zone 'Asia/Kolkata';
{%- endcall %}
select datepart(tzoffset, sysdatetimeoffset()) as column_name
"""

my_model_incremental_contract_sql_header_sql = """
{{
config(
materialized = "incremental",
on_schema_change="append_new_columns"
)
}}
{% call set_sql_header(config) %}
set session time zone 'Asia/Kolkata';
{%- endcall %}
select datepart(tzoffset, sysdatetimeoffset()) as column_name
"""

model_quoted_column_schema_yml = """
version: 2
models:
- name: my_model
config:
contract:
enforced: true
materialized: table
constraints:
- type: check
# this one is on the user
expression: ("from" = 'blue')
columns: [ '"from"' ]
columns:
- name: id
data_type: integer
description: hello
constraints:
- type: not_null
tests:
- unique
- name: from # reserved word
quote: true
data_type: varchar(100)
constraints:
- type: not_null
- name: date_day
data_type: varchar(100)
"""


class BaseConstraintsColumnsEqual:
"""
@@ -395,7 +468,7 @@ def expected_sql(self):
if object_id <model_identifier> is not null begin drop table <model_identifier> end
exec('create view <model_identifier> as -- depends_on: <foreign_key_model_identifier>
select ''blue'' as color,1 as id,''2019-01-01'' as date_day;');
create table <model_identifier>(id int not null,color varchar(100),date_day varchar(100))
create table <model_identifier>([id] int not null,[color] varchar(100),[date_day] varchar(100))
with(distribution = round_robin,heap)
insert into <model_identifier>([id],[color],[date_day])
select [id],[color],[date_day] from <model_identifier>
@@ -435,7 +508,7 @@ def expected_sql(self):
if object_id <model_identifier> is not null begin drop table <model_identifier> end
exec('create view <model_identifier> as -- depends_on: <foreign_key_model_identifier>
select ''blue'' as color,1 as id,''2019-01-01'' as date_day;');
create table <model_identifier>(id int not null,color varchar(100),date_day varchar(100))
create table <model_identifier>([id] int not null,[color] varchar(100),[date_day] varchar(100))
with(distribution = round_robin,heap)
alter table <model_identifier> add constraint <model_identifier>
primary key nonclustered(id)not enforced;
@@ -548,6 +621,46 @@ def null_model_sql(self):
return my_model_incremental_with_nulls_sql


class BaseTableContractSqlHeader(BaseContractSqlHeader):
@pytest.fixture(scope="class")
def models(self):
return {
"my_model_contract_sql_header.sql": my_model_contract_sql_header_sql,
"constraints_schema.yml": model_contract_header_schema_yml,
}


class BaseIncrementalContractSqlHeader(BaseContractSqlHeader):
@pytest.fixture(scope="class")
def models(self):
return {
"my_model_contract_sql_header.sql": my_model_incremental_contract_sql_header_sql,
"constraints_schema.yml": model_contract_header_schema_yml,
}


class BaseConstraintQuotedColumn(BaseConstraintsRuntimeDdlEnforcement):
@pytest.fixture(scope="class")
def models(self):
return {
"my_model.sql": my_model_with_quoted_column_name_sql,
"constraints_schema.yml": model_quoted_column_schema_yml,
}

@pytest.fixture(scope="class")
def expected_sql(self):
return """
if object_id <model_identifier> is not null begin drop view <model_identifier> end
if object_id <model_identifier> is not null begin drop table <model_identifier> end
exec(\'create view <model_identifier> as select \'\'blue\'\' as "from",1 as id,\'\'2019-01-01\'\' as date_day;\');
create table <model_identifier>([id] integer not null,[from] varchar(100)not null,[date_day] varchar(100))
with(distribution = round_robin,heap)
insert into <model_identifier>([id],[from],[date_day])
select [id],[from],[date_day] from <model_identifier>
if object_id <model_identifier> is not null begin drop view <model_identifier> end
"""


class TestTableConstraintsRuntimeDdlEnforcementSynapse(BaseConstraintsRuntimeDdlEnforcement):
pass

@@ -580,3 +693,15 @@ class TestTableConstraintsRollbackSynapse(BaseConstraintsRollback):

class TestIncrementalConstraintsRollbackSynapse(BaseIncrementalConstraintsRollback):
pass


class TestTableContractSqlHeaderSynapse(BaseTableContractSqlHeader):
pass


class TestIncrementalContractSqlHeaderSynapse(BaseIncrementalContractSqlHeader):
pass


class TestConstraintQuotedColumnSynapse(BaseConstraintQuotedColumn):
pass
(Remaining changed files not shown in this view.)

