Merge pull request #127 from microsoft/v1.8_dataroots
v1.8.0rc1
prdpsvs authored Feb 27, 2024
2 parents a97aa2f + ce1f174 commit 8639a0a
Showing 12 changed files with 174 additions and 87 deletions.
25 changes: 25 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,30 @@
# Changelog

### v1.8.0rc1

## Features

* Support for dbt-core 1.8.0

## Bug fixes

* Refactor relations to query the sys catalog instead of INFORMATION_SCHEMA, which was causing concurrency issues when running multiple threads in parallel (https://github.com/microsoft/dbt-fabric/issues/52).

## Enhancements

* [Decouple imports](https://github.com/dbt-labs/dbt-adapters/discussions/87) into the common dbt-core and dbt-adapters interface packages for future maintainability and extensibility (a brief before/after import sketch appears below, after the version-bump list).

* Bump adapter packages
- from pyodbc>=4.0.35,<5.1.0 to pyodbc>=4.0.35,<5.2.0

> From now on, Apple-silicon users don't have to build pyodbc locally, because M1/M2 binaries are included in pyodbc from 5.1.0 onwards!

* Bump dev requirements
- from pytest~=7.4.4 to pytest~=8.0.1
- from twine~=4.0.2 to twine~=5.0.0
- from pre-commit~=3.5.0 to pre-commit~=3.6.2
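
The import decoupling above shows up throughout the file diffs below. As a quick orientation, here is a minimal before/after sketch; the module paths are taken from this commit's diffs, while the helper function is illustrative only:

```python
# Before: everything was imported from dbt-core.
# import dbt.exceptions
# from dbt.contracts.connection import AdapterResponse, Connection, ConnectionState
# from dbt.events.functions import fire_event

# After: exceptions and events come from dbt-common, adapter contracts from dbt-adapters.
import dbt_common.exceptions
from dbt.adapters.contracts.connection import AdapterResponse, Connection, ConnectionState
from dbt_common.events.functions import fire_event


def fail_with_database_error(message: str) -> None:
    # Illustrative helper: the adapter now raises dbt-common exception types.
    raise dbt_common.exceptions.DbtDatabaseError(message)
```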

### v1.7.3

## Enhancements
2 changes: 1 addition & 1 deletion dbt/adapters/fabric/__version__.py
@@ -1 +1 @@
version = "1.7.4"
version = "1.8.0a1"
19 changes: 9 additions & 10 deletions dbt/adapters/fabric/fabric_adapter.py
@@ -1,23 +1,22 @@
from typing import List, Optional

import agate
import dbt.exceptions
import dbt_common.exceptions
from dbt.adapters.base import Column as BaseColumn

# from dbt.events.functions import fire_event
# from dbt.events.types import SchemaCreation
from dbt.adapters.base.impl import ConstraintSupport
from dbt.adapters.base.meta import available
from dbt.adapters.base.relation import BaseRelation
from dbt.adapters.cache import _make_ref_key_dict
from dbt.adapters.capability import Capability, CapabilityDict, CapabilitySupport, Support

# from dbt.adapters.cache import _make_ref_key_msg
from dbt.adapters.events.types import SchemaCreation
from dbt.adapters.sql import SQLAdapter
from dbt.adapters.sql.impl import CREATE_SCHEMA_MACRO_NAME
from dbt.contracts.graph.nodes import ColumnLevelConstraint, ConstraintType, ModelLevelConstraint
from dbt.events.functions import fire_event
from dbt.events.types import SchemaCreation
from dbt_common.contracts.constraints import (
ColumnLevelConstraint,
ConstraintType,
ModelLevelConstraint,
)
from dbt_common.events.functions import fire_event

from dbt.adapters.fabric.fabric_column import FabricColumn
from dbt.adapters.fabric.fabric_configs import FabricConfigs
@@ -204,7 +203,7 @@ def render_model_constraint(cls, constraint: ModelLevelConstraint) -> Optional[s
column_list = ", ".join(constraint.columns)

if constraint.name is None:
raise dbt.exceptions.DbtDatabaseError(
raise dbt_common.exceptions.DbtDatabaseError(
"Constraint name cannot be empty. Provide constraint name - column "
+ column_list
+ " and run the project again."
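
A hedged sketch of the constraint-rendering pattern the adapter diff above moves to: the constraint contracts now come from dbt_common.contracts.constraints and the error type from dbt_common.exceptions. The rendered string below is illustrative only, not the adapter's exact output:

```python
from typing import Optional

import dbt_common.exceptions
from dbt_common.contracts.constraints import ConstraintType, ModelLevelConstraint


def render_model_constraint(constraint: ModelLevelConstraint) -> Optional[str]:
    column_list = ", ".join(constraint.columns)

    if constraint.name is None:
        # Same error class and message shape as in the diff above.
        raise dbt_common.exceptions.DbtDatabaseError(
            "Constraint name cannot be empty. Provide constraint name - column "
            + column_list
            + " and run the project again."
        )

    if constraint.type == ConstraintType.primary_key:
        # Illustrative rendering; the adapter emits warehouse-specific syntax per constraint type.
        return f"constraint {constraint.name} primary key nonclustered ({column_list}) not enforced"

    return None
```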
22 changes: 11 additions & 11 deletions dbt/adapters/fabric/fabric_connection_manager.py
@@ -6,18 +6,18 @@
from typing import Any, Callable, Dict, Mapping, Optional, Tuple, Union

import agate
import dbt.exceptions
import dbt_common.exceptions
import pyodbc
from azure.core.credentials import AccessToken
from azure.identity import AzureCliCredential, DefaultAzureCredential, EnvironmentCredential
from dbt.adapters.contracts.connection import AdapterResponse, Connection, ConnectionState
from dbt.adapters.events.logging import AdapterLogger
from dbt.adapters.events.types import ConnectionUsed, SQLQuery, SQLQueryStatus
from dbt.adapters.sql import SQLConnectionManager
from dbt.clients.agate_helper import empty_table
from dbt.contracts.connection import AdapterResponse, Connection, ConnectionState
from dbt.events import AdapterLogger
from dbt.events.contextvars import get_node_info
from dbt.events.functions import fire_event
from dbt.events.types import ConnectionUsed, SQLQuery, SQLQueryStatus
from dbt.utils import cast_to_str
from dbt_common.clients.agate_helper import empty_table
from dbt_common.events.contextvars import get_node_info
from dbt_common.events.functions import fire_event
from dbt_common.utils.casting import cast_to_str

from dbt.adapters.fabric import __version__
from dbt.adapters.fabric.fabric_credentials import FabricCredentials
@@ -265,19 +265,19 @@ def exception_handler(self, sql):
except pyodbc.Error:
logger.debug("Failed to release connection!")

raise dbt.exceptions.DbtDatabaseError(str(e).strip()) from e
raise dbt_common.exceptions.DbtDatabaseError(str(e).strip()) from e

except Exception as e:
logger.debug(f"Error running SQL: {sql}")
logger.debug("Rolling back transaction.")
self.release()
if isinstance(e, dbt.exceptions.DbtRuntimeError):
if isinstance(e, dbt_common.exceptions.DbtRuntimeError):
# during a sql query, an internal to dbt exception was raised.
# this sounds a lot like a signal handler and probably has
# useful information, so raise it without modification.
raise

raise dbt.exceptions.DbtRuntimeError(e)
raise dbt_common.exceptions.DbtRuntimeError(e)

@classmethod
def open(cls, connection: Connection) -> Connection:
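
A simplified sketch of the exception-handling flow in the connection-manager diff above, with pyodbc failures mapped to dbt-common exception types. The real method lives on the connection manager class and also handles connection release and logging:

```python
from contextlib import contextmanager

import dbt_common.exceptions
import pyodbc


@contextmanager
def exception_handler(sql: str):
    try:
        yield
    except pyodbc.DatabaseError as e:
        # Database-side failures are surfaced as DbtDatabaseError from dbt-common.
        raise dbt_common.exceptions.DbtDatabaseError(str(e).strip()) from e
    except Exception as e:
        if isinstance(e, dbt_common.exceptions.DbtRuntimeError):
            # Internal dbt exceptions already carry useful context; re-raise unchanged.
            raise
        raise dbt_common.exceptions.DbtRuntimeError(e)
```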
2 changes: 1 addition & 1 deletion dbt/adapters/fabric/fabric_credentials.py
@@ -1,7 +1,7 @@
from dataclasses import dataclass
from typing import Optional

from dbt.contracts.connection import Credentials
from dbt.adapters.contracts.connection import Credentials


@dataclass
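
Only the Credentials import path changes here (dbt.contracts.connection becomes dbt.adapters.contracts.connection). A hedged sketch of a credentials dataclass against the new location; the field names and defaults are illustrative, not the full FabricCredentials definition:

```python
from dataclasses import dataclass
from typing import Optional

from dbt.adapters.contracts.connection import Credentials


@dataclass
class ExampleCredentials(Credentials):
    # Illustrative subset of connection settings; adjust to your profile.
    driver: str = "ODBC Driver 18 for SQL Server"
    host: str = ""
    authentication: Optional[str] = "ActiveDirectoryDefault"

    @property
    def type(self) -> str:
        return "fabric"

    @property
    def unique_field(self) -> str:
        return self.host

    def _connection_keys(self):
        return ("driver", "host", "database", "schema", "authentication")
```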
48 changes: 28 additions & 20 deletions dbt/include/fabric/macros/adapters/catalog.sql
@@ -22,6 +22,7 @@

tables as (
select
object_id,
name as table_name,
schema_id as schema_id,
principal_id as principal_id,
@@ -32,6 +33,7 @@

tables_with_metadata as (
select
object_id,
table_name,
schema_name,
coalesce(tables.principal_id, schemas.principal_id) as owner_principal_id,
@@ -43,6 +45,7 @@

views as (
select
object_id,
name as table_name,
schema_id as schema_id,
principal_id as principal_id,
@@ -53,6 +56,7 @@

views_with_metadata as (
select
object_id,
table_name,
schema_name,
coalesce(views.principal_id, schemas.principal_id) as owner_principal_id,
@@ -64,6 +68,7 @@

tables_and_views as (
select
object_id,
table_name,
schema_name,
principal_name,
@@ -73,6 +78,7 @@
join principals on tables_with_metadata.owner_principal_id = principals.principal_id
union all
select
object_id,
table_name,
schema_name,
principal_name,
@@ -85,18 +91,16 @@
cols as (

select
table_catalog as table_database,
table_schema,
table_name,
column_name,
ordinal_position as column_index,
data_type as column_type
from INFORMATION_SCHEMA.COLUMNS {{ information_schema_hints() }}

c.object_id,
c.name as column_name,
c.column_id as column_index,
t.name as column_type
from sys.columns as c {{ information_schema_hints() }}
left join sys.types as t on c.system_type_id = t.system_type_id {{ information_schema_hints() }}
)

select
cols.table_database,
DB_NAME() as table_database,
tv.schema_name as table_schema,
tv.table_name,
tv.table_type,
@@ -107,7 +111,7 @@
cols.column_type,
null as column_comment
from tables_and_views tv
join cols on tv.schema_name = cols.table_schema and tv.table_name = cols.table_name
join cols on tv.object_id = cols.object_id
where ({%- for schema in schemas -%}
upper(tv.schema_name) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}
{%- endfor -%})
@@ -144,6 +148,7 @@

tables as (
select
object_id,
name as table_name,
schema_id as schema_id,
principal_id as principal_id,
@@ -154,6 +159,7 @@

tables_with_metadata as (
select
object_id,
table_name,
schema_name,
coalesce(tables.principal_id, schemas.principal_id) as owner_principal_id,
@@ -165,6 +171,7 @@

views as (
select
object_id,
name as table_name,
schema_id as schema_id,
principal_id as principal_id,
@@ -175,6 +182,7 @@

views_with_metadata as (
select
object_id,
table_name,
schema_name,
coalesce(views.principal_id, schemas.principal_id) as owner_principal_id,
@@ -186,6 +194,7 @@

tables_and_views as (
select
object_id,
table_name,
schema_name,
principal_name,
@@ -195,6 +204,7 @@
join principals on tables_with_metadata.owner_principal_id = principals.principal_id
union all
select
object_id,
table_name,
schema_name,
principal_name,
@@ -207,18 +217,16 @@
cols as (

select
table_catalog as table_database,
table_schema,
table_name,
column_name,
ordinal_position as column_index,
data_type as column_type
from INFORMATION_SCHEMA.COLUMNS {{ information_schema_hints() }}

c.object_id,
c.name as column_name,
c.column_id as column_index,
t.name as column_type
from sys.columns as c {{ information_schema_hints() }}
left join sys.types as t on c.system_type_id = t.system_type_id {{ information_schema_hints() }}
)

select
cols.table_database,
DB_NAME() as table_database,
tv.schema_name as table_schema,
tv.table_name,
tv.table_type,
@@ -229,7 +237,7 @@
cols.column_type,
null as column_comment
from tables_and_views tv
join cols on tv.schema_name = cols.table_schema and tv.table_name = cols.table_name
join cols on tv.object_id = cols.object_id
where (
{%- for relation in relations -%}
{% if relation.schema and relation.identifier %}
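
The catalog macro now keys the column join on object_id instead of matching schema and table names, which is what resolves the concurrency issue called out in the changelog. A hedged pyodbc sketch of an equivalent standalone query for sanity-checking (DSN and schema name are placeholders; tables only, whereas the macro also unions sys.views):

```python
import pyodbc

# Placeholder connection string; adjust server, database, and auth for your workspace.
conn = pyodbc.connect("DSN=fabric_dw", autocommit=True)

sql = """
select
    DB_NAME()                as table_database,
    SCHEMA_NAME(t.schema_id) as table_schema,
    t.name                   as table_name,
    c.name                   as column_name,
    c.column_id              as column_index,
    ty.name                  as column_type
from sys.tables as t
join sys.columns as c on c.object_id = t.object_id   -- join on object_id, as in the new macro
left join sys.types as ty on ty.system_type_id = c.system_type_id
where SCHEMA_NAME(t.schema_id) = ?
order by t.name, c.column_id
"""

for row in conn.cursor().execute(sql, "dbo"):  # 'dbo' is a placeholder schema
    print(row.table_schema, row.table_name, row.column_name, row.column_type)
```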
62 changes: 37 additions & 25 deletions dbt/include/fabric/macros/adapters/metadata.sql
@@ -28,37 +28,49 @@

{% macro fabric__list_relations_without_caching(schema_relation) -%}
{% call statement('list_relations_without_caching', fetch_result=True) -%}
select
table_catalog as [database],
table_name as [name],
table_schema as [schema],
case when table_type = 'BASE TABLE' then 'table'
when table_type = 'VIEW' then 'view'
else table_type
end as table_type

from INFORMATION_SCHEMA.TABLES {{ information_schema_hints() }}
where table_schema like '{{ schema_relation.schema }}'
with base as (
select
DB_NAME() as [database],
t.name as [name],
SCHEMA_NAME(t.schema_id) as [schema],
'table' as table_type
from sys.tables as t {{ information_schema_hints() }}
union all
select
DB_NAME() as [database],
v.name as [name],
SCHEMA_NAME(v.schema_id) as [schema],
'view' as table_type
from sys.views as v {{ information_schema_hints() }}
)
select * from base
where [schema] like '{{ schema_relation.schema }}'
{% endcall %}
{{ return(load_result('list_relations_without_caching').table) }}
{% endmacro %}

{% macro fabric__get_relation_without_caching(schema_relation) -%}
{% call statement('list_relations_without_caching', fetch_result=True) -%}
select
table_catalog as [database],
table_name as [name],
table_schema as [schema],
case when table_type = 'BASE TABLE' then 'table'
when table_type = 'VIEW' then 'view'
else table_type
end as table_type

from INFORMATION_SCHEMA.TABLES {{ information_schema_hints() }}
where table_schema like '{{ schema_relation.schema }}'
and table_name like '{{ schema_relation.identifier }}'
{% call statement('get_relation_without_caching', fetch_result=True) -%}
with base as (
select
DB_NAME() as [database],
t.name as [name],
SCHEMA_NAME(t.schema_id) as [schema],
'table' as table_type
from sys.tables as t {{ information_schema_hints() }}
union all
select
DB_NAME() as [database],
v.name as [name],
SCHEMA_NAME(v.schema_id) as [schema],
'view' as table_type
from sys.views as v {{ information_schema_hints() }}
)
select * from base
where [schema] like '{{ schema_relation.schema }}'
and [name] like '{{ schema_relation.identifier }}'
{% endcall %}
{{ return(load_result('list_relations_without_caching').table) }}
{{ return(load_result('get_relation_without_caching').table) }}
{% endmacro %}

{% macro fabric__get_relation_last_modified(information_schema, relations) -%}
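
list_relations_without_caching similarly switches from INFORMATION_SCHEMA.TABLES to a union over sys.tables and sys.views, with DB_NAME() and SCHEMA_NAME() supplying the database and schema. A hedged pyodbc sketch of the same query shape (placeholder DSN and schema pattern):

```python
import pyodbc

conn = pyodbc.connect("DSN=fabric_dw", autocommit=True)  # placeholder DSN

sql = """
with base as (
    select DB_NAME() as [database], t.name as [name],
           SCHEMA_NAME(t.schema_id) as [schema], 'table' as table_type
    from sys.tables as t
    union all
    select DB_NAME() as [database], v.name as [name],
           SCHEMA_NAME(v.schema_id) as [schema], 'view' as table_type
    from sys.views as v
)
select * from base
where [schema] like ?
"""

for row in conn.cursor().execute(sql, "dbo"):  # placeholder schema pattern
    print(row.table_type, f"{row[0]}.{row[2]}.{row[1]}")
```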
14 changes: 10 additions & 4 deletions dev_requirements.txt
@@ -1,9 +1,15 @@
pytest==7.4.4
twine==4.0.2
# install latest changes in dbt-core
# TODO: how to automate switching from develop to version branches?
git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-core&subdirectory=core
git+https://github.com/dbt-labs/dbt-adapters.git
git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter

pytest==8.0.1
twine==5.0.0
wheel==0.42
pre-commit==3.5.0
pre-commit==3.5.0;python_version<"3.9"
pre-commit==3.6.2;python_version>="3.9"
pytest-dotenv==0.5.2
dbt-tests-adapter~=1.7.4
flaky==3.7.0
pytest-xdist==3.5.0
-e .
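
The pre-commit pins now use PEP 508 environment markers so that Python 3.8 keeps the older release. A small sketch of how those markers evaluate, using the packaging library (not a dependency of this repo; shown purely for illustration):

```python
from packaging.markers import Marker

# The two markers used for the pre-commit pins in dev_requirements.txt.
old_python = Marker('python_version < "3.9"')
new_python = Marker('python_version >= "3.9"')

# Evaluated against the running interpreter exactly one is True,
# so pip installs exactly one of the two pre-commit pins.
print("pre-commit==3.5.0 applies:", old_python.evaluate())
print("pre-commit==3.6.2 applies:", new_python.evaluate())
```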