
Commit

reverting back and trying to use tables field is_dynamic
McKnight-42 committed Mar 20, 2024
1 parent 86e986b commit a5c636d
Showing 2 changed files with 28 additions and 37 deletions.
43 changes: 20 additions & 23 deletions dbt/include/snowflake/macros/catalog.sql
@@ -38,41 +38,38 @@

 {% macro snowflake__get_catalog_tables_sql(information_schema) -%}
     select
-        t.table_catalog as "table_database",
-        t.table_schema as "table_schema",
-        t.table_name as "table_name",
-        case
-            when tp.target_lag is not null and t.table_type = 'BASE TABLE' then 'DYNAMIC TABLE'
-            else t.table_type
-        end as "table_type",
-        t.comment as "table_comment",
-        t.table_owner as "table_owner",
+        table_catalog as "table_database",
+        table_schema as "table_schema",
+        table_name as "table_name",
+        case
+            when is_dynamic is not null and table_type = 'BASE TABLE' THEN 'DYNAMIC TABLE'
+            else table_type
+        end as "table_type",
+        comment as "table_comment",
+
+        -- note: this is the _role_ that owns the table
+        table_owner as "table_owner",
 
         'Clustering Key' as "stats:clustering_key:label",
-        t.clustering_key as "stats:clustering_key:value",
+        clustering_key as "stats:clustering_key:value",
         'The key used to cluster this table' as "stats:clustering_key:description",
-        (t.clustering_key is not null) as "stats:clustering_key:include",
+        (clustering_key is not null) as "stats:clustering_key:include",
 
         'Row Count' as "stats:row_count:label",
-        t.row_count as "stats:row_count:value",
+        row_count as "stats:row_count:value",
         'An approximate count of rows in this table' as "stats:row_count:description",
-        (t.row_count is not null) as "stats:row_count:include",
+        (row_count is not null) as "stats:row_count:include",
 
         'Approximate Size' as "stats:bytes:label",
-        t.bytes as "stats:bytes:value",
+        bytes as "stats:bytes:value",
         'Approximate size of the table as reported by Snowflake' as "stats:bytes:description",
-        (t.bytes is not null) as "stats:bytes:include",
+        (bytes is not null) as "stats:bytes:include",
 
         'Last Modified' as "stats:last_modified:label",
-        to_varchar(convert_timezone('UTC', t.last_altered), 'yyyy-mm-dd HH24:MI'||'UTC') as "stats:last_modified:value",
+        to_varchar(convert_timezone('UTC', last_altered), 'yyyy-mm-dd HH24:MI'||'UTC') as "stats:last_modified:value",
         'The timestamp for last update/change' as "stats:last_modified:description",
-        (t.last_altered is not null and t.table_type = 'BASE TABLE') as "stats:last_modified:include"
-    from
-        {{ information_schema }}.tables t
-    left join
-        table_properties tp on t.table_catalog = tp.table_catalog
-        and t.table_schema = tp.table_schema
-        and t.table_name = tp.table_name;
+        (last_altered is not null and table_type='BASE TABLE') as "stats:last_modified:include"
+    from {{ information_schema }}.tables
 {%- endmacro %}
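The revised macro reads the is_dynamic column straight from {{ information_schema }}.tables instead of joining to a separate table_properties relation. A minimal standalone sketch of the same classification is shown below (not part of the commit); MY_DATABASE and MY_SCHEMA are placeholder names, and it assumes the account's INFORMATION_SCHEMA.TABLES view exposes IS_DYNAMIC, which Snowflake documents as a YES/NO column, so a stricter variant could compare against 'YES' rather than testing for non-null.

    -- sketch only: classify relations via information_schema.tables.is_dynamic
    -- MY_DATABASE / MY_SCHEMA are placeholder names
    select
        table_catalog,
        table_schema,
        table_name,
        case
            when is_dynamic is not null and table_type = 'BASE TABLE' then 'DYNAMIC TABLE'
            else table_type
        end as resolved_table_type
    from MY_DATABASE.information_schema.tables
    where table_schema = 'MY_SCHEMA';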


22 changes: 8 additions & 14 deletions tests/functional/adapter/dynamic_table_tests/utils.py
@@ -11,21 +11,15 @@ def query_relation_type(project, relation: SnowflakeRelation) -> Optional[str]:
     sql = f"""
         select
             case
-                when t.table_type = 'BASE TABLE' and dt.completion_target is not null then 'dynamic_table'
-                when t.table_type = 'BASE TABLE' then 'table'
-                when t.table_type = 'VIEW' then 'view'
-                when t.table_type = 'EXTERNAL TABLE' then 'external_table'
+                when table_type = 'BASE TABLE' and is_dynamic is not null then 'dynamic_table'
+                when table_type = 'BASE TABLE' then 'table'
+                when table_type = 'VIEW' then 'view'
+                when table_type = 'EXTERNAL TABLE' then 'external_table'
             end as relation_type
-        from
-            information_schema.tables t
-        left join
-            information_schema.dynamic_table_refresh_history dt on t.table_catalog = dt.table_catalog
-            and t.table_schema = dt.schema_name
-            and t.table_name = dt.name
-        where
-            t.table_name like '{relation.identifier.upper()}'
-            and t.schema_name like '{relation.schema.upper()}'
-            and t.table_catalog like '{relation.database.upper()}'
+        from information_schema.tables
+        where table_name like '{relation.identifier.upper()}'
+        and table_schema like '{relation.schema.upper()}'
+        and table_catalog like '{relation.database.upper()}'
     """
     results = project.run_sql(sql, fetch="one")
     if results is None or len(results) == 0:
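For a hypothetical relation MY_DB.MY_SCHEMA.MY_DYNAMIC_TABLE, the updated f-string renders to roughly the SQL below (the identifiers are placeholders, upper-cased by the .upper() calls); the helper then executes it via project.run_sql(sql, fetch="one") and inspects the single relation_type value.

    select
        case
            when table_type = 'BASE TABLE' and is_dynamic is not null then 'dynamic_table'
            when table_type = 'BASE TABLE' then 'table'
            when table_type = 'VIEW' then 'view'
            when table_type = 'EXTERNAL TABLE' then 'external_table'
        end as relation_type
    from information_schema.tables
    where table_name like 'MY_DYNAMIC_TABLE'
    and table_schema like 'MY_SCHEMA'
    and table_catalog like 'MY_DB'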
