From a5c636db592514bdb4bae6fd69383a9f73ba92af Mon Sep 17 00:00:00 2001
From: Matthew McKnight
Date: Wed, 20 Mar 2024 15:18:33 -0500
Subject: [PATCH] reverting back and trying to use tables field is_dynamic

---
 dbt/include/snowflake/macros/catalog.sql      | 43 +++++++++----------
 .../adapter/dynamic_table_tests/utils.py      | 22 ++++------
 2 files changed, 28 insertions(+), 37 deletions(-)

diff --git a/dbt/include/snowflake/macros/catalog.sql b/dbt/include/snowflake/macros/catalog.sql
index 5fd149740..e434071c3 100644
--- a/dbt/include/snowflake/macros/catalog.sql
+++ b/dbt/include/snowflake/macros/catalog.sql
@@ -38,41 +38,38 @@

 {% macro snowflake__get_catalog_tables_sql(information_schema) -%}
     select
-        t.table_catalog as "table_database",
-        t.table_schema as "table_schema",
-        t.table_name as "table_name",
-        case
-            when tp.target_lag is not null and t.table_type = 'BASE TABLE' then 'DYNAMIC TABLE'
-            else t.table_type
-        end as "table_type",
-        t.comment as "table_comment",
-        t.table_owner as "table_owner",
+        table_catalog as "table_database",
+        table_schema as "table_schema",
+        table_name as "table_name",
+        case
+            when is_dynamic is not null and table_type = 'BASE TABLE' THEN 'DYNAMIC TABLE'
+            else table_type
+        end as "table_type",
+        comment as "table_comment",
+
+        -- note: this is the _role_ that owns the table
+        table_owner as "table_owner",

         'Clustering Key' as "stats:clustering_key:label",
-        t.clustering_key as "stats:clustering_key:value",
+        clustering_key as "stats:clustering_key:value",
         'The key used to cluster this table' as "stats:clustering_key:description",
-        (t.clustering_key is not null) as "stats:clustering_key:include",
+        (clustering_key is not null) as "stats:clustering_key:include",

         'Row Count' as "stats:row_count:label",
-        t.row_count as "stats:row_count:value",
+        row_count as "stats:row_count:value",
         'An approximate count of rows in this table' as "stats:row_count:description",
-        (t.row_count is not null) as "stats:row_count:include",
+        (row_count is not null) as "stats:row_count:include",

         'Approximate Size' as "stats:bytes:label",
-        t.bytes as "stats:bytes:value",
+        bytes as "stats:bytes:value",
         'Approximate size of the table as reported by Snowflake' as "stats:bytes:description",
-        (t.bytes is not null) as "stats:bytes:include",
+        (bytes is not null) as "stats:bytes:include",

         'Last Modified' as "stats:last_modified:label",
-        to_varchar(convert_timezone('UTC', t.last_altered), 'yyyy-mm-dd HH24:MI'||'UTC') as "stats:last_modified:value",
+        to_varchar(convert_timezone('UTC', last_altered), 'yyyy-mm-dd HH24:MI'||'UTC') as "stats:last_modified:value",
         'The timestamp for last update/change' as "stats:last_modified:description",
-        (t.last_altered is not null and t.table_type = 'BASE TABLE') as "stats:last_modified:include"
-    from
-        {{ information_schema }}.tables t
-    left join
-        table_properties tp on t.table_catalog = tp.table_catalog
-        and t.table_schema = tp.table_schema
-        and t.table_name = tp.table_name;
+        (last_altered is not null and table_type='BASE TABLE') as "stats:last_modified:include"
+    from {{ information_schema }}.tables
 {%- endmacro %}


diff --git a/tests/functional/adapter/dynamic_table_tests/utils.py b/tests/functional/adapter/dynamic_table_tests/utils.py
index 8ef0477ac..6d79d1792 100644
--- a/tests/functional/adapter/dynamic_table_tests/utils.py
+++ b/tests/functional/adapter/dynamic_table_tests/utils.py
@@ -11,21 +11,15 @@ def query_relation_type(project, relation: SnowflakeRelation) -> Optional[str]:
     sql = f"""
     select
         case
-            when t.table_type = 'BASE TABLE' and dt.completion_target is not null then 'dynamic_table'
-            when t.table_type = 'BASE TABLE' then 'table'
-            when t.table_type = 'VIEW' then 'view'
-            when t.table_type = 'EXTERNAL TABLE' then 'external_table'
+            when table_type = 'BASE TABLE' and is_dynamic is not null then 'dynamic_table'
+            when table_type = 'BASE TABLE' then 'table'
+            when table_type = 'VIEW' then 'view'
+            when table_type = 'EXTERNAL TABLE' then 'external_table'
         end as relation_type
-    from
-        information_schema.tables t
-    left join
-        information_schema.dynamic_table_refresh_history dt on t.table_catalog = dt.table_catalog
-        and t.table_schema = dt.schema_name
-        and t.table_name = dt.name
-    where
-        t.table_name like '{relation.identifier.upper()}'
-        and t.schema_name like '{relation.schema.upper()}'
-        and t.table_catalog like '{relation.database.upper()}'
+    from information_schema.tables
+    where table_name like '{relation.identifier.upper()}'
+        and table_schema like '{relation.schema.upper()}'
+        and table_catalog like '{relation.database.upper()}'
     """
     results = project.run_sql(sql, fetch="one")
    if results is None or len(results) == 0:
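
Note (illustrative, not part of the patch): both hunks replace a join against a second relation with a single read of the `is_dynamic` column on `information_schema.tables`. A minimal Python sketch of the classification rule the new CASE expression encodes, assuming `table_type` and `is_dynamic` arrive as plain column values (the function name and sample values here are hypothetical, not defined by this patch):

from typing import Optional

def classify_relation(table_type: str, is_dynamic: Optional[str]) -> Optional[str]:
    # Mirrors the CASE expression in query_relation_type(): a dynamic table
    # surfaces as a BASE TABLE whose is_dynamic column is populated.
    if table_type == "BASE TABLE" and is_dynamic is not None:
        return "dynamic_table"
    if table_type == "BASE TABLE":
        return "table"
    if table_type == "VIEW":
        return "view"
    if table_type == "EXTERNAL TABLE":
        return "external_table"
    return None

# Example usage, assuming Snowflake reports 'YES' for dynamic tables:
assert classify_relation("BASE TABLE", "YES") == "dynamic_table"
assert classify_relation("VIEW", None) == "view"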