Skip to content

Commit

Permalink
Consolidate date macros into timestamps.sql (#5838) (#5985)
Browse files Browse the repository at this point in the history
* Consolidate date macros into dates.sql

* rename to timestamps.sql

* fix whitespace + add changie

* cleanup macros and add testing

* fix whitespace

* remove now macro

* fix functional test

* remove local config

* make snowflake backwards compat return utc

* move timestamps to adaptor base tests

* move backcompat macros to respective adapters

* change timestamp param to source_timestamp

* move timestamps.py to utils

* update changie.yaml

* make expected schema a fixture

* formatting

* add debug message to assert

* fix changie.yaml

* Update tests/adapter/dbt/tests/adapter/utils/test_timestamps.py

Co-authored-by: Doug Beatty <[email protected]>

* Update plugins/postgres/dbt/include/postgres/macros/timestamps.sql

Co-authored-by: Doug Beatty <[email protected]>

* Update .changie.yaml

* add backcompat utc

* remove current_timestamp_in_utc

* remove convert_timezone

* add _in_utc_backcompat

* fix macro_calls typo

* add expected sql validation to test_timestamps

* make expected_sql optional

* improve sql check string comparison test

* remove extraneous test file

* add timestamp casting back

* Update plugins/postgres/dbt/include/postgres/macros/adapters.sql

Co-authored-by: Doug Beatty <[email protected]>

* add check_relation_has_expected_schema to comments

* fix whitespace

* remove default impl of current_timestamp

Co-authored-by: Doug Beatty <[email protected]>
(cherry picked from commit a79960f)

Co-authored-by: colin-rogers-dbt <[email protected]>
Co-authored-by: leahwicz <[email protected]>
  • Loading branch information
3 people authored Oct 3, 2022
1 parent 2584465 commit 6042469
Show file tree
Hide file tree
Showing 11 changed files with 144 additions and 44 deletions.
7 changes: 7 additions & 0 deletions .changes/unreleased/Features-20220914-095625.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
kind: Features
body: Add standard timestamps.sql
time: 2022-09-14T09:56:25.97818-07:00
custom:
Author: colin-rogers-dbt
Issue: "5521"
PR: "5838"
2 changes: 1 addition & 1 deletion .changie.yaml
100755 → 100644
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ custom:
footerFormat: |
{{- $contributorDict := dict }}
{{- /* any names added to this list should be all lowercase for later matching purposes */}}
{{- $core_team := list "peterallenwebb" "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "dependabot[bot]" "snyk-bot" }}
{{- $core_team := list "peterallenwebb" "emmyoop" "nathaniel-may" "gshank" "leahwicz" "chenyulinx" "stu-k" "iknox-fa" "versusfacit" "mcknight-42" "jtcohen6" "dependabot[bot]" "snyk-bot" "colin-rogers-dbt" }}
{{- range $change := .Changes }}
{{- $authorList := splitList " " $change.Custom.Author }}
{{- /* loop through all authors for a PR */}}
Expand Down
2 changes: 1 addition & 1 deletion core/dbt/clients/jinja_static.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ def statically_extract_macro_calls(string, ctx, db_wrapper=None):
if hasattr(func_call, "node") and hasattr(func_call.node, "name"):
func_name = func_call.node.name
else:
# func_call for dbt_utils.current_timestamp macro
# func_call for dbt.current_timestamp macro
# Call(
# node=Getattr(
# node=Name(
Expand Down
10 changes: 0 additions & 10 deletions core/dbt/include/global_project/macros/adapters/freshness.sql
Original file line number Diff line number Diff line change
@@ -1,13 +1,3 @@
{% macro current_timestamp() -%}
{{ adapter.dispatch('current_timestamp', 'dbt')() }}
{%- endmacro %}

{% macro default__current_timestamp() -%}
{{ exceptions.raise_not_implemented(
'current_timestamp macro not implemented for adapter '+adapter.type()) }}
{%- endmacro %}


{% macro collect_freshness(source, loaded_at_field, filter) %}
{{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}
{% endmacro %}
Expand Down
44 changes: 44 additions & 0 deletions core/dbt/include/global_project/macros/adapters/timestamps.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
{#-- Dispatch entry point: resolves to the adapter-specific implementation,
    e.g. postgres__current_timestamp. --#}
{%- macro current_timestamp() -%}
  {{ adapter.dispatch('current_timestamp', 'dbt')() }}
{%- endmacro -%}

{#-- Fallback when an adapter provides no implementation: fail loudly rather
    than silently emitting wrong SQL. --#}
{% macro default__current_timestamp() -%}
  {{ exceptions.raise_not_implemented(
    'current_timestamp macro not implemented for adapter ' + adapter.type()) }}
{%- endmacro %}

{#-- Timestamp used for snapshot validity columns; adapters may override. --#}
{%- macro snapshot_get_time() -%}
  {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}
{%- endmacro -%}

{% macro default__snapshot_get_time() %}
  {{ current_timestamp() }}
{% endmacro %}

---------------------------------------------

/* {#
    DEPRECATED: DO NOT USE IN NEW PROJECTS
    This is ONLY to handle the fact that Snowflake + Postgres had functionally
    different implementations of {{ dbt.current_timestamp }} + {{ dbt_utils.current_timestamp }}

    If you had a project or package that called {{ dbt_utils.current_timestamp() }}, you should
    continue to use this macro to guarantee identical behavior on those two databases.
#} */

{% macro current_timestamp_backcompat() %}
  {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}
{% endmacro %}

{#-- Cast preserves the historical dbt_utils behavior of returning a plain
    (zone-less) timestamp. --#}
{% macro default__current_timestamp_backcompat() %}
  current_timestamp::timestamp
{% endmacro %}

{% macro current_timestamp_in_utc_backcompat() %}
  {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}
{% endmacro %}

{#-- By default the UTC variant defers to the non-UTC backcompat macro;
    adapters with real timezone support (e.g. postgres) override this. --#}
{% macro default__current_timestamp_in_utc_backcompat() %}
  {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}
{% endmacro %}
Original file line number Diff line number Diff line change
Expand Up @@ -46,19 +46,6 @@
{%- endfor -%})
{%- endmacro %}


{#
Get the current time cross-db
#}
{% macro snapshot_get_time() -%}
{{ adapter.dispatch('snapshot_get_time', 'dbt')() }}
{%- endmacro %}

{% macro default__snapshot_get_time() -%}
{{ current_timestamp() }}
{%- endmacro %}


{#
Core strategy definitions
#}
Expand Down
14 changes: 13 additions & 1 deletion core/dbt/tests/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import json
import warnings
from datetime import datetime
from typing import List
from typing import Dict, List
from contextlib import contextmanager
from dbt.adapters.factory import Adapter

Expand Down Expand Up @@ -35,6 +35,7 @@
# relation_from_name
# check_relation_types (table/view)
# check_relations_equal
# check_relation_has_expected_schema
# check_relations_equal_with_relations
# check_table_does_exist
# check_table_does_not_exist
Expand Down Expand Up @@ -321,6 +322,17 @@ def check_relations_equal(adapter, relation_names: List, compare_snapshot_cols=F
)


# Used to check that a particular relation has an expected schema
# expected_schema should look like {"column_name": "expected datatype"}
def check_relation_has_expected_schema(adapter, relation_name: str, expected_schema: Dict) -> None:
    """Assert that *relation_name*'s columns exactly match *expected_schema*.

    Args:
        adapter: active dbt adapter used to introspect the warehouse.
        relation_name: name of the relation, resolved via relation_from_name.
        expected_schema: mapping of column name -> expected data type string.

    Raises:
        AssertionError: if the actual column/type mapping differs.
    """
    relation = relation_from_name(adapter, relation_name)
    with get_connection(adapter):
        actual_columns = {c.name: c.data_type for c in adapter.get_columns_in_relation(relation)}
    # Include both sides in the failure message so a mismatch can be debugged
    # without re-running the introspection by hand.
    assert actual_columns == expected_schema, (
        f"Actual schema did not match expected, actual: {json.dumps(actual_columns)}, "
        f"expected: {json.dumps(expected_schema)}"
    )


# This can be used when checking relations in different schemas, by supplying
# a list of relations. Called by 'check_relations_equal'.
# Uses:
Expand Down
17 changes: 1 addition & 16 deletions plugins/postgres/dbt/include/postgres/macros/adapters.sql
Original file line number Diff line number Diff line change
Expand Up @@ -117,23 +117,8 @@
{{ return(load_result('check_schema_exists').table) }}
{% endmacro %}


{% macro postgres__current_timestamp() -%}
now()
{%- endmacro %}

{% macro postgres__snapshot_string_as_time(timestamp) -%}
{%- set result = "'" ~ timestamp ~ "'::timestamp without time zone" -%}
{{ return(result) }}
{%- endmacro %}


{% macro postgres__snapshot_get_time() -%}
{{ current_timestamp() }}::timestamp without time zone
{%- endmacro %}

{#
Postgres tables have a maximum length off 63 characters, anything longer is silently truncated.
Postgres tables have a maximum length of 63 characters, anything longer is silently truncated.
Temp and backup relations add a lot of extra characters to the end of table names to ensure uniqueness.
To prevent this going over the character limit, the base_relation name is truncated to ensure
that name + suffix + uniquestring is < 63 characters.
Expand Down
20 changes: 20 additions & 0 deletions plugins/postgres/dbt/include/postgres/macros/timestamps.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
{#-- Postgres current_timestamp: now() returns timestamp with time zone. --#}
{% macro postgres__current_timestamp() -%}
  now()
{%- endmacro %}

{#-- Render a string literal as a zone-less timestamp for snapshot queries. --#}
{% macro postgres__snapshot_string_as_time(timestamp) -%}
  {%- set result = "'" ~ timestamp ~ "'::timestamp without time zone" -%}
  {{ return(result) }}
{%- endmacro %}

{#-- Snapshot time drops the zone so it compares cleanly with
    snapshot_string_as_time output. --#}
{% macro postgres__snapshot_get_time() -%}
  {{ current_timestamp() }}::timestamp without time zone
{%- endmacro %}

{#-- Backcompat: mirrors the old dbt_utils.current_timestamp behavior on
    postgres (plain timestamp, session time zone). --#}
{% macro postgres__current_timestamp_backcompat() %}
  current_timestamp::{{ type_timestamp() }}
{% endmacro %}

{#-- Backcompat UTC variant: converts to UTC before casting away the zone. --#}
{% macro postgres__current_timestamp_in_utc_backcompat() %}
  (current_timestamp at time zone 'utc')::{{ type_timestamp() }}
{% endmacro %}
4 changes: 2 additions & 2 deletions test/unit/test_macro_calls.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ def setUp(self):
*
from {{ model }} )
{% endmacro %}""",
"{% macro test_my_test(model) %} select {{ dbt_utils.current_timestamp() }} {% endmacro %}",
"{% macro test_my_test(model) %} select {{ current_timestamp_backcompat() }} {% endmacro %}",
"{% macro some_test(model) -%} {{ return(adapter.dispatch('test_some_kind4', 'foo_utils4')) }} {%- endmacro %}",
"{% macro some_test(model) -%} {{ return(adapter.dispatch('test_some_kind5', macro_namespace = 'foo_utils5')) }} {%- endmacro %}",
]
Expand All @@ -34,7 +34,7 @@ def setUp(self):
['get_snapshot_unique_id'],
['get_columns_in_query'],
['get_snapshot_unique_id'],
['dbt_utils.current_timestamp'],
['current_timestamp_backcompat'],
['test_some_kind4', 'foo_utils4.test_some_kind4'],
['test_some_kind5', 'foo_utils5.test_some_kind5'],
]
Expand Down
55 changes: 55 additions & 0 deletions tests/adapter/dbt/tests/adapter/utils/test_timestamps.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
import pytest
import re
from dbt.tests.util import check_relation_has_expected_schema, run_dbt

_MODEL_CURRENT_TIMESTAMP = """
select {{ current_timestamp() }} as current_timestamp,
{{ current_timestamp_in_utc_backcompat() }} as current_timestamp_in_utc_backcompat,
{{ current_timestamp_backcompat() }} as current_timestamp_backcompat
"""

_MODEL_EXPECTED_SQL = """
select now() as current_timestamp,
(current_timestamp at time zone 'utc')::TIMESTAMP as current_timestamp_in_utc_backcompat,
current_timestamp::TIMESTAMP as current_timestamp_backcompat
"""


class BaseCurrentTimestamps:
    """Run a model built from the timestamp macros, then verify both the
    resulting column types and (optionally) the compiled SQL text."""

    @pytest.fixture(scope="class")
    def models(self):
        # A single model that calls current_timestamp plus both backcompat macros.
        return {"get_current_timestamp.sql": _MODEL_CURRENT_TIMESTAMP}

    @pytest.fixture(scope="class")
    def expected_sql(self):
        # Adapters that don't want the compiled-SQL check can override this
        # fixture to return None.
        return _MODEL_EXPECTED_SQL

    @pytest.fixture(scope="class")
    def expected_schema(self):
        return {
            "current_timestamp": "timestamp with time zone",
            "current_timestamp_in_utc_backcompat": "timestamp without time zone",
            "current_timestamp_backcompat": "timestamp without time zone",
        }

    def test_current_timestamps(self, project, models, expected_schema, expected_sql):
        results = run_dbt(["run"])
        assert len(results) == 1

        # Column names/types must match exactly.
        check_relation_has_expected_schema(
            project.adapter,
            relation_name="get_current_timestamp",
            expected_schema=expected_schema,
        )

        if expected_sql:
            compiled = results.results[0].node.compiled_code

            def squash(sql):
                # Ignore whitespace and letter case when comparing SQL text.
                return re.sub(r"\s+", "", sql).lower()

            assert squash(expected_sql) == squash(
                compiled
            ), f"generated sql did not match expected: {compiled}"


class TestCurrentTimestamps(BaseCurrentTimestamps):
    # Runs the base timestamp checks unchanged against the default adapter.
    pass

0 comments on commit 6042469

Please sign in to comment.