Fix multiple semantic models with generated metrics #10451
New changelog entry:

@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix over deletion of generated_metrics in partial parsing
+time: 2024-07-16T13:37:03.49651-04:00
+custom:
+  Author: gshank
+  Issue: "10450"
Partial parsing change:

@@ -968,13 +968,17 @@
             elif unique_id in self.saved_manifest.disabled:
                 self.delete_disabled(unique_id, schema_file.file_id)

-        metrics = schema_file.generated_metrics.copy()
-        for unique_id in metrics:
-            if unique_id in self.saved_manifest.metrics:
-                self.saved_manifest.metrics.pop(unique_id)
-                schema_file.generated_metrics.remove(unique_id)
-            elif unique_id in self.saved_manifest.disabled:
-                self.delete_disabled(unique_id, schema_file.file_id)
+        if schema_file.generated_metrics:
+            # If this partial parse file has an old "generated_metrics" list,
+            # call code to fix it up before processing.
+            schema_file.fix_metrics_from_measures()
+        if semantic_model_name in schema_file.metrics_from_measures:
+            for unique_id in schema_file.metrics_from_measures[semantic_model_name]:
+                if unique_id in self.saved_manifest.metrics:
+                    self.saved_manifest.metrics.pop(unique_id)
+                elif unique_id in self.saved_manifest.disabled:
+                    self.delete_disabled(unique_id, schema_file.file_id)
+            del schema_file.metrics_from_measures[semantic_model_name]

     def delete_schema_unit_test(self, schema_file, unit_test_dict):
         unit_test_name = unit_test_dict["name"]

Review comment on the removed block: So here we were deleting all generated metrics instead of just the one related to a semantic model.
Reply: Correct.

Review comment on `if semantic_model_name in schema_file.metrics_from_measures:`: Can we add a bit of comment here?
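To make the review thread above concrete: previously each schema file tracked its generated metrics in a single flat `generated_metrics` list, so deleting or reparsing one semantic model wiped out the generated metrics of every semantic model in that file. The new `metrics_from_measures` dict keys the generated metric ids by the name of the semantic model that produced them. Below is a minimal sketch of that difference, using plain dicts and lists as stand-ins for dbt's real schema file and manifest objects; the function names are illustrative, not dbt APIs, and the metric ids are borrowed from the test further down.

```python
# Stand-in for the saved manifest's metrics (ids borrowed from the test below).
saved_metrics = {
    "metric.test.txn_revenue": "generated by semantic model 'revenue'",
    "metric.test.alt_txn_revenue": "generated by semantic model 'alt_revenue'",
}

# Old bookkeeping: one flat list of generated metric ids per schema file.
generated_metrics = ["metric.test.txn_revenue", "metric.test.alt_txn_revenue"]

# New bookkeeping: generated metric ids keyed by their semantic model's name.
metrics_from_measures = {
    "revenue": ["metric.test.txn_revenue"],
    "alt_revenue": ["metric.test.alt_txn_revenue"],
}


def delete_semantic_model_old(semantic_model_name: str) -> None:
    # Old behavior: every generated metric in the file is dropped, no matter
    # which semantic model is actually being deleted or reparsed.
    for unique_id in generated_metrics.copy():
        saved_metrics.pop(unique_id, None)
        generated_metrics.remove(unique_id)


def delete_semantic_model_new(semantic_model_name: str) -> None:
    # New behavior: only the metrics produced by this semantic model are dropped.
    if semantic_model_name in metrics_from_measures:
        for unique_id in metrics_from_measures[semantic_model_name]:
            saved_metrics.pop(unique_id, None)
        del metrics_from_measures[semantic_model_name]


delete_semantic_model_new("revenue")
# alt_revenue's generated metric survives; with the old logic it would be gone too.
assert "metric.test.alt_txn_revenue" in saved_metrics
```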
Functional test changes:

@@ -3,13 +3,14 @@
 import pytest

 from dbt.contracts.graph.manifest import Manifest
-from dbt.tests.util import write_file
+from dbt.tests.util import run_dbt, write_file
 from dbt_common.events.base_types import BaseEvent
 from dbt_semantic_interfaces.type_enums.time_granularity import TimeGranularity
 from tests.functional.assertions.test_runner import dbtTestRunner
 from tests.functional.semantic_models.fixtures import (
     fct_revenue_sql,
     metricflow_time_spine_sql,
+    multi_sm_schema_yml,
     schema_without_semantic_model_yml,
     schema_yml,
 )

@@ -146,3 +147,28 @@ def test_semantic_model_flipping_create_metric_partial_parsing(self, project):
         # Verify the metric originally created by `create_metric: true` was removed
         metric = result.result.metrics[generated_metric]
         assert metric.name == "txn_revenue"
+
+
+class TestSemanticModelPartialParsingGeneratedMetrics:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "schema.yml": multi_sm_schema_yml,
+            "fct_revenue.sql": fct_revenue_sql,
+            "metricflow_time_spine.sql": metricflow_time_spine_sql,
+        }
+
+    def test_generated_metrics(self, project):
+        manifest = run_dbt(["parse"])
+        expected = {
+            "metric.test.simple_metric",
+            "metric.test.txn_revenue",
+            "metric.test.alt_txn_revenue",
+        }
+        assert set(manifest.metrics.keys()) == expected
+
+        # change description of 'revenue' semantic model
+        modified_schema_yml = multi_sm_schema_yml.replace("first", "FIRST")
+        write_file(modified_schema_yml, project.project_root, "models", "schema.yml")
+        manifest = run_dbt(["parse"])
+        assert set(manifest.metrics.keys()) == expected

Review comment on the new TestSemanticModelPartialParsingGeneratedMetrics class: Nice re-pro case!!!
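The `multi_sm_schema_yml` fixture itself is not included in this diff, so the sketch below is an assumption about its shape: for the expected metric ids in the test to hold, it presumably defines two semantic models whose measures set `create_metric: true` (yielding the generated metrics `txn_revenue` and `alt_txn_revenue`) plus an ordinary `simple_metric`, with the word "first" in one description so the test can flip it to "FIRST" and trigger partial parsing. A schematic version, written as a fixture string in the style of the imported fixtures module:

```python
# Hypothetical sketch only -- the real multi_sm_schema_yml lives in
# tests/functional/semantic_models/fixtures.py and is not shown in this PR.
multi_sm_schema_yml = """
semantic_models:
  - name: revenue
    description: first semantic model
    model: ref('fct_revenue')
    defaults:
      agg_time_dimension: ds
    entities:
      - name: txn
        type: primary
        expr: id
    dimensions:
      - name: ds
        type: time
        type_params:
          time_granularity: day
    measures:
      - name: txn_revenue
        agg: sum
        expr: revenue
        create_metric: true  # produces metric.test.txn_revenue

  - name: alt_revenue
    description: second semantic model
    model: ref('fct_revenue')
    defaults:
      agg_time_dimension: alt_ds
    entities:
      - name: alt_txn
        type: primary
        expr: id
    dimensions:
      - name: alt_ds
        type: time
        expr: ds
        type_params:
          time_granularity: day
    measures:
      - name: alt_txn_revenue
        agg: sum
        expr: revenue
        create_metric: true  # produces metric.test.alt_txn_revenue

metrics:
  - name: simple_metric
    label: Simple Metric
    type: simple
    type_params:
      measure: txn_revenue
"""
```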
Review comment on the PR: Is it possible to add a unit test for this new logic? I see the coverage is only 10%-ish.