From 92c5191b375a439c5c67da96516e04c81a535013 Mon Sep 17 00:00:00 2001
From: Paul Yang
Date: Thu, 30 Nov 2023 00:10:22 -0800
Subject: [PATCH] Update tests to reflect class signature changes.

Test cases need to be updated to handle signature changes to
MetricFlowQueryParser, MetricFlowQuerySpec, and WhereFilterSpec. In cases
where a filter needs to be created for a query, the query parser is used to
generate the appropriate lookup.
---
 .../builder/test_dataflow_plan_builder.py | 168 +++++-------------
 .../source_scan/test_source_scan_optimizer.py | 29 ++-
 metricflow/test/fixtures/dataflow_fixtures.py | 24 +++
 metricflow/test/fixtures/model_fixtures.py | 5 -
 .../test_distinct_values_to_sql.py | 69 +++++++
 .../test_dataflow_to_sql_plan.py | 23 ++-
 metricflow/test/query/test_query_parser.py | 52 +++---
 .../test_cumulative_metric_rendering.py | 28 ++-
 .../test_derived_metric_rendering.py | 59 +++---
 .../query_rendering/test_query_rendering.py | 119 +++++--------
 10 files changed, 267 insertions(+), 309 deletions(-)
 create mode 100644 metricflow/test/plan_conversion/dataflow_to_sql/test_distinct_values_to_sql.py

diff --git a/metricflow/test/dataflow/builder/test_dataflow_plan_builder.py b/metricflow/test/dataflow/builder/test_dataflow_plan_builder.py
index 2235b2b5a8..cbc9410812 100644
--- a/metricflow/test/dataflow/builder/test_dataflow_plan_builder.py
+++ b/metricflow/test/dataflow/builder/test_dataflow_plan_builder.py
@@ -4,13 +4,14 @@
 import pytest
 from _pytest.fixtures import FixtureRequest
-from dbt_semantic_interfaces.implementations.filters.where_filter import PydanticWhereFilter
+from dbt_semantic_interfaces.naming.keywords import METRIC_TIME_ELEMENT_NAME
 from dbt_semantic_interfaces.type_enums.time_granularity import TimeGranularity

 from metricflow.dataflow.builder.dataflow_plan_builder import DataflowPlanBuilder
 from metricflow.dataflow.dataflow_plan_to_text import dataflow_plan_as_text
 from metricflow.dataset.dataset import DataSet
 from metricflow.errors.errors import UnableToSatisfyQueryError
+from metricflow.query.query_parser import MetricFlowQueryParser
 from metricflow.specs.column_assoc import ColumnAssociationResolver
 from metricflow.specs.specs import (
     DimensionSpec,
@@ -20,7 +21,6 @@
     OrderBySpec,
     TimeDimensionSpec,
 )
-from metricflow.specs.where_filter_transform import WhereSpecFactory
 from metricflow.test.dataflow_plan_to_svg import display_graph_if_requested
 from metricflow.test.fixtures.setup_fixtures import MetricFlowTestSessionState
 from metricflow.test.snapshot_utils import assert_plan_snapshot_text_equal
@@ -344,28 +344,15 @@ def test_where_constrained_plan(  # noqa: D
     request: FixtureRequest,
     mf_test_session_state: MetricFlowTestSessionState,
     column_association_resolver: ColumnAssociationResolver,
     dataflow_plan_builder: DataflowPlanBuilder,
+    query_parser: MetricFlowQueryParser,
 ) -> None:
     """Tests a simple plan getting a metric and a local dimension."""
-    dataflow_plan = dataflow_plan_builder.build_plan(
-        MetricFlowQuerySpec(
-            metric_specs=(MetricSpec(element_name="bookings"),),
-            dimension_specs=(
-                DimensionSpec(
-                    element_name="is_instant",
-                    entity_links=(EntityReference("booking"),),
-                ),
-            ),
-            where_constraint=(
-                WhereSpecFactory(
-                    column_association_resolver=column_association_resolver,
-                ).create_from_where_filter(
-                    PydanticWhereFilter(
-                        where_sql_template="{{ Dimension('listing__country_latest') }} = 'us'",
-                    )
-                )
-            ),
-        )
+    query_spec = query_parser.parse_and_validate_query(
+        metric_names=("bookings",),
+        group_by_names=("booking__is_instant",),
+        
where_constraint_str="{{ Dimension('listing__country_latest') }} = 'us'", ) + dataflow_plan = dataflow_plan_builder.build_plan(query_spec) assert_plan_snapshot_text_equal( request=request, @@ -386,29 +373,15 @@ def test_where_constrained_plan_time_dimension( # noqa: D request: FixtureRequest, mf_test_session_state: MetricFlowTestSessionState, dataflow_plan_builder: DataflowPlanBuilder, - column_association_resolver: ColumnAssociationResolver, + query_parser: MetricFlowQueryParser, ) -> None: """Tests a simple plan getting a metric and a local dimension.""" - dataflow_plan = dataflow_plan_builder.build_plan( - MetricFlowQuerySpec( - metric_specs=(MetricSpec(element_name="bookings"),), - dimension_specs=( - DimensionSpec( - element_name="is_instant", - entity_links=(EntityReference("booking"),), - ), - ), - where_constraint=( - WhereSpecFactory( - column_association_resolver=column_association_resolver, - ).create_from_where_filter( - PydanticWhereFilter( - where_sql_template="{{ TimeDimension('metric_time', 'day') }} >= '2020-01-01'", - ) - ) - ), - ) + query_spec = query_parser.parse_and_validate_query( + metric_names=("bookings",), + group_by_names=("booking__is_instant",), + where_constraint_str="{{ TimeDimension('metric_time', 'day') }} >= '2020-01-01'", ) + dataflow_plan = dataflow_plan_builder.build_plan(query_spec) assert_plan_snapshot_text_equal( request=request, @@ -430,28 +403,15 @@ def test_where_constrained_with_common_linkable_plan( # noqa: D mf_test_session_state: MetricFlowTestSessionState, column_association_resolver: ColumnAssociationResolver, dataflow_plan_builder: DataflowPlanBuilder, + query_parser: MetricFlowQueryParser, ) -> None: """Tests a dataflow plan where the where clause has a common linkable with the query.""" - dataflow_plan = dataflow_plan_builder.build_plan( - MetricFlowQuerySpec( - metric_specs=(MetricSpec(element_name="bookings"),), - dimension_specs=( - DimensionSpec( - element_name="country_latest", - entity_links=(EntityReference(element_name="listing"),), - ), - ), - where_constraint=( - WhereSpecFactory( - column_association_resolver=column_association_resolver, - ).create_from_where_filter( - PydanticWhereFilter( - where_sql_template="{{ Dimension('listing__country_latest') }} = 'us'", - ) - ) - ), - ) + query_spec = query_parser.parse_and_validate_query( + metric_names=("bookings",), + group_by_names=("listing__country_latest",), + where_constraint_str="{{ Dimension('listing__country_latest') }} = 'us'", ) + dataflow_plan = dataflow_plan_builder.build_plan(query_spec) assert_plan_snapshot_text_equal( request=request, @@ -580,34 +540,17 @@ def test_distinct_values_plan( # noqa: D request: FixtureRequest, mf_test_session_state: MetricFlowTestSessionState, dataflow_plan_builder: DataflowPlanBuilder, - column_association_resolver: ColumnAssociationResolver, + query_parser: MetricFlowQueryParser, ) -> None: """Tests a plan to get distinct values of a dimension.""" - dataflow_plan = dataflow_plan_builder.build_plan_for_distinct_values( - query_spec=MetricFlowQuerySpec( - dimension_specs=( - DimensionSpec(element_name="country_latest", entity_links=(EntityReference(element_name="listing"),)), - ), - where_constraint=( - WhereSpecFactory( - column_association_resolver=column_association_resolver, - ).create_from_where_filter( - PydanticWhereFilter( - where_sql_template="{{ Dimension('listing__country_latest') }} = 'us'", - ) - ) - ), - order_by_specs=( - OrderBySpec( - instance_spec=DimensionSpec( - element_name="country_latest", 
entity_links=(EntityReference(element_name="listing"),) - ), - descending=True, - ), - ), - limit=100, - ) + query_spec = query_parser.parse_and_validate_query( + metric_names=(), + group_by_names=("listing__country_latest",), + where_constraint_str="{{ Dimension('listing__country_latest') }} = 'us'", + order_by_names=("-listing__country_latest",), + limit=100, ) + dataflow_plan = dataflow_plan_builder.build_plan_for_distinct_values(query_spec) assert_plan_snapshot_text_equal( request=request, @@ -628,35 +571,16 @@ def test_distinct_values_plan_with_join( # noqa: D request: FixtureRequest, mf_test_session_state: MetricFlowTestSessionState, dataflow_plan_builder: DataflowPlanBuilder, - column_association_resolver: ColumnAssociationResolver, + query_parser: MetricFlowQueryParser, ) -> None: """Tests a plan to get distinct values of 2 dimensions, where a join is required.""" - dataflow_plan = dataflow_plan_builder.build_plan_for_distinct_values( - query_spec=MetricFlowQuerySpec( - dimension_specs=( - DimensionSpec(element_name="home_state_latest", entity_links=(EntityReference(element_name="user"),)), - DimensionSpec(element_name="is_lux_latest", entity_links=(EntityReference(element_name="listing"),)), - ), - where_constraint=( - WhereSpecFactory( - column_association_resolver=column_association_resolver, - ).create_from_where_filter( - PydanticWhereFilter( - where_sql_template="{{ Dimension('listing__country_latest') }} = 'us'", - ) - ) - ), - order_by_specs=( - OrderBySpec( - instance_spec=DimensionSpec( - element_name="country_latest", entity_links=(EntityReference(element_name="listing"),) - ), - descending=True, - ), - ), - limit=100, - ) + query_spec = query_parser.parse_and_validate_query( + group_by_names=("user__home_state_latest", "listing__is_lux_latest"), + where_constraint_str="{{ Dimension('listing__country_latest') }} = 'us'", + order_by_names=("-listing__is_lux_latest",), + limit=100, ) + dataflow_plan = dataflow_plan_builder.build_plan_for_distinct_values(query_spec) assert_plan_snapshot_text_equal( request=request, @@ -676,16 +600,15 @@ def test_distinct_values_plan_with_join( # noqa: D def test_measure_constraint_plan( request: FixtureRequest, mf_test_session_state: MetricFlowTestSessionState, + query_parser: MetricFlowQueryParser, dataflow_plan_builder: DataflowPlanBuilder, ) -> None: """Tests a plan for querying a metric with a constraint on one or more of its input measures.""" - dataflow_plan = dataflow_plan_builder.build_plan( - MetricFlowQuerySpec( - metric_specs=(MetricSpec(element_name="lux_booking_value_rate_expr"),), - dimension_specs=(), - time_dimension_specs=(MTD_SPEC_DAY,), - ), + query_spec = query_parser.parse_and_validate_query( + metric_names=("lux_booking_value_rate_expr",), + group_by_names=(METRIC_TIME_ELEMENT_NAME,), ) + dataflow_plan = dataflow_plan_builder.build_plan(query_spec) assert_plan_snapshot_text_equal( request=request, @@ -705,16 +628,15 @@ def test_measure_constraint_plan( def test_measure_constraint_with_reused_measure_plan( request: FixtureRequest, mf_test_session_state: MetricFlowTestSessionState, + query_parser: MetricFlowQueryParser, dataflow_plan_builder: DataflowPlanBuilder, ) -> None: """Tests a plan for querying a metric with a constraint on one or more of its input measures.""" - dataflow_plan = dataflow_plan_builder.build_plan( - MetricFlowQuerySpec( - metric_specs=(MetricSpec(element_name="instant_booking_value_ratio"),), - dimension_specs=(), - time_dimension_specs=(MTD_SPEC_DAY,), - ), + query_spec = 
query_parser.parse_and_validate_query( + metric_names=("instant_booking_value_ratio",), + group_by_names=(METRIC_TIME_ELEMENT_NAME,), ) + dataflow_plan = dataflow_plan_builder.build_plan(query_spec) assert_plan_snapshot_text_equal( request=request, diff --git a/metricflow/test/dataflow/optimizer/source_scan/test_source_scan_optimizer.py b/metricflow/test/dataflow/optimizer/source_scan/test_source_scan_optimizer.py index e97dbffacd..c48e332b91 100644 --- a/metricflow/test/dataflow/optimizer/source_scan/test_source_scan_optimizer.py +++ b/metricflow/test/dataflow/optimizer/source_scan/test_source_scan_optimizer.py @@ -4,7 +4,7 @@ import pytest from _pytest.fixtures import FixtureRequest -from dbt_semantic_interfaces.implementations.filters.where_filter import PydanticWhereFilter +from dbt_semantic_interfaces.naming.keywords import METRIC_TIME_ELEMENT_NAME from dbt_semantic_interfaces.type_enums.time_granularity import TimeGranularity from metricflow.dataflow.builder.dataflow_plan_builder import DataflowPlanBuilder @@ -31,6 +31,7 @@ from metricflow.dataflow.dataflow_plan_to_text import dataflow_plan_as_text from metricflow.dataflow.optimizer.source_scan.source_scan_optimizer import SourceScanOptimizer from metricflow.dataset.dataset import DataSet +from metricflow.query.query_parser import MetricFlowQueryParser from metricflow.specs.column_assoc import ColumnAssociationResolver from metricflow.specs.specs import ( DimensionSpec, @@ -38,7 +39,6 @@ MetricFlowQuerySpec, MetricSpec, ) -from metricflow.specs.where_filter_transform import WhereSpecFactory from metricflow.test.dataflow_plan_to_svg import display_graph_if_requested from metricflow.test.fixtures.setup_fixtures import MetricFlowTestSessionState from metricflow.test.snapshot_utils import assert_plan_snapshot_text_equal @@ -221,31 +221,22 @@ def test_constrained_metric_not_combined( # noqa: D mf_test_session_state: MetricFlowTestSessionState, column_association_resolver: ColumnAssociationResolver, dataflow_plan_builder: DataflowPlanBuilder, + query_parser: MetricFlowQueryParser, ) -> None: """Tests that 2 metrics from the same semantic model but where 1 is constrained results in 2 scans. - If there is a constraint, need needs to be handled in a separate query because the constraint applies to all rows. + If there is a constraint for a metric, it needs to be handled in a separate query because the constraint applies to + all rows. 
""" + query_spec = query_parser.parse_and_validate_query( + metric_names=("booking_value", "instant_booking_value"), + group_by_names=(METRIC_TIME_ELEMENT_NAME,), + ) check_optimization( request=request, mf_test_session_state=mf_test_session_state, dataflow_plan_builder=dataflow_plan_builder, - query_spec=MetricFlowQuerySpec( - metric_specs=( - MetricSpec(element_name="booking_value"), - MetricSpec( - element_name="instant_booking_value", - constraint=( - WhereSpecFactory( - column_association_resolver=column_association_resolver, - ).create_from_where_filter( - PydanticWhereFilter(where_sql_template="{{ Dimension('booking__is_instant') }} ") - ) - ), - ), - ), - dimension_specs=(DataSet.metric_time_dimension_spec(TimeGranularity.DAY),), - ), + query_spec=query_spec, expected_num_sources_in_unoptimized=2, expected_num_sources_in_optimized=2, ) diff --git a/metricflow/test/fixtures/dataflow_fixtures.py b/metricflow/test/fixtures/dataflow_fixtures.py index eb34764f01..5f298ddd1b 100644 --- a/metricflow/test/fixtures/dataflow_fixtures.py +++ b/metricflow/test/fixtures/dataflow_fixtures.py @@ -7,6 +7,7 @@ from metricflow.plan_conversion.column_resolver import DunderColumnAssociationResolver from metricflow.plan_conversion.time_spine import TimeSpineSource from metricflow.protocols.sql_client import SqlClient +from metricflow.query.query_parser import MetricFlowQueryParser from metricflow.specs.column_assoc import ColumnAssociationResolver from metricflow.test.fixtures.model_fixtures import ConsistentIdObjectRepository from metricflow.test.fixtures.setup_fixtures import MetricFlowTestSessionState @@ -38,6 +39,18 @@ def dataflow_plan_builder( # noqa: D ) +@pytest.fixture +def query_parser( # noqa: D + simple_semantic_manifest_lookup: SemanticManifestLookup, + column_association_resolver: ColumnAssociationResolver, + consistent_id_object_repository: ConsistentIdObjectRepository, +) -> MetricFlowQueryParser: + return MetricFlowQueryParser( + column_association_resolver=column_association_resolver, + model=simple_semantic_manifest_lookup, + ) + + @pytest.fixture def extended_date_dataflow_plan_builder( # noqa: D extended_date_semantic_manifest_lookup: SemanticManifestLookup, @@ -85,6 +98,17 @@ def scd_dataflow_plan_builder( # noqa: D ) +@pytest.fixture +def scd_query_parser( # noqa: D + scd_column_association_resolver: ColumnAssociationResolver, + scd_semantic_manifest_lookup: SemanticManifestLookup, +) -> MetricFlowQueryParser: + return MetricFlowQueryParser( + column_association_resolver=scd_column_association_resolver, + model=scd_semantic_manifest_lookup, + ) + + @pytest.fixture(scope="session") def time_spine_source( # noqa: D sql_client: SqlClient, mf_test_session_state: MetricFlowTestSessionState # noqa: F811 diff --git a/metricflow/test/fixtures/model_fixtures.py b/metricflow/test/fixtures/model_fixtures.py index c5da86885f..7166d49dba 100644 --- a/metricflow/test/fixtures/model_fixtures.py +++ b/metricflow/test/fixtures/model_fixtures.py @@ -64,11 +64,6 @@ def query_parser_from_yaml(yaml_contents: List[YamlConfigFile]) -> MetricFlowQue return MetricFlowQueryParser( model=semantic_manifest_lookup, column_association_resolver=DunderColumnAssociationResolver(semantic_manifest_lookup), - read_nodes=list(_data_set_to_read_nodes(create_data_sets(semantic_manifest_lookup)).values()), - node_output_resolver=DataflowPlanNodeOutputDataSetResolver( - column_association_resolver=DunderColumnAssociationResolver(semantic_manifest_lookup), - semantic_manifest_lookup=semantic_manifest_lookup, - ), ) diff 
--git a/metricflow/test/plan_conversion/dataflow_to_sql/test_distinct_values_to_sql.py b/metricflow/test/plan_conversion/dataflow_to_sql/test_distinct_values_to_sql.py new file mode 100644 index 0000000000..9392944529 --- /dev/null +++ b/metricflow/test/plan_conversion/dataflow_to_sql/test_distinct_values_to_sql.py @@ -0,0 +1,69 @@ +from __future__ import annotations + +import pytest +from _pytest.fixtures import FixtureRequest +from dbt_semantic_interfaces.implementations.filters.where_filter import PydanticWhereFilter +from dbt_semantic_interfaces.references import EntityReference + +from metricflow.dataflow.builder.dataflow_plan_builder import DataflowPlanBuilder +from metricflow.plan_conversion.dataflow_to_sql import DataflowToSqlQueryPlanConverter +from metricflow.protocols.sql_client import SqlClient +from metricflow.query.query_parser import MetricFlowQueryParser +from metricflow.specs.column_assoc import ColumnAssociationResolver +from metricflow.specs.specs import DimensionSpec, MetricFlowQuerySpec +from metricflow.test.fixtures.setup_fixtures import MetricFlowTestSessionState +from metricflow.test.plan_conversion.test_dataflow_to_sql_plan import convert_and_check + + +@pytest.mark.sql_engine_snapshot +def test_dimensions_requiring_join( + request: FixtureRequest, + mf_test_session_state: MetricFlowTestSessionState, + dataflow_plan_builder: DataflowPlanBuilder, + dataflow_to_sql_converter: DataflowToSqlQueryPlanConverter, + sql_client: SqlClient, +) -> None: + """Tests querying 2 dimensions that require a join.""" + dimension_specs = ( + DimensionSpec(element_name="home_state_latest", entity_links=(EntityReference(element_name="user"),)), + DimensionSpec(element_name="is_lux_latest", entity_links=(EntityReference(element_name="listing"),)), + ) + dataflow_plan = dataflow_plan_builder.build_plan_for_distinct_values( + query_spec=MetricFlowQuerySpec(dimension_specs=dimension_specs) + ) + + convert_and_check( + request=request, + mf_test_session_state=mf_test_session_state, + dataflow_to_sql_converter=dataflow_to_sql_converter, + sql_client=sql_client, + node=dataflow_plan.sink_output_nodes[0].parent_node, + ) + + +@pytest.mark.sql_engine_snapshot +def test_dimension_values_with_a_join_and_a_filter( + request: FixtureRequest, + mf_test_session_state: MetricFlowTestSessionState, + column_association_resolver: ColumnAssociationResolver, + query_parser: MetricFlowQueryParser, + dataflow_plan_builder: DataflowPlanBuilder, + dataflow_to_sql_converter: DataflowToSqlQueryPlanConverter, + sql_client: SqlClient, +) -> None: + """Tests querying 2 dimensions that require a join and a filter.""" + query_spec = query_parser.parse_and_validate_query( + group_by_names=("user__home_state_latest", "listing__is_lux_latest"), + where_constraint=PydanticWhereFilter( + where_sql_template="{{ Dimension('user__home_state_latest') }} = 'us'", + ), + ) + dataflow_plan = dataflow_plan_builder.build_plan_for_distinct_values(query_spec) + + convert_and_check( + request=request, + mf_test_session_state=mf_test_session_state, + dataflow_to_sql_converter=dataflow_to_sql_converter, + sql_client=sql_client, + node=dataflow_plan.sink_output_nodes[0].parent_node, + ) diff --git a/metricflow/test/plan_conversion/test_dataflow_to_sql_plan.py b/metricflow/test/plan_conversion/test_dataflow_to_sql_plan.py index 0c3680a13d..567aa09eab 100644 --- a/metricflow/test/plan_conversion/test_dataflow_to_sql_plan.py +++ b/metricflow/test/plan_conversion/test_dataflow_to_sql_plan.py @@ -4,7 +4,6 @@ import pytest from 
_pytest.fixtures import FixtureRequest -from dbt_semantic_interfaces.implementations.filters.where_filter import PydanticWhereFilter from dbt_semantic_interfaces.implementations.metric import PydanticMetricTimeWindow from dbt_semantic_interfaces.references import EntityReference, TimeDimensionReference from dbt_semantic_interfaces.test_utils import as_datetime @@ -37,6 +36,7 @@ from metricflow.specs.specs import ( DimensionSpec, InstanceSpecSet, + LinkableSpecSet, LinklessEntitySpec, MeasureSpec, MetricFlowQuerySpec, @@ -45,9 +45,10 @@ NonAdditiveDimensionSpec, OrderBySpec, TimeDimensionSpec, + WhereFilterSpec, ) -from metricflow.specs.where_filter_transform import WhereSpecFactory from metricflow.sql.optimizer.optimization_levels import SqlQueryOptimizationLevel +from metricflow.sql.sql_bind_parameters import SqlBindParameters from metricflow.sql.sql_plan import SqlJoinType from metricflow.test.dataflow_plan_to_svg import display_graph_if_requested from metricflow.test.fixtures.model_fixtures import ConsistentIdObjectRepository @@ -182,14 +183,18 @@ def test_filter_with_where_constraint_node( # noqa: D ) # need to include ds_spec because where constraint operates on ds where_constraint_node = WhereConstraintNode( parent_node=filter_node, - where_constraint=( - WhereSpecFactory( - column_association_resolver=column_association_resolver, - ).create_from_where_filter( - PydanticWhereFilter( - where_sql_template="{{ TimeDimension('booking__ds', 'day') }} = '2020-01-01'", + where_constraint=WhereFilterSpec( + where_sql="booking__ds__day = '2020-01-01'", + bind_parameters=SqlBindParameters(), + linkable_spec_set=LinkableSpecSet( + time_dimension_specs=( + TimeDimensionSpec( + element_name="ds", + entity_links=(EntityReference(element_name="booking"),), + time_granularity=TimeGranularity.DAY, + ), ) - ) + ), ), ) diff --git a/metricflow/test/query/test_query_parser.py b/metricflow/test/query/test_query_parser.py index 20a9f01f97..b1f2a43434 100644 --- a/metricflow/test/query/test_query_parser.py +++ b/metricflow/test/query/test_query_parser.py @@ -2,7 +2,6 @@ import logging import textwrap -from collections import namedtuple import pytest from dbt_semantic_interfaces.parsing.objects import YamlConfigFile @@ -11,7 +10,6 @@ from dbt_semantic_interfaces.type_enums.date_part import DatePart from dbt_semantic_interfaces.type_enums.time_granularity import TimeGranularity -from metricflow.errors.errors import UnableToSatisfyQueryError from metricflow.filters.time_constraint import TimeRangeConstraint from metricflow.query.query_exceptions import InvalidQueryException from metricflow.query.query_parser import MetricFlowQueryParser @@ -31,7 +29,6 @@ from metricflow.test.fixtures.model_fixtures import query_parser_from_yaml from metricflow.test.model.example_project_configuration import EXAMPLE_PROJECT_CONFIGURATION_YAML_CONFIG_FILE from metricflow.test.time.metric_time_dimension import MTD -from metricflow.time.time_granularity_solver import RequestTimeGranularityException logger = logging.getLogger(__name__) @@ -223,8 +220,7 @@ def test_query_parser_case_insensitivity(bookings_query_parser: MetricFlowQueryP ) # Object params - Metric = namedtuple("Metric", ["name", "descending"]) - metric = Metric("BOOKINGS", False) + metric = MetricParameter(name="BOOKINGS") group_by = ( DimensionOrEntityParameter("BOOKING__IS_INSTANT"), DimensionOrEntityParameter("LISTING"), @@ -256,13 +252,12 @@ def test_query_parser_case_insensitivity(bookings_query_parser: MetricFlowQueryP def 
test_query_parser_invalid_group_by(bookings_query_parser: MetricFlowQueryParser) -> None: # noqa: D - with pytest.raises(UnableToSatisfyQueryError): + with pytest.raises(InvalidQueryException): bookings_query_parser.parse_and_validate_query(group_by_names=["random_stuff"]) def test_query_parser_with_object_params(bookings_query_parser: MetricFlowQueryParser) -> None: # noqa: D - Metric = namedtuple("Metric", ["name", "descending"]) - metric = Metric("bookings", False) + metric = MetricParameter(name="bookings") group_by = ( DimensionOrEntityParameter("booking__is_instant"), DimensionOrEntityParameter("listing"), @@ -357,8 +352,8 @@ def test_time_range_constraint_conversion() -> None: def test_parse_and_validate_where_constraint_dims(bookings_query_parser: MetricFlowQueryParser) -> None: """Test that the returned time constraint in the query spec is adjusted to match the granularity of the query.""" - # check constraint on invalid_dim raises UnableToSatisfyQueryError - with pytest.raises(UnableToSatisfyQueryError): + # check constraint on invalid_dim raises InvalidQueryException + with pytest.raises(InvalidQueryException, match="does not match any of the available"): bookings_query_parser.parse_and_validate_query( metric_names=["bookings"], group_by_names=[MTD], @@ -367,7 +362,7 @@ def test_parse_and_validate_where_constraint_dims(bookings_query_parser: MetricF where_constraint_str="{{ Dimension('booking__invalid_dim') }} = '1'", ) - with pytest.raises(InvalidQueryException): + with pytest.raises(InvalidQueryException, match="Error parsing where filter"): bookings_query_parser.parse_and_validate_query( metric_names=["bookings"], group_by_names=[MTD], @@ -394,7 +389,7 @@ def test_parse_and_validate_where_constraint_metric_time() -> None: revenue_yaml_file = YamlConfigFile(filepath="inline_for_test_2", contents=REVENUE_YAML) query_parser = query_parser_from_yaml([EXAMPLE_PROJECT_CONFIGURATION_YAML_CONFIG_FILE, revenue_yaml_file]) - with pytest.raises(RequestTimeGranularityException): + with pytest.raises(InvalidQueryException, match="does not match any of the available"): query_parser.parse_and_validate_query( metric_names=["revenue"], group_by_names=[MTD], @@ -403,12 +398,15 @@ def test_parse_and_validate_where_constraint_metric_time() -> None: def test_parse_and_validate_metric_constraint_dims() -> None: - """Test that the returned time constraint in the query spec is adjusted to match the granularity of the query.""" + """Test that the returned time constraint in the query spec is adjusted to match the granularity of the query. + + TODO: This test doesn't do what it says it does. 
+ """ revenue_yaml_file = YamlConfigFile(filepath="inline_for_test_2", contents=REVENUE_YAML) query_parser = query_parser_from_yaml([EXAMPLE_PROJECT_CONFIGURATION_YAML_CONFIG_FILE, revenue_yaml_file]) - # check constraint on invalid_dim raises UnableToSatisfyQueryError - with pytest.raises(UnableToSatisfyQueryError): + # check constraint on invalid_dim raises InvalidQueryException + with pytest.raises(InvalidQueryException, match="given input does not match"): query_parser.parse_and_validate_query( metric_names=["metric_with_invalid_constraint"], group_by_names=[MTD], @@ -430,7 +428,7 @@ def test_cumulative_metric_no_time_dimension_validation() -> None: [EXAMPLE_PROJECT_CONFIGURATION_YAML_CONFIG_FILE, bookings_yaml_file, revenue_yaml_file, metrics_yaml_file] ) - with pytest.raises(UnableToSatisfyQueryError, match="must be queried with the dimension 'metric_time'"): + with pytest.raises(InvalidQueryException, match="do not include 'metric_time'"): query_parser.parse_and_validate_query( metric_names=["revenue_cumulative"], ) @@ -454,7 +452,7 @@ def test_cumulative_metric_wrong_time_dimension_validation() -> None: [EXAMPLE_PROJECT_CONFIGURATION_YAML_CONFIG_FILE, bookings_yaml_file, revenue_yaml_file, metrics_yaml_file] ) - with pytest.raises(UnableToSatisfyQueryError, match="must be queried with the dimension 'metric_time'"): + with pytest.raises(InvalidQueryException, match="do not include 'metric_time'"): query_parser.parse_and_validate_query( metric_names=["revenue_cumulative"], group_by_names=["company__loaded_at"], @@ -479,7 +477,7 @@ def test_cumulative_metric_agg_time_dimension_name_validation() -> None: [EXAMPLE_PROJECT_CONFIGURATION_YAML_CONFIG_FILE, bookings_yaml_file, revenue_yaml_file, metrics_yaml_file] ) - with pytest.raises(UnableToSatisfyQueryError, match="must be queried with the dimension 'metric_time'"): + with pytest.raises(InvalidQueryException, match="do not include 'metric_time'"): query_parser.parse_and_validate_query( metric_names=["revenue_cumulative"], group_by_names=["company__ds"], @@ -495,14 +493,14 @@ def test_derived_metric_query_parsing() -> None: [EXAMPLE_PROJECT_CONFIGURATION_YAML_CONFIG_FILE, bookings_yaml_file, revenue_yaml_file, metrics_yaml_file] ) # Attempt to query with no dimension - with pytest.raises(UnableToSatisfyQueryError): + with pytest.raises(InvalidQueryException, match="do not include 'metric_time'"): query_parser.parse_and_validate_query( metric_names=["revenue_sub_10"], group_by_names=[], ) # Attempt to query with non-time dimension - with pytest.raises(UnableToSatisfyQueryError): + with pytest.raises(InvalidQueryException, match="does not match any of the available"): query_parser.parse_and_validate_query( metric_names=["revenue_sub_10"], group_by_names=["country"], @@ -523,17 +521,17 @@ def test_derived_metric_with_offset_parsing() -> None: [EXAMPLE_PROJECT_CONFIGURATION_YAML_CONFIG_FILE, revenue_yaml_file, metrics_yaml_file] ) # Attempt to query with no dimension - with pytest.raises(UnableToSatisfyQueryError): + with pytest.raises(InvalidQueryException, match="do not include 'metric_time'"): query_parser.parse_and_validate_query( metric_names=["revenue_growth_2_weeks"], group_by_names=[], ) # Attempt to query with non-time dimension - with pytest.raises(UnableToSatisfyQueryError): + with pytest.raises(InvalidQueryException, match="do not include 'metric_time'"): query_parser.parse_and_validate_query( metric_names=["revenue_growth_2_weeks"], - group_by_names=["country"], + group_by_names=["company__country"], ) # Query with time 
dimension @@ -552,28 +550,28 @@ def test_date_part_parsing() -> None: ) # Date part is incompatible with metric's defined time granularity - with pytest.raises(RequestTimeGranularityException): + with pytest.raises(InvalidQueryException, match="does not match any of the available"): query_parser.parse_and_validate_query( metric_names=["revenue"], group_by=(TimeDimensionParameter(name="metric_time", date_part=DatePart.DOW),), ) # Can't query date part for cumulative metrics - with pytest.raises(UnableToSatisfyQueryError): + with pytest.raises(InvalidQueryException, match="does not match any of the available"): query_parser.parse_and_validate_query( metric_names=["revenue_cumulative"], group_by=(TimeDimensionParameter(name="metric_time", date_part=DatePart.YEAR),), ) # Can't query date part for metrics with offset to grain - with pytest.raises(UnableToSatisfyQueryError): + with pytest.raises(InvalidQueryException, match="does not allow group-by-items with a date part in the query"): query_parser.parse_and_validate_query( metric_names=["revenue_since_start_of_year"], group_by=(TimeDimensionParameter(name="metric_time", date_part=DatePart.MONTH),), ) # Requested granularity doesn't match resolved granularity - with pytest.raises(RequestTimeGranularityException): + with pytest.raises(InvalidQueryException, match="does not match any of the available"): query_parser.parse_and_validate_query( metric_names=["revenue"], group_by=( diff --git a/metricflow/test/query_rendering/test_cumulative_metric_rendering.py b/metricflow/test/query_rendering/test_cumulative_metric_rendering.py index 2304a8a6c1..d3e887078b 100644 --- a/metricflow/test/query_rendering/test_cumulative_metric_rendering.py +++ b/metricflow/test/query_rendering/test_cumulative_metric_rendering.py @@ -5,21 +5,21 @@ import pytest from _pytest.fixtures import FixtureRequest from dbt_semantic_interfaces.implementations.filters.where_filter import PydanticWhereFilter +from dbt_semantic_interfaces.naming.keywords import METRIC_TIME_ELEMENT_NAME from dbt_semantic_interfaces.test_utils import as_datetime from dbt_semantic_interfaces.type_enums.time_granularity import TimeGranularity from metricflow.dataflow.builder.dataflow_plan_builder import DataflowPlanBuilder -from metricflow.dataset.dataset import DataSet from metricflow.filters.time_constraint import TimeRangeConstraint from metricflow.plan_conversion.dataflow_to_sql import DataflowToSqlQueryPlanConverter from metricflow.protocols.sql_client import SqlClient +from metricflow.query.query_parser import MetricFlowQueryParser from metricflow.specs.column_assoc import ColumnAssociationResolver from metricflow.specs.specs import ( MetricFlowQuerySpec, MetricSpec, TimeDimensionSpec, ) -from metricflow.specs.where_filter_transform import WhereSpecFactory from metricflow.test.fixtures.model_fixtures import ConsistentIdObjectRepository from metricflow.test.fixtures.setup_fixtures import MetricFlowTestSessionState from metricflow.test.query_rendering.compare_rendered_query import convert_and_check @@ -104,6 +104,7 @@ def test_cumulative_metric_with_non_adjustable_time_filter( request: FixtureRequest, mf_test_session_state: MetricFlowTestSessionState, column_association_resolver: ColumnAssociationResolver, + query_parser: MetricFlowQueryParser, dataflow_plan_builder: DataflowPlanBuilder, dataflow_to_sql_converter: DataflowToSqlQueryPlanConverter, consistent_id_object_repository: ConsistentIdObjectRepository, @@ -115,22 +116,17 @@ def test_cumulative_metric_with_non_adjustable_time_filter( span of 
input data for a cumulative metric. When we do not have an adjustable time filter we must include all input data in order to ensure the cumulative metric is correct. """ - dataflow_plan = dataflow_plan_builder.build_plan( - MetricFlowQuerySpec( - metric_specs=(MetricSpec(element_name="every_two_days_bookers"),), - time_dimension_specs=(DataSet.metric_time_dimension_spec(time_granularity=TimeGranularity.DAY),), - where_constraint=WhereSpecFactory( - column_association_resolver=column_association_resolver, - ).create_from_where_filter( - PydanticWhereFilter( - where_sql_template=( - "{{ TimeDimension('metric_time', 'day') }} = '2020-01-03' " - "or {{ TimeDimension('metric_time', 'day') }} = '2020-01-07'" - ) - ), - ), + query_spec = query_parser.parse_and_validate_query( + metric_names=("every_two_days_bookers",), + group_by_names=(METRIC_TIME_ELEMENT_NAME,), + where_constraint=PydanticWhereFilter( + where_sql_template=( + "{{ TimeDimension('metric_time', 'day') }} = '2020-01-03' " + "or {{ TimeDimension('metric_time', 'day') }} = '2020-01-07'" + ) ), ) + dataflow_plan = dataflow_plan_builder.build_plan(query_spec) convert_and_check( request=request, diff --git a/metricflow/test/query_rendering/test_derived_metric_rendering.py b/metricflow/test/query_rendering/test_derived_metric_rendering.py index e2f8f1a7d2..933328b80d 100644 --- a/metricflow/test/query_rendering/test_derived_metric_rendering.py +++ b/metricflow/test/query_rendering/test_derived_metric_rendering.py @@ -7,17 +7,18 @@ import pytest from _pytest.fixtures import FixtureRequest from dbt_semantic_interfaces.implementations.filters.where_filter import PydanticWhereFilter +from dbt_semantic_interfaces.naming.keywords import METRIC_TIME_ELEMENT_NAME from metricflow.dataflow.builder.dataflow_plan_builder import DataflowPlanBuilder from metricflow.filters.time_constraint import TimeRangeConstraint from metricflow.plan_conversion.dataflow_to_sql import DataflowToSqlQueryPlanConverter from metricflow.protocols.sql_client import SqlClient +from metricflow.query.query_parser import MetricFlowQueryParser from metricflow.specs.column_assoc import ColumnAssociationResolver from metricflow.specs.specs import ( MetricFlowQuerySpec, MetricSpec, ) -from metricflow.specs.where_filter_transform import WhereSpecFactory from metricflow.test.fixtures.setup_fixtures import MetricFlowTestSessionState from metricflow.test.query_rendering.compare_rendered_query import convert_and_check from metricflow.test.time.metric_time_dimension import ( @@ -106,26 +107,22 @@ def test_derived_metric_with_offset_window_and_time_filter( # noqa: D request: FixtureRequest, mf_test_session_state: MetricFlowTestSessionState, column_association_resolver: ColumnAssociationResolver, + query_parser: MetricFlowQueryParser, dataflow_plan_builder: DataflowPlanBuilder, dataflow_to_sql_converter: DataflowToSqlQueryPlanConverter, sql_client: SqlClient, ) -> None: - dataflow_plan = dataflow_plan_builder.build_plan( - MetricFlowQuerySpec( - metric_specs=(MetricSpec(element_name="bookings_growth_2_weeks"),), - time_dimension_specs=(MTD_SPEC_DAY,), - where_constraint=WhereSpecFactory( - column_association_resolver=column_association_resolver, - ).create_from_where_filter( - PydanticWhereFilter( - where_sql_template=( - "{{ TimeDimension('metric_time', 'day') }} = '2020-01-01' " - "or {{ TimeDimension('metric_time', 'day') }} = '2020-01-14'" - ) - ), - ), - ) - ) + query_spec = query_parser.parse_and_validate_query( + metric_names=("bookings_growth_2_weeks",), + 
group_by_names=(METRIC_TIME_ELEMENT_NAME,), + where_constraint=PydanticWhereFilter( + where_sql_template=( + "{{ TimeDimension('metric_time', 'day') }} = '2020-01-01' " + "or {{ TimeDimension('metric_time', 'day') }} = '2020-01-14'" + ) + ), + ) + dataflow_plan = dataflow_plan_builder.build_plan(query_spec) convert_and_check( request=request, @@ -382,28 +379,24 @@ def test_nested_derived_metric_with_offset_multiple_input_metrics( # noqa: D def test_nested_offsets_with_where_constraint( # noqa: D request: FixtureRequest, mf_test_session_state: MetricFlowTestSessionState, + query_parser: MetricFlowQueryParser, dataflow_plan_builder: DataflowPlanBuilder, dataflow_to_sql_converter: DataflowToSqlQueryPlanConverter, sql_client: SqlClient, column_association_resolver: ColumnAssociationResolver, create_source_tables: bool, ) -> None: - dataflow_plan = dataflow_plan_builder.build_plan( - query_spec=MetricFlowQuerySpec( - metric_specs=(MetricSpec(element_name="bookings_offset_twice"),), - time_dimension_specs=(MTD_SPEC_DAY,), - where_constraint=WhereSpecFactory( - column_association_resolver=column_association_resolver, - ).create_from_where_filter( - PydanticWhereFilter( - where_sql_template=( - "{{ TimeDimension('metric_time', 'day') }} = '2020-01-12' " - "or {{ TimeDimension('metric_time', 'day') }} = '2020-01-13'" - ) - ), - ), - ) - ) + query_spec = query_parser.parse_and_validate_query( + metric_names=("bookings_offset_twice",), + group_by_names=(METRIC_TIME_ELEMENT_NAME,), + where_constraint=PydanticWhereFilter( + where_sql_template=( + "{{ TimeDimension('metric_time', 'day') }} = '2020-01-12' " + "or {{ TimeDimension('metric_time', 'day') }} = '2020-01-13'" + ) + ), + ) + dataflow_plan = dataflow_plan_builder.build_plan(query_spec) convert_and_check( request=request, diff --git a/metricflow/test/query_rendering/test_query_rendering.py b/metricflow/test/query_rendering/test_query_rendering.py index e9684993b0..e6318a13d0 100644 --- a/metricflow/test/query_rendering/test_query_rendering.py +++ b/metricflow/test/query_rendering/test_query_rendering.py @@ -10,6 +10,7 @@ import pytest from _pytest.fixtures import FixtureRequest from dbt_semantic_interfaces.implementations.filters.where_filter import PydanticWhereFilter +from dbt_semantic_interfaces.naming.keywords import METRIC_TIME_ELEMENT_NAME from dbt_semantic_interfaces.references import EntityReference from dbt_semantic_interfaces.test_utils import as_datetime from dbt_semantic_interfaces.type_enums.time_granularity import TimeGranularity @@ -21,15 +22,14 @@ from metricflow.plan_conversion.column_resolver import DunderColumnAssociationResolver from metricflow.plan_conversion.dataflow_to_sql import DataflowToSqlQueryPlanConverter from metricflow.protocols.sql_client import SqlClient +from metricflow.query.query_parser import MetricFlowQueryParser from metricflow.specs.column_assoc import ColumnAssociationResolver from metricflow.specs.specs import ( DimensionSpec, MetricFlowQuerySpec, MetricSpec, - OrderBySpec, TimeDimensionSpec, ) -from metricflow.specs.where_filter_transform import WhereSpecFactory from metricflow.test.fixtures.model_fixtures import ConsistentIdObjectRepository from metricflow.test.fixtures.setup_fixtures import MetricFlowTestSessionState from metricflow.test.query_rendering.compare_rendered_query import convert_and_check @@ -95,31 +95,20 @@ def test_filter_with_where_constraint_on_join_dim( mf_test_session_state: MetricFlowTestSessionState, column_association_resolver: ColumnAssociationResolver, dataflow_plan_builder: 
DataflowPlanBuilder, + query_parser: MetricFlowQueryParser, dataflow_to_sql_converter: DataflowToSqlQueryPlanConverter, consistent_id_object_repository: ConsistentIdObjectRepository, sql_client: SqlClient, ) -> None: """Tests converting a dataflow plan to a SQL query plan where there is a join between 1 measure and 2 dimensions.""" - dataflow_plan = dataflow_plan_builder.build_plan( - MetricFlowQuerySpec( - metric_specs=(MetricSpec(element_name="bookings"),), - dimension_specs=( - DimensionSpec( - element_name="is_instant", - entity_links=(), - ), - ), - where_constraint=( - WhereSpecFactory( - column_association_resolver=column_association_resolver, - ).create_from_where_filter( - PydanticWhereFilter( - where_sql_template="{{ Dimension('listing__country_latest') }} = 'us'", - ) - ) - ), - ) + query_spec = query_parser.parse_and_validate_query( + metric_names=("bookings",), + group_by_names=("booking__is_instant",), + where_constraint=PydanticWhereFilter( + where_sql_template="{{ Dimension('listing__country_latest') }} = 'us'", + ), ) + dataflow_plan = dataflow_plan_builder.build_plan(query_spec) convert_and_check( request=request, @@ -196,37 +185,22 @@ def test_limit_rows( # noqa: D def test_distinct_values( # noqa: D request: FixtureRequest, mf_test_session_state: MetricFlowTestSessionState, + query_parser: MetricFlowQueryParser, dataflow_plan_builder: DataflowPlanBuilder, dataflow_to_sql_converter: DataflowToSqlQueryPlanConverter, column_association_resolver: ColumnAssociationResolver, sql_client: SqlClient, ) -> None: """Tests a plan to get distinct values for a dimension.""" - dataflow_plan = dataflow_plan_builder.build_plan_for_distinct_values( - query_spec=MetricFlowQuerySpec( - dimension_specs=( - DimensionSpec(element_name="country_latest", entity_links=(EntityReference(element_name="listing"),)), - ), - where_constraint=( - WhereSpecFactory( - column_association_resolver=column_association_resolver, - ).create_from_where_filter( - PydanticWhereFilter( - where_sql_template="{{ Dimension('listing__country_latest') }} = 'us'", - ) - ) - ), - order_by_specs=( - OrderBySpec( - instance_spec=DimensionSpec( - element_name="country_latest", entity_links=(EntityReference(element_name="listing"),) - ), - descending=True, - ), - ), - limit=100, - ) + query_spec = query_parser.parse_and_validate_query( + group_by_names=("listing__country_latest",), + order_by_names=("-listing__country_latest",), + where_constraint=PydanticWhereFilter( + where_sql_template="{{ Dimension('listing__country_latest') }} = 'us'", + ), + limit=100, ) + dataflow_plan = dataflow_plan_builder.build_plan_for_distinct_values(query_spec) convert_and_check( request=request, @@ -270,16 +244,16 @@ def test_local_dimension_using_local_entity( # noqa: D def test_measure_constraint( # noqa: D request: FixtureRequest, mf_test_session_state: MetricFlowTestSessionState, + query_parser: MetricFlowQueryParser, dataflow_plan_builder: DataflowPlanBuilder, dataflow_to_sql_converter: DataflowToSqlQueryPlanConverter, sql_client: SqlClient, ) -> None: - dataflow_plan = dataflow_plan_builder.build_plan( - MetricFlowQuerySpec( - metric_specs=(MetricSpec(element_name="lux_booking_value_rate_expr"),), - time_dimension_specs=(MTD_SPEC_DAY,), - ) + query_spec = query_parser.parse_and_validate_query( + metric_names=("lux_booking_value_rate_expr",), + group_by_names=(MTD_SPEC_DAY.qualified_name,), ) + dataflow_plan = dataflow_plan_builder.build_plan(query_spec) convert_and_check( request=request, @@ -294,16 +268,16 @@ def 
test_measure_constraint( # noqa: D def test_measure_constraint_with_reused_measure( # noqa: D request: FixtureRequest, mf_test_session_state: MetricFlowTestSessionState, + query_parser: MetricFlowQueryParser, dataflow_plan_builder: DataflowPlanBuilder, dataflow_to_sql_converter: DataflowToSqlQueryPlanConverter, sql_client: SqlClient, ) -> None: - dataflow_plan = dataflow_plan_builder.build_plan( - MetricFlowQuerySpec( - metric_specs=(MetricSpec(element_name="instant_booking_value_ratio"),), - time_dimension_specs=(MTD_SPEC_DAY,), - ) + query_spec = query_parser.parse_and_validate_query( + metric_names=("instant_booking_value_ratio",), + group_by_names=(MTD_SPEC_DAY.qualified_name,), ) + dataflow_plan = dataflow_plan_builder.build_plan(query_spec) convert_and_check( request=request, @@ -318,17 +292,18 @@ def test_measure_constraint_with_reused_measure( # noqa: D def test_measure_constraint_with_single_expr_and_alias( # noqa: D request: FixtureRequest, mf_test_session_state: MetricFlowTestSessionState, + query_parser: MetricFlowQueryParser, dataflow_plan_builder: DataflowPlanBuilder, dataflow_to_sql_converter: DataflowToSqlQueryPlanConverter, sql_client: SqlClient, ) -> None: - dataflow_plan = dataflow_plan_builder.build_plan( - MetricFlowQuerySpec( - metric_specs=(MetricSpec(element_name="double_counted_delayed_bookings"),), - time_dimension_specs=(MTD_SPEC_DAY,), - ) + query_spec = query_parser.parse_and_validate_query( + metric_names=("double_counted_delayed_bookings",), + group_by_names=(MTD_SPEC_DAY.qualified_name,), ) + dataflow_plan = dataflow_plan_builder.build_plan(query_spec) + convert_and_check( request=request, mf_test_session_state=mf_test_session_state, @@ -343,30 +318,20 @@ def test_join_to_scd_dimension( request: FixtureRequest, mf_test_session_state: MetricFlowTestSessionState, scd_column_association_resolver: ColumnAssociationResolver, + scd_query_parser: MetricFlowQueryParser, scd_dataflow_plan_builder: DataflowPlanBuilder, scd_dataflow_to_sql_converter: DataflowToSqlQueryPlanConverter, sql_client: SqlClient, ) -> None: """Tests conversion of a plan using a dimension with a validity window inside a measure constraint.""" - dataflow_plan = scd_dataflow_plan_builder.build_plan( - MetricFlowQuerySpec( - metric_specs=( - MetricSpec( - element_name="family_bookings", - constraint=( - WhereSpecFactory( - column_association_resolver=scd_column_association_resolver, - ).create_from_where_filter( - PydanticWhereFilter( - where_sql_template="{{ Dimension('listing__capacity') }} > 2", - ) - ) - ), - ), - ), - time_dimension_specs=(MTD_SPEC_DAY,), + query_spec = scd_query_parser.parse_and_validate_query( + metric_names=("family_bookings",), + group_by_names=(METRIC_TIME_ELEMENT_NAME,), + where_constraint=PydanticWhereFilter( + where_sql_template="{{ Dimension('listing__capacity') }} > 2", ), ) + dataflow_plan = scd_dataflow_plan_builder.build_plan(query_spec) convert_and_check( request=request,