diff --git a/metricflow/test/plan_conversion/test_dataflow_to_sql_plan.py b/metricflow/test/plan_conversion/test_dataflow_to_sql_plan.py index 586ae959f3..4c8d80ac6b 100644 --- a/metricflow/test/plan_conversion/test_dataflow_to_sql_plan.py +++ b/metricflow/test/plan_conversion/test_dataflow_to_sql_plan.py @@ -32,6 +32,7 @@ from metricflow.filters.time_constraint import TimeRangeConstraint from metricflow.plan_conversion.dataflow_to_sql import DataflowToSqlQueryPlanConverter from metricflow.protocols.sql_client import SqlClient +from metricflow.query.query_parser import MetricFlowQueryParser from metricflow.specs.column_assoc import ColumnAssociationResolver from metricflow.specs.specs import ( DimensionSpec, @@ -1105,26 +1106,18 @@ def test_dimensions_requiring_join( def test_dimension_with_joined_where_constraint( request: FixtureRequest, mf_test_session_state: MetricFlowTestSessionState, + query_parser: MetricFlowQueryParser, dataflow_plan_builder: DataflowPlanBuilder, dataflow_to_sql_converter: DataflowToSqlQueryPlanConverter, sql_client: SqlClient, column_association_resolver: ColumnAssociationResolver, ) -> None: """Tests querying a dimension with a where filter on a dimension that requires a join.""" - dataflow_plan = dataflow_plan_builder.build_plan_for_distinct_values( - query_spec=MetricFlowQuerySpec( - dimension_specs=( - DimensionSpec(element_name="home_state_latest", entity_links=(EntityReference(element_name="user"),)), - ), - where_constraint=WhereSpecFactory( - column_association_resolver=column_association_resolver, - ).create_from_where_filter( - PydanticWhereFilter( - where_sql_template="{{ Dimension('listing__country_latest') }} = 'us'", - ) - ), - ), + query_spec = query_parser.parse_and_validate_query( + group_by_names=("user__home_state_latest",), + where_constraint_str="{{ Dimension('listing__country_latest') }} = 'us'", ) + dataflow_plan = dataflow_plan_builder.build_plan_for_distinct_values(query_spec) convert_and_check( request=request, diff --git 
a/metricflow/test/query_rendering/test_derived_metric_rendering.py b/metricflow/test/query_rendering/test_derived_metric_rendering.py index 97c7fc5ac6..1bf6204a9d 100644 --- a/metricflow/test/query_rendering/test_derived_metric_rendering.py +++ b/metricflow/test/query_rendering/test_derived_metric_rendering.py @@ -11,6 +11,7 @@ from metricflow.dataflow.builder.dataflow_plan_builder import DataflowPlanBuilder from metricflow.filters.time_constraint import TimeRangeConstraint +from metricflow.naming.dunder_scheme import DunderNamingScheme from metricflow.plan_conversion.dataflow_to_sql import DataflowToSqlQueryPlanConverter from metricflow.protocols.sql_client import SqlClient from metricflow.query.query_parser import MetricFlowQueryParser @@ -495,24 +496,23 @@ def test_cumulative_time_offset_metric_with_time_constraint( # noqa: D def test_nested_derived_metric_offset_with_joined_where_constraint_not_selected( # noqa: D request: FixtureRequest, mf_test_session_state: MetricFlowTestSessionState, + query_parser: MetricFlowQueryParser, dataflow_plan_builder: DataflowPlanBuilder, dataflow_to_sql_converter: DataflowToSqlQueryPlanConverter, sql_client: SqlClient, create_source_tables: bool, column_association_resolver: ColumnAssociationResolver, ) -> None: - dataflow_plan = dataflow_plan_builder.build_plan( - query_spec=MetricFlowQuerySpec( - metric_specs=(MetricSpec(element_name="bookings_offset_twice"),), - time_dimension_specs=(MTD_SPEC_DAY,), - where_constraint=WhereSpecFactory( - column_association_resolver=column_association_resolver, - ).create_from_where_filter( - PydanticWhereFilter(where_sql_template=("{{ Dimension('booking__is_instant') }}")) - ), - ) + group_by_name = DunderNamingScheme().input_str(MTD_SPEC_DAY) + assert group_by_name is not None + + query_spec = query_parser.parse_and_validate_query( + metric_names=("bookings_offset_twice",), + group_by_names=(group_by_name,), + where_constraint_str="{{ Dimension('booking__is_instant') }}", ) + dataflow_plan = 
dataflow_plan_builder.build_plan(query_spec) convert_and_check( request=request, mf_test_session_state=mf_test_session_state,