diff --git a/metricflow/dataflow/builder/dataflow_plan_builder.py b/metricflow/dataflow/builder/dataflow_plan_builder.py
index 1445c7918f..c989d43131 100644
--- a/metricflow/dataflow/builder/dataflow_plan_builder.py
+++ b/metricflow/dataflow/builder/dataflow_plan_builder.py
@@ -792,7 +792,7 @@ def _build_aggregated_measures_from_measure_source_node(
         # If querying an offset metric, join to time spine.
         join_to_time_spine_node: Optional[JoinToTimeSpineNode] = None
         if metric_spec.offset_window or metric_spec.offset_to_grain:
-            assert metric_time_dimension_specs, "Joining to time spine requires querying with metric time."
+            assert metric_time_dimension_specs, "Joining to time spine requires querying with metric_time."
             join_to_time_spine_node = JoinToTimeSpineNode(
                 parent_node=time_range_node or measure_recipe.measure_node,
                 metric_time_dimension_specs=metric_time_dimension_specs,
diff --git a/metricflow/engine/metricflow_engine.py b/metricflow/engine/metricflow_engine.py
index d08163c5d4..e508c32a92 100644
--- a/metricflow/engine/metricflow_engine.py
+++ b/metricflow/engine/metricflow_engine.py
@@ -456,6 +456,7 @@ def _create_execution_plan(self, mf_query_request: MetricFlowQueryRequest) -> Me
         query_spec = self._query_parser.parse_and_validate_query(
             metric_names=mf_query_request.metric_names,
             group_by_names=mf_query_request.group_by_names,
+            group_by=mf_query_request.group_by,
             limit=mf_query_request.limit,
             time_constraint_start=mf_query_request.time_constraint_start,
             time_constraint_end=mf_query_request.time_constraint_end,
diff --git a/metricflow/naming/linkable_spec_name.py b/metricflow/naming/linkable_spec_name.py
index 01c3863ad0..0e73415cd4 100644
--- a/metricflow/naming/linkable_spec_name.py
+++ b/metricflow/naming/linkable_spec_name.py
@@ -37,31 +37,6 @@ def from_name(qualified_name: str) -> StructuredLinkableSpecName:
         if len(name_parts) == 1:
             return StructuredLinkableSpecName(entity_link_names=(), element_name=name_parts[0])
 
-        associated_date_part: Optional[DatePart] = None
-        for date_part in DatePart:
-            if name_parts[-1] == StructuredLinkableSpecName.date_part_suffix(date_part):
-                associated_date_part = date_part
-
-        # Has a date_part
-        if associated_date_part:
-            # e.g. "ds__extract_month"
-            if len(name_parts) == 2:
-                # Since DAY works with all currently supported DateParts & changing the granularity will not change the
-                # extracted date part, assume day granularity here.
-                time_granularity = TimeGranularity.DAY
-                return StructuredLinkableSpecName(
-                    entity_link_names=(),
-                    element_name=name_parts[0],
-                    time_granularity=time_granularity,
-                    date_part=associated_date_part,
-                )
-            # e.g. "messages__ds__extract_month"
-            return StructuredLinkableSpecName(
-                entity_link_names=tuple(name_parts[:-2]),
-                element_name=name_parts[-2],
-                date_part=associated_date_part,
-            )
-
         associated_granularity = None
         granularity: TimeGranularity
         for granularity in TimeGranularity:
@@ -94,7 +69,6 @@ def qualified_name(self) -> str:
             items.append(self.date_part_suffix(date_part=self.date_part))
         elif self.time_granularity:
             items.append(self.time_granularity.value)
-
         return DUNDER.join(items)
 
     @property
diff --git a/metricflow/query/query_parser.py b/metricflow/query/query_parser.py
index 956f3933fc..329bb3bdc3 100644
--- a/metricflow/query/query_parser.py
+++ b/metricflow/query/query_parser.py
@@ -299,7 +299,12 @@ def _get_group_by_names(
         return (
             group_by_names
             if group_by_names
-            else [f"{g.name}__{g.date_part or g.grain}" if (g.date_part or g.grain) else g.name for g in group_by]
+            else [
+                StructuredLinkableSpecName(
+                    entity_link_names=(), element_name=g.name, time_granularity=g.grain, date_part=g.date_part
+                ).qualified_name
+                for g in group_by
+            ]
             if group_by
             else []
         )
@@ -344,7 +349,6 @@ def _parse_and_validate_query(
         time_granularity: Optional[TimeGranularity] = None,
     ) -> MetricFlowQuerySpec:
         metric_names = self._get_metric_names(metric_names, metrics)
-        group_by_names = self._get_group_by_names(group_by_names, group_by)
         where_filter = self._get_where_filter(where_constraint, where_constraint_str)
         order = self._get_order(order, order_by)
 
@@ -394,7 +398,9 @@
             # If the time constraint is all time, just ignore and not render
             time_constraint = None
 
-        requested_linkable_specs = self._parse_linkable_element_names(group_by_names, metric_references)
+        requested_linkable_specs = self._parse_linkable_elements(
+            qualified_linkable_names=group_by_names, linkable_elements=group_by, metric_references=metric_references
+        )
         where_filter_spec: Optional[WhereFilterSpec] = None
         if where_filter is not None:
             try:
@@ -427,7 +433,16 @@
         if len(time_dimension_specs) == 0:
             self._validate_no_time_dimension_query(metric_references=metric_references)
 
-        self._time_granularity_solver.validate_time_granularity(metric_references, time_dimension_specs)
+        self._time_granularity_solver.validate_time_granularity_and_date_part(metric_references, time_dimension_specs)
+        for time_dimension_spec in time_dimension_specs:
+            if (
+                time_dimension_spec.date_part
+                and time_dimension_spec.date_part.to_int() < time_dimension_spec.time_granularity.to_int()
+            ):
+                raise RequestTimeGranularityException(
+                    f"Date part {time_dimension_spec.date_part.name} is not compatible with time granularity "
+                    f"{time_dimension_spec.time_granularity.name}."
+                )
 
         order_by_specs = self._parse_order_by(order or [], partial_time_dimension_spec_replacements)
 
@@ -437,8 +452,9 @@
         for metric_reference in metric_references:
             metric = self._metric_lookup.get_metric(metric_reference)
             if metric.filter is not None:
-                group_by_specs_for_one_metric = self._parse_linkable_element_names(
+                group_by_specs_for_one_metric = self._parse_linkable_elements(
                     qualified_linkable_names=group_by_names,
+                    linkable_elements=group_by,
                     metric_references=(metric_reference,),
                 )
 
@@ -492,7 +508,7 @@
             raise InvalidQueryException(f"Limit was specified as {limit}, which is < 0.")
 
         if where_filter_spec:
-            self._time_granularity_solver.validate_time_granularity(
+            self._time_granularity_solver.validate_time_granularity_and_date_part(
                 metric_references=metric_references,
                 time_dimension_specs=where_filter_spec.linkable_spec_set.time_dimension_specs,
             )
@@ -537,7 +553,7 @@ def _adjust_time_range_constraint(
         time_range_constraint: TimeRangeConstraint,
     ) -> TimeRangeConstraint:
         """Adjust the time range constraint so that it matches the boundaries of the granularity of the result."""
-        self._time_granularity_solver.validate_time_granularity(metric_references, time_dimension_specs)
+        self._time_granularity_solver.validate_time_granularity_and_date_part(metric_references, time_dimension_specs)
 
         smallest_primary_time_granularity_in_query = self._find_smallest_metric_time_dimension_spec_granularity(
             time_dimension_specs
@@ -644,21 +660,38 @@ def _parse_metric_names(
                 metric_references.extend(list(input_metrics))
         return tuple(metric_references)
 
-    def _parse_linkable_element_names(
+    def _parse_linkable_elements(
         self,
-        qualified_linkable_names: Sequence[str],
         metric_references: Sequence[MetricReference],
+        qualified_linkable_names: Optional[Sequence[str]] = None,
+        linkable_elements: Optional[Sequence[QueryParameter]] = None,
     ) -> QueryTimeLinkableSpecSet:
         """Convert the linkable spec names into the respective specification objects."""
 
-        qualified_linkable_names = [x.lower() for x in qualified_linkable_names]
+        assert not (qualified_linkable_names and linkable_elements)
+
+        structured_names: List[StructuredLinkableSpecName] = []
+        if qualified_linkable_names:
+            qualified_linkable_names = [x.lower() for x in qualified_linkable_names]
+            structured_names = [StructuredLinkableSpecName.from_name(name) for name in qualified_linkable_names]
+        elif linkable_elements:
+            for linkable_element in linkable_elements:
+                parsed_name = StructuredLinkableSpecName.from_name(linkable_element.name)
+                if parsed_name.time_granularity:
+                    raise ValueError("Time granularity must be passed in the grain attribute for group_by query param.")
+                structured_name = StructuredLinkableSpecName(
+                    entity_link_names=parsed_name.entity_link_names,
+                    element_name=parsed_name.element_name,
+                    time_granularity=linkable_element.grain,
+                    date_part=linkable_element.date_part,
+                )
+                structured_names.append(structured_name)
 
         dimension_specs = []
         time_dimension_specs = []
         partial_time_dimension_specs = []
         entity_specs = []
-        for qualified_name in qualified_linkable_names:
-            structured_name = StructuredLinkableSpecName.from_name(qualified_name)
+        for structured_name in structured_names:
             element_name = structured_name.element_name
             entity_links = tuple(EntityReference(element_name=x) for x in structured_name.entity_link_names)
             # Create the spec based on the type of element referenced.
@@ -677,6 +710,7 @@ def _parse_linkable_element_names(
                     PartialTimeDimensionSpec(
                         element_name=element_name,
                         entity_links=entity_links,
+                        date_part=structured_name.date_part,
                     )
                 )
             elif DimensionReference(element_name=element_name) in self._known_dimension_element_references:
@@ -684,20 +718,30 @@ def _parse_linkable_element_names(
             elif EntityReference(element_name=element_name) in self._known_entity_element_references:
                 entity_specs.append(EntitySpec(element_name=element_name, entity_links=entity_links))
             else:
+                valid_group_bys_for_metrics = self._metric_lookup.element_specs_for_metrics(list(metric_references))
                 valid_group_by_names_for_metrics = sorted(
-                    x.qualified_name for x in self._metric_lookup.element_specs_for_metrics(list(metric_references))
+                    list(
+                        set(
+                            x.qualified_name if qualified_linkable_names else x.element_name
+                            for x in valid_group_bys_for_metrics
+                        )
+                    )
                 )
 
+                # If requested by name, show qualified name. If requested as object, show element name.
+                display_name = structured_name.qualified_name if qualified_linkable_names else element_name
                 suggestions = {
-                    f"Suggestions for '{qualified_name}'": pformat_big_objects(
+                    f"Suggestions for '{display_name}'": pformat_big_objects(
                         MetricFlowQueryParser._top_fuzzy_matches(
-                            item=qualified_name,
+                            item=display_name,
                             candidate_items=valid_group_by_names_for_metrics,
                         )
                     )
                 }
                 raise UnableToSatisfyQueryError(
-                    f"Unknown element name '{element_name}' in dimension name '{qualified_name}'",
+                    f"Unknown element name '{element_name}' in dimension name '{display_name}'"
+                    if qualified_linkable_names
+                    else f"Unknown dimension {element_name}",
                     context=suggestions,
                 )
diff --git a/metricflow/test/conftest.py b/metricflow/test/conftest.py
index 31cb6d558e..13318c9f16 100644
--- a/metricflow/test/conftest.py
+++ b/metricflow/test/conftest.py
@@ -1,6 +1,11 @@
 # These imports are required to properly set up pytest fixtures.
 from __future__ import annotations
 
+from dataclasses import dataclass
+from typing import Optional
+
+from dbt_semantic_interfaces.type_enums.time_granularity import TimeGranularity
+
 from metricflow.test.fixtures.cli_fixtures import *  # noqa: F401, F403
 from metricflow.test.fixtures.dataflow_fixtures import *  # noqa: F401, F403
 from metricflow.test.fixtures.id_fixtures import *  # noqa: F401, F403
@@ -9,3 +14,13 @@
 from metricflow.test.fixtures.sql_client_fixtures import *  # noqa: F401, F403
 from metricflow.test.fixtures.sql_fixtures import *  # noqa: F401, F403
 from metricflow.test.fixtures.table_fixtures import *  # noqa: F401, F403
+from metricflow.time.date_part import DatePart
+
+
+@dataclass
+class MockQueryParameter:
+    """This is a mock that is just used to test the query parser."""
+
+    name: str
+    grain: Optional[TimeGranularity] = None
+    date_part: Optional[DatePart] = None
diff --git a/metricflow/test/integration/configured_test_case.py b/metricflow/test/integration/configured_test_case.py
index a6c0ac8e20..766873b9b0 100644
--- a/metricflow/test/integration/configured_test_case.py
+++ b/metricflow/test/integration/configured_test_case.py
@@ -4,7 +4,7 @@
 import os
 from collections import OrderedDict
 from enum import Enum
-from typing import Optional, Sequence, Tuple
+from typing import Dict, Optional, Sequence, Tuple
 
 import yaml
 from dbt_semantic_interfaces.implementations.base import FrozenBaseModel
@@ -53,6 +53,7 @@ class Config:  # noqa: D
     check_query: str
     file_path: str
     group_bys: Tuple[str, ...] = ()
+    group_by_objs: Tuple[Dict, ...] = ()
     order_bys: Tuple[str, ...] = ()
     # The required features in the DW engine for the test to complete.
     required_features: Tuple[RequiredDwEngineFeatures, ...] = ()
diff --git a/metricflow/test/integration/test_cases/itest_metrics.yaml b/metricflow/test/integration/test_cases/itest_metrics.yaml
index bd53ff686f..20c0f302d4 100644
--- a/metricflow/test/integration/test_cases/itest_metrics.yaml
+++ b/metricflow/test/integration/test_cases/itest_metrics.yaml
@@ -1049,7 +1049,7 @@ integration_test:
   description: Test query using date_part
   model: SIMPLE_MODEL
   metrics: ["bookings"]
-  group_bys: ["metric_time__extract_year"]
+  group_by_objs: [{"name": "metric_time", "date_part": "year"}]
   check_query: |
     SELECT
       SUM(1) AS bookings
@@ -1062,7 +1062,13 @@ integration_test:
   description: Test query using multiple date_parts
   model: SIMPLE_MODEL
   metrics: ["bookings"]
-  group_bys: ["metric_time__extract_quarter", "metric_time__extract_dow", "metric_time__extract_doy", "metric_time__extract_day", "metric_time__extract_week"]
+  group_by_objs: [
+    {"name": "metric_time", "date_part": "quarter"},
+    {"name": "metric_time", "date_part": "dow"},
+    {"name": "metric_time", "date_part": "doy"},
+    {"name": "metric_time", "date_part": "day"},
+    {"name": "metric_time", "date_part": "week"},
+  ]
   check_query: |
     SELECT
       SUM(1) AS bookings
@@ -1084,7 +1090,7 @@ integration_test:
   description: Tests a derived metric offset query with window and date_part
   model: SIMPLE_MODEL
   metrics: ["bookings_5_day_lag"]
-  group_bys: ["metric_time__extract_month"]
+  group_by_objs: [{"name": "metric_time", "date_part": "month"}]
   check_query: |
     SELECT
       {{ render_extract("a.ds", DatePart.MONTH) }} AS metric_time__extract_month
diff --git a/metricflow/test/integration/test_configured_cases.py b/metricflow/test/integration/test_configured_cases.py
index cde625fde7..46dd07e2a3 100644
--- a/metricflow/test/integration/test_configured_cases.py
+++ b/metricflow/test/integration/test_configured_cases.py
@@ -2,6 +2,7 @@
 
 import datetime
 import logging
+from copy import copy
 from typing import List, Optional, Sequence, Tuple
 
 import jinja2
@@ -31,6 +32,7 @@
     SqlTimeDeltaExpression,
 )
 from metricflow.test.compare_df import assert_dataframes_equal
+from metricflow.test.conftest import MockQueryParameter
 from metricflow.test.fixtures.setup_fixtures import MetricFlowTestSessionState
 from metricflow.test.integration.configured_test_case import (
     CONFIGURED_INTEGRATION_TESTS_REPOSITORY,
@@ -253,10 +255,18 @@ def test_case(
 
     check_query_helpers = CheckQueryHelpers(sql_client)
 
+    group_by: List[MockQueryParameter] = []
+    for group_by_kwargs in case.group_by_objs:
+        kwargs = copy(group_by_kwargs)
+        date_part = kwargs.get("date_part")
+        if date_part:
+            kwargs["date_part"] = DatePart(date_part)
+        group_by.append(MockQueryParameter(**kwargs))
     query_result = engine.query(
         MetricFlowQueryRequest.create_with_random_request_id(
             metric_names=case.metrics,
            group_by_names=case.group_bys,
+            group_by=tuple(group_by),
            limit=case.limit,
             time_constraint_start=parser.parse(case.time_constraint[0]) if case.time_constraint else None,
             time_constraint_end=parser.parse(case.time_constraint[1]) if case.time_constraint else None,
diff --git a/metricflow/test/query/test_query_parser.py b/metricflow/test/query/test_query_parser.py
index b4962b4bee..41f205ed05 100644
--- a/metricflow/test/query/test_query_parser.py
+++ b/metricflow/test/query/test_query_parser.py
@@ -21,9 +21,11 @@
     OrderBySpec,
     TimeDimensionSpec,
 )
+from metricflow.test.conftest import MockQueryParameter
 from metricflow.test.fixtures.model_fixtures import query_parser_from_yaml
 from metricflow.test.model.example_project_configuration import EXAMPLE_PROJECT_CONFIGURATION_YAML_CONFIG_FILE
 from metricflow.test.time.metric_time_dimension import MTD
+from metricflow.time.date_part import DatePart
 from metricflow.time.time_granularity_solver import RequestTimeGranularityException
 
 logger = logging.getLogger(__name__)
@@ -171,16 +173,6 @@ def test_query_parser(bookings_query_parser: MetricFlowQueryParser) -> None:  #
     )
 
 
-class MockQueryParameter:
-    """This is a mock that is just used to test the query parser."""
-
-    grain = None
-    date_part = None
-
-    def __init__(self, name: str):  # noqa: D
-        self.name = name
-
-
 def test_query_parser_with_object_params(bookings_query_parser: MetricFlowQueryParser) -> None:  # noqa: D
     Metric = namedtuple("Metric", ["name"])
     metric = Metric("bookings")
@@ -372,11 +364,10 @@ def test_derived_metric_query_parsing() -> None:
 
 def test_derived_metric_with_offset_parsing() -> None:
     """Test that querying derived metrics with a time offset requires a time dimension."""
-    bookings_yaml_file = YamlConfigFile(filepath="inline_for_test_1", contents=BOOKINGS_YAML)
-    bookings_yaml_file = YamlConfigFile(filepath="inline_for_test_1", contents=REVENUE_YAML)
+    revenue_yaml_file = YamlConfigFile(filepath="inline_for_test_1", contents=REVENUE_YAML)
     metrics_yaml_file = YamlConfigFile(filepath="inline_for_test_1", contents=METRICS_YAML)
     query_parser = query_parser_from_yaml(
-        [EXAMPLE_PROJECT_CONFIGURATION_YAML_CONFIG_FILE, bookings_yaml_file, metrics_yaml_file]
+        [EXAMPLE_PROJECT_CONFIGURATION_YAML_CONFIG_FILE, revenue_yaml_file, metrics_yaml_file]
     )
     # Attempt to query with no dimension
     with pytest.raises(UnableToSatisfyQueryError):
@@ -397,3 +388,32 @@ def test_derived_metric_with_offset_parsing() -> None:
         metric_names=["revenue_growth_2_weeks"],
         group_by_names=[MTD],
     )
+
+
+def test_date_part_parsing() -> None:
+    """Test that querying with a date_part verifies compatibility with time_granularity."""
+    revenue_yaml_file = YamlConfigFile(filepath="inline_for_test_1", contents=REVENUE_YAML)
+    metrics_yaml_file = YamlConfigFile(filepath="inline_for_test_1", contents=METRICS_YAML)
+    query_parser = query_parser_from_yaml(
+        [EXAMPLE_PROJECT_CONFIGURATION_YAML_CONFIG_FILE, revenue_yaml_file, metrics_yaml_file]
+    )
+
+    # Date part is incompatible with the metric's defined time granularity
+    with pytest.raises(RequestTimeGranularityException, match="is not valid for querying"):
+        query_parser.parse_and_validate_query(
+            metric_names=["revenue"],
+            group_by=[MockQueryParameter(name="metric_time", date_part=DatePart.DOW)],
+        )
+
+    # Date part is incompatible with the requested time granularity for the same time dimension
+    with pytest.raises(RequestTimeGranularityException, match="is not compatible with time granularity"):
+        query_parser.parse_and_validate_query(
+            metric_names=["revenue"],
+            group_by=[MockQueryParameter(name="metric_time", grain=TimeGranularity.YEAR, date_part=DatePart.MONTH)],
+        )
+
+    # Date part is compatible
+    query_parser.parse_and_validate_query(
+        metric_names=["revenue"],
+        group_by=[MockQueryParameter(name="metric_time", date_part=DatePart.MONTH)],
+    )
diff --git a/metricflow/test/time/test_time_granularity_solver.py b/metricflow/test/time/test_time_granularity_solver.py
index 025778df10..e354881588 100644
--- a/metricflow/test/time/test_time_granularity_solver.py
+++ b/metricflow/test/time/test_time_granularity_solver.py
@@ -27,21 +27,21 @@ def time_granularity_solver(  # noqa: D
 
 
 def test_validate_day_granuarity_for_day_metric(time_granularity_solver: TimeGranularitySolver) -> None:  # noqa: D
-    time_granularity_solver.validate_time_granularity(
+    time_granularity_solver.validate_time_granularity_and_date_part(
         metric_references=[MetricReference(element_name="bookings")],
         time_dimension_specs=[DataSet.metric_time_dimension_spec(TimeGranularity.DAY)],
     )
 
 
 def test_validate_month_granuarity_for_day_metric(time_granularity_solver: TimeGranularitySolver) -> None:  # noqa: D
-    time_granularity_solver.validate_time_granularity(
+    time_granularity_solver.validate_time_granularity_and_date_part(
         metric_references=[MetricReference(element_name="bookings")],
         time_dimension_specs=[DataSet.metric_time_dimension_spec(TimeGranularity.MONTH)],
     )
 
 
 def test_validate_month_granuarity_for_month_metric(time_granularity_solver: TimeGranularitySolver) -> None:  # noqa: D
-    time_granularity_solver.validate_time_granularity(
+    time_granularity_solver.validate_time_granularity_and_date_part(
         metric_references=[MetricReference(element_name="bookings_monthly")],
         time_dimension_specs=[DataSet.metric_time_dimension_spec(TimeGranularity.MONTH)],
     )
@@ -50,7 +50,7 @@ def test_validate_month_granuarity_for_month_metric(time_granularity_solver: Tim
 def test_validate_month_granuarity_for_day_and_month_metrics(  # noqa: D
     time_granularity_solver: TimeGranularitySolver,
 ) -> None:
-    time_granularity_solver.validate_time_granularity(
+    time_granularity_solver.validate_time_granularity_and_date_part(
         metric_references=[MetricReference(element_name="bookings"), MetricReference(element_name="bookings_monthly")],
         time_dimension_specs=[DataSet.metric_time_dimension_spec(TimeGranularity.MONTH)],
     )
@@ -59,7 +59,7 @@ def test_validate_month_granuarity_for_day_and_month_metrics(  # noqa: D
 def test_validate_year_granularity_for_day_and_month_metrics(  # noqa: D
     time_granularity_solver: TimeGranularitySolver,
 ) -> None:
-    time_granularity_solver.validate_time_granularity(
+    time_granularity_solver.validate_time_granularity_and_date_part(
         metric_references=[MetricReference(element_name="bookings"), MetricReference(element_name="bookings_monthly")],
         time_dimension_specs=[DataSet.metric_time_dimension_spec(TimeGranularity.YEAR)],
     )
@@ -67,7 +67,7 @@
 
 def test_validate_day_granuarity_for_month_metric(time_granularity_solver: TimeGranularitySolver) -> None:  # noqa: D
     with pytest.raises(RequestTimeGranularityException):
-        time_granularity_solver.validate_time_granularity(
+        time_granularity_solver.validate_time_granularity_and_date_part(
             metric_references=[MetricReference(element_name="bookings_monthly")],
             time_dimension_specs=[DataSet.metric_time_dimension_spec(TimeGranularity.DAY)],
         )
@@ -77,7 +77,7 @@ def test_validate_day_granularity_for_day_and_month_metric(  # noqa: D
     time_granularity_solver: TimeGranularitySolver,
 ) -> None:
     with pytest.raises(RequestTimeGranularityException):
-        time_granularity_solver.validate_time_granularity(
+        time_granularity_solver.validate_time_granularity_and_date_part(
             metric_references=[
                 MetricReference(element_name="bookings"),
                 MetricReference(element_name="bookings_monthly"),
@@ -120,7 +120,7 @@ def test_granularity_error_for_cumulative_metric(  # noqa: D
     time_granularity_solver: TimeGranularitySolver,
 ) -> None:
     with pytest.raises(RequestTimeGranularityException):
-        time_granularity_solver.validate_time_granularity(
+        time_granularity_solver.validate_time_granularity_and_date_part(
             metric_references=[
                 MetricReference(element_name="weekly_bookers"),
                 MetricReference(element_name="bookings_monthly"),
diff --git a/metricflow/time/time_granularity_solver.py b/metricflow/time/time_granularity_solver.py
index 8fbf95c601..367fc832a5 100644
--- a/metricflow/time/time_granularity_solver.py
+++ b/metricflow/time/time_granularity_solver.py
@@ -19,6 +19,7 @@
 from metricflow.specs.specs import (
     TimeDimensionSpec,
 )
+from metricflow.time.date_part import DatePart
 from metricflow.time.time_granularity import (
     adjust_to_end_of_period,
     adjust_to_start_of_period,
@@ -38,6 +39,7 @@ class PartialTimeDimensionSpec:
 
     element_name: str
     entity_links: Tuple[EntityReference, ...]
+    date_part: Optional[DatePart] = None
 
 
 @dataclass(frozen=True)
@@ -65,28 +67,34 @@ def __init__(  # noqa: D
     ) -> None:
         self._semantic_manifest_lookup = semantic_manifest_lookup
 
-    def validate_time_granularity(
+    def validate_time_granularity_and_date_part(
         self, metric_references: Sequence[MetricReference], time_dimension_specs: Sequence[TimeDimensionSpec]
     ) -> None:
-        """Check that the granularity specified for time dimensions is valid with respect to the metrics.
+        """Check that the granularity & date_part specified for time dimensions is valid with respect to the metrics.
 
-        e.g. throw an error if "ds__week" is specified for a metric with a time granularity of MONTH.
+        e.g. throw an error if "ds__week" or "extract week" is specified for a metric with a time granularity of MONTH.
         """
         valid_group_by_elements = self._semantic_manifest_lookup.metric_lookup.linkable_set_for_metrics(
             metric_references=metric_references,
         )
 
         for time_dimension_spec in time_dimension_specs:
-            match_found = False
+            match_found_with_granularity = False
+            match_found_for_date_part = False
             for path_key in valid_group_by_elements.path_key_to_linkable_dimensions:
-                if (
-                    path_key.element_name == time_dimension_spec.element_name
-                    and (path_key.entity_links == time_dimension_spec.entity_links)
-                    and path_key.time_granularity == time_dimension_spec.time_granularity
+                if path_key.element_name == time_dimension_spec.element_name and (
+                    path_key.entity_links == time_dimension_spec.entity_links
                 ):
-                    match_found = True
-                    break
-            if not match_found:
+                    if path_key.time_granularity == time_dimension_spec.time_granularity:
+                        match_found_with_granularity = True
+                    if not time_dimension_spec.date_part or (
+                        path_key.time_granularity
+                        and path_key.time_granularity.to_int() <= time_dimension_spec.date_part.to_int()
+                    ):
+                        match_found_for_date_part = True
+                    if match_found_with_granularity and match_found_for_date_part:
+                        break
+            if not (match_found_with_granularity and match_found_for_date_part):
                 raise RequestTimeGranularityException(
                     f"{time_dimension_spec} is not valid for querying {metric_references}"
                 )
@@ -123,6 +131,7 @@ def resolve_granularity_for_partial_time_dimension_specs(
                 element_name=partial_time_dimension_spec.element_name,
                 entity_links=partial_time_dimension_spec.entity_links,
                 time_granularity=minimum_time_granularity,
+                date_part=partial_time_dimension_spec.date_part,
             )
         else:
             raise RequestTimeGranularityException(