diff --git a/metricflow-semantics/metricflow_semantics/model/semantics/semantic_model_lookup.py b/metricflow-semantics/metricflow_semantics/model/semantics/semantic_model_lookup.py
index 08613dffcf..9698c38e9d 100644
--- a/metricflow-semantics/metricflow_semantics/model/semantics/semantic_model_lookup.py
+++ b/metricflow-semantics/metricflow_semantics/model/semantics/semantic_model_lookup.py
@@ -54,8 +54,11 @@ def __init__(
         self._measure_non_additive_dimension_specs: Dict[MeasureReference, NonAdditiveDimensionSpec] = {}
         self._dimension_index: Dict[DimensionReference, List[SemanticModel]] = {}
         self._entity_index: Dict[EntityReference, List[SemanticModel]] = {}
+        self._primary_entity_index: Dict[EntityReference, List[SemanticModel]] = {}
+        # TODO: remove this dict
         self._dimension_ref_to_spec: Dict[DimensionReference, DimensionSpec] = {}
+        self._entity_ref_to_spec: Dict[EntityReference, EntitySpec] = {}

         self._semantic_model_to_aggregation_time_dimensions: Dict[
@@ -89,29 +92,6 @@ def get_dimension_from_semantic_model(
             f"No dimension with name '{dimension_reference.element_name}' in semantic_model '{semantic_model.name}'."
         )

-    # TODO: remove this method
-    def get_dimension(self, dimension_reference: DimensionReference) -> Dimension:
-        """Retrieves a full dimension object by name."""
-        # If the reference passed is a TimeDimensionReference, convert to DimensionReference.
-        dimension_reference = DimensionReference(dimension_reference.element_name)
-
-        semantic_models = self._dimension_index.get(dimension_reference)
-        if not semantic_models:
-            raise ValueError(
-                f"Could not find dimension with name '{dimension_reference.element_name}' in configured semantic models"
-            )
-
-        return SemanticModelLookup.get_dimension_from_semantic_model(
-            # Dimension object should match across semantic models, so just use the first semantic model.
-            semantic_model=semantic_models[0],
-            dimension_reference=dimension_reference,
-        )
-
-    # TODO: remove this method
-    def get_time_dimension(self, time_dimension_reference: TimeDimensionReference) -> Dimension:
-        """Retrieves a full dimension object by name."""
-        return self.get_dimension(dimension_reference=time_dimension_reference.dimension_reference)
-
     @property
     def measure_references(self) -> Sequence[MeasureReference]:
         """Return all measure references from the collection of semantic models."""
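Note: the two methods removed above resolved a dimension by element name alone and, when several semantic models defined the same name, silently used the first match. Properties such as is_partition can differ between those definitions, so the entity-link-aware lookups added further down in this file take their place. A minimal illustration of why the name alone is ambiguous (hypothetical element and entity names):

    from dbt_semantic_interfaces.references import EntityReference
    from metricflow_semantics.specs.dimension_spec import DimensionSpec

    # Two models can each define a dimension named "created_at"; the entity link
    # (the defining model's primary entity) is what tells the definitions apart.
    listing_created_at = DimensionSpec(element_name="created_at", entity_links=(EntityReference(element_name="listing"),))
    booking_created_at = DimensionSpec(element_name="created_at", entity_links=(EntityReference(element_name="booking"),))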
@@ -277,19 +257,18 @@ def _add_semantic_model(self, semantic_model: SemanticModel) -> None:
                 )
             )

-            # TODO: Construct these specs correctly. All of the time dimension specs have the default granularity
-            self._dimension_ref_to_spec[dim.time_dimension_reference or dim.reference] = (
-                TimeDimensionSpec(element_name=dim.name, entity_links=())
-                if dim.type is DimensionType.TIME
-                else DimensionSpec(element_name=dim.name, entity_links=())
-            )
-
         for entity in semantic_model.entities:
             semantic_models_for_entity = self._entity_index.get(entity.reference, []) + [semantic_model]
             self._entity_index[entity.reference] = semantic_models_for_entity
             self._entity_ref_to_spec[entity.reference] = EntitySpec(element_name=entity.name, entity_links=())

+        primary_entity = self.resolved_primary_entity(semantic_model)
+        if primary_entity:
+            self._primary_entity_index[primary_entity] = self._primary_entity_index.get(primary_entity, []) + [
+                semantic_model
+            ]
+
         self._semantic_model_reference_to_semantic_model[semantic_model.reference] = semantic_model

     def get_primary_entity_else_error(self, semantic_model: SemanticModel) -> EntityReference:
@@ -375,6 +354,7 @@ def entity_links_for_local_elements(semantic_model: SemanticModel) -> Sequence[E

         return sorted(possible_entity_links, key=lambda entity_reference: entity_reference.element_name)

+    # TODO: remove this method
     def get_element_spec_for_name(self, element_name: str) -> LinkableInstanceSpec:
         """Returns the spec for the given name of a linkable element (dimension or entity)."""
         if TimeDimensionReference(element_name=element_name) in self._dimension_ref_to_spec:
@@ -449,3 +429,38 @@ def _get_defined_time_granularity(
             defined_time_granularity = time_dimension.type_params.time_granularity

         return defined_time_granularity
+
+    def get_semantic_models_for_primary_entity(self, entity_reference: EntityReference) -> List[SemanticModel]:
+        """Return all semantic models associated with the primary entity reference."""
+        return self._primary_entity_index.get(entity_reference, [])
+
+    def get_semantic_model_for_dimension(
+        self, dimension_reference: DimensionReference, entity_links: Sequence[EntityReference]
+    ) -> SemanticModel:
+        """Use the entity links provided to determine the semantic model where this dimension is defined."""
+        if not entity_links:
+            raise ValueError(
+                f"No entity links received for dimension {dimension_reference}. "
+                "Entity links are required to determine semantic model for dimension."
+            )
+        primary_entity = entity_links[-1]
+        semantic_models = self.get_semantic_models_for_entity(primary_entity)
+        for semantic_model in semantic_models:
+            try:
+                self.get_dimension_from_semantic_model(
+                    semantic_model=semantic_model, dimension_reference=dimension_reference
+                )
+                return semantic_model
+            except ValueError:
+                continue
+        raise ValueError(
+            f"Could not find dimension {dimension_reference} in any semantic model associated with primary entity {primary_entity}."
+        )
+
+    def dimension_is_partition(self, dimension_spec: DimensionSpec) -> bool:  # noqa: D102
+        semantic_model = self.get_semantic_model_for_dimension(
+            dimension_reference=dimension_spec.reference, entity_links=dimension_spec.entity_links
+        )
+        return self.get_dimension_from_semantic_model(
+            dimension_reference=dimension_spec.reference, semantic_model=semantic_model
+        ).is_partition
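Note: taken together, the additions above let a caller go from a spec (element name plus entity links) to the semantic model that defines it, and from there to dimension properties such as is_partition. A sketch of the intended call pattern, assuming a populated SemanticModelLookup named lookup built from a manifest in which a "verification" model defines a partition dimension "ds_partitioned":

    from dbt_semantic_interfaces.references import EntityReference
    from metricflow_semantics.specs.dimension_spec import DimensionSpec

    spec = DimensionSpec(
        element_name="ds_partitioned", entity_links=(EntityReference(element_name="verification"),)
    )
    # The last entity link is treated as the primary entity of the defining model.
    defining_model = lookup.get_semantic_model_for_dimension(
        dimension_reference=spec.reference, entity_links=spec.entity_links
    )
    assert defining_model in lookup.get_semantic_models_for_primary_entity(
        EntityReference(element_name="verification")
    )
    assert lookup.dimension_is_partition(spec)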
diff --git a/metricflow-semantics/metricflow_semantics/specs/dimension_spec.py b/metricflow-semantics/metricflow_semantics/specs/dimension_spec.py
index 7b20a8c2c9..ad15320d6d 100644
--- a/metricflow-semantics/metricflow_semantics/specs/dimension_spec.py
+++ b/metricflow-semantics/metricflow_semantics/specs/dimension_spec.py
@@ -24,6 +24,7 @@ def without_first_entity_link(self) -> DimensionSpec:  # noqa: D102

     @property
     def without_entity_links(self) -> DimensionSpec:  # noqa: D102
+        # TODO - check if this is acceptable?
         return DimensionSpec(element_name=self.element_name, entity_links=())

     @staticmethod
diff --git a/metricflow-semantics/metricflow_semantics/specs/non_additive_dimension_spec.py b/metricflow-semantics/metricflow_semantics/specs/non_additive_dimension_spec.py
index ed583f2f00..01c5dc51c5 100644
--- a/metricflow-semantics/metricflow_semantics/specs/non_additive_dimension_spec.py
+++ b/metricflow-semantics/metricflow_semantics/specs/non_additive_dimension_spec.py
@@ -6,6 +6,7 @@
 from typing import Any, Sequence, Tuple

 from dbt_semantic_interfaces.dataclass_serialization import SerializableDataclass
+from dbt_semantic_interfaces.references import EntityReference
 from dbt_semantic_interfaces.type_enums import AggregationType, TimeGranularity

 from metricflow_semantics.mf_logging.lazy_formattable import LazyFormat
@@ -57,7 +58,9 @@ def bucket_hash(self) -> str:
         values.extend(sorted(self.window_groupings))
         return hash_items(values)

-    def linkable_specs(self, non_additive_dimension_grain: TimeGranularity) -> Sequence[LinkableInstanceSpec]:
+    def linkable_specs(
+        self, non_additive_dimension_grain: TimeGranularity, primary_entity: EntityReference
+    ) -> Sequence[LinkableInstanceSpec]:
         """Return the set of linkable specs referenced by the NonAdditiveDimensionSpec.

         In practice, the name will always point to a time dimension. This method requires the time granularity
@@ -67,7 +70,7 @@ def linkable_specs(self, non_additive_dimension_grain: TimeGranularity) -> Seque
         return (
             TimeDimensionSpec(
                 element_name=self.name,
-                entity_links=(),
+                entity_links=(primary_entity,),
                 time_granularity=ExpandedTimeGranularity.from_time_granularity(non_additive_dimension_grain),
             ),
         ) + tuple(LinklessEntitySpec.from_element_name(entity_name) for entity_name in self.window_groupings)
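Note: linkable_specs now needs the primary entity of the measure's semantic model so the returned time dimension spec carries a real entity link instead of an empty tuple; the call-site change in dataflow_plan_builder.py below supplies it via get_primary_entity_else_error. A sketch with hypothetical values (field names as used in this module):

    from dbt_semantic_interfaces.references import EntityReference
    from dbt_semantic_interfaces.type_enums import AggregationType, TimeGranularity
    from metricflow_semantics.specs.non_additive_dimension_spec import NonAdditiveDimensionSpec

    non_additive = NonAdditiveDimensionSpec(name="ds", window_choice=AggregationType.MIN, window_groupings=("user",))
    specs = non_additive.linkable_specs(
        non_additive_dimension_grain=TimeGranularity.DAY,
        primary_entity=EntityReference(element_name="account"),
    )
    # specs[0] is a TimeDimensionSpec for "ds" whose entity_links is (EntityReference("account"),);
    # the remaining entries are LinklessEntitySpecs for the window groupings.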
diff --git a/metricflow-semantics/metricflow_semantics/specs/time_dimension_spec.py b/metricflow-semantics/metricflow_semantics/specs/time_dimension_spec.py
index a7d64a528b..1b6908e34f 100644
--- a/metricflow-semantics/metricflow_semantics/specs/time_dimension_spec.py
+++ b/metricflow-semantics/metricflow_semantics/specs/time_dimension_spec.py
@@ -101,6 +101,7 @@ def without_first_entity_link(self) -> TimeDimensionSpec:  # noqa: D102
             date_part=self.date_part,
         )

+    # TODO - can we remove this method?
     @property
     def without_entity_links(self) -> TimeDimensionSpec:  # noqa: D102
         return TimeDimensionSpec(
@@ -138,12 +139,6 @@ def element_path_key(self) -> ElementPathKey:
             date_part=self.date_part,
         )

-    # TODO: remove this method
-    @staticmethod
-    def from_reference(reference: TimeDimensionReference) -> TimeDimensionSpec:
-        """Initialize from a time dimension reference instance."""
-        return TimeDimensionSpec(entity_links=(), element_name=reference.element_name)
-
     def accept(self, visitor: InstanceSpecVisitor[VisitorOutputT]) -> VisitorOutputT:  # noqa: D102
         return visitor.visit_time_dimension_spec(self)

diff --git a/metricflow-semantics/tests_metricflow_semantics/model/test_semantic_model_container.py b/metricflow-semantics/tests_metricflow_semantics/model/test_semantic_model_container.py
index e9b73b2559..5a790b70fe 100644
--- a/metricflow-semantics/tests_metricflow_semantics/model/test_semantic_model_container.py
+++ b/metricflow-semantics/tests_metricflow_semantics/model/test_semantic_model_container.py
@@ -78,11 +78,6 @@ def test_get_names(  # noqa: D103


 def test_get_elements(semantic_model_lookup: SemanticModelLookup) -> None:  # noqa: D103
-    for dimension_reference in semantic_model_lookup.get_dimension_references():
-        assert (
-            semantic_model_lookup.get_dimension(dimension_reference=dimension_reference).reference
-            == dimension_reference
-        )
     for measure_reference in semantic_model_lookup.measure_references:
         measure_reference = MeasureReference(element_name=measure_reference.element_name)
         assert semantic_model_lookup.get_measure(measure_reference=measure_reference).reference == measure_reference
diff --git a/metricflow/dataflow/builder/dataflow_plan_builder.py b/metricflow/dataflow/builder/dataflow_plan_builder.py
index 8cc77f9135..e0fae79295 100644
--- a/metricflow/dataflow/builder/dataflow_plan_builder.py
+++ b/metricflow/dataflow/builder/dataflow_plan_builder.py
@@ -14,7 +14,12 @@
     MetricTimeWindow,
     MetricType,
 )
-from dbt_semantic_interfaces.references import MetricReference, SemanticModelElementReference, TimeDimensionReference
+from dbt_semantic_interfaces.references import (
+    MetricReference,
+    SemanticModelElementReference,
+    SemanticModelReference,
+    TimeDimensionReference,
+)
 from dbt_semantic_interfaces.type_enums.time_granularity import TimeGranularity
 from dbt_semantic_interfaces.validations.unique_valid_name import MetricFlowReservedKeywords
 from metricflow_semantics.dag.id_prefix import StaticIdPrefix
@@ -48,7 +53,7 @@
 from metricflow_semantics.specs.order_by_spec import OrderBySpec
 from metricflow_semantics.specs.query_spec import MetricFlowQuerySpec
 from metricflow_semantics.specs.spec_set import InstanceSpecSet, group_specs_by_type
-from metricflow_semantics.specs.time_dimension_spec import TimeDimensionSpec
+from metricflow_semantics.specs.time_dimension_spec import DEFAULT_TIME_GRANULARITY, TimeDimensionSpec
 from metricflow_semantics.specs.where_filter.where_filter_spec import WhereFilterSpec
 from metricflow_semantics.specs.where_filter.where_filter_spec_set import WhereFilterSpecSet
 from metricflow_semantics.specs.where_filter.where_filter_transform import WhereSpecFactory
@@ -303,19 +308,23 @@ def _build_aggregated_conversion_node(
         )

         # Get the agg time dimension for each measure used for matching conversion time windows
-        base_time_dimension_spec = TimeDimensionSpec.from_reference(
-            TimeDimensionReference(
-                self._semantic_model_lookup.get_agg_time_dimension_for_measure(
-                    base_measure_spec.measure_spec.reference
-                ).element_name
-            )
-        )
-        conversion_time_dimension_spec = TimeDimensionSpec.from_reference(
-            TimeDimensionReference(
-                self._semantic_model_lookup.get_agg_time_dimension_for_measure(
-                    conversion_measure_spec.measure_spec.reference
-                ).element_name
-            )
+        base_time_dimension_spec = TimeDimensionSpec(
+            element_name=self._semantic_model_lookup.get_agg_time_dimension_for_measure(
+                base_measure_spec.measure_spec.reference
+            ).element_name,
+            entity_links=(),  # TODO: is this acceptable?
+            time_granularity=ExpandedTimeGranularity.from_time_granularity(
+                DEFAULT_TIME_GRANULARITY
+            ),  # TODO: need actual granularity here
+        )
+        conversion_time_dimension_spec = TimeDimensionSpec(
+            element_name=self._semantic_model_lookup.get_agg_time_dimension_for_measure(
+                conversion_measure_spec.measure_spec.reference
+            ).element_name,
+            entity_links=(),  # TODO: is this acceptable?
+            time_granularity=ExpandedTimeGranularity.from_time_granularity(
+                DEFAULT_TIME_GRANULARITY
+            ),  # TODO: need actual granularity here
         )

         # Filter the source nodes with only the required specs needed for the calculation
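Note: with TimeDimensionSpec.from_reference removed (see the spec change above), the conversion path now constructs its agg-time dimension specs directly. The two inline TODOs are real gaps: entity_links is still empty, and the granularity falls back to DEFAULT_TIME_GRANULARITY rather than the granularity defined for the measure's agg time dimension. If those are resolved the same way as the non-additive path in the next hunk, the shape would presumably be along these lines (a sketch only, not what this diff does; primary_entity and defined_granularity are hypothetical locals):

    base_time_dimension_spec = TimeDimensionSpec(
        element_name=agg_time_dimension_name,  # as looked up above
        entity_links=(primary_entity,),  # hypothetical: the defining model's primary entity
        time_granularity=ExpandedTimeGranularity.from_time_granularity(defined_granularity),
    )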
@@ -1467,6 +1476,7 @@ def __get_required_and_extraneous_linkable_specs(
         linkable_spec_sets_to_merge: List[LinkableSpecSet] = []
         for filter_spec in filter_specs:
             linkable_spec_sets_to_merge.append(LinkableSpecSet.create_from_specs(filter_spec.linkable_specs))
+
         if measure_spec_properties and measure_spec_properties.non_additive_dimension_spec:
             non_additive_dimension_grain = self._semantic_model_lookup.get_defined_time_granularity(
                 SemanticModelElementReference(
@@ -1474,9 +1484,15 @@
                     semantic_model_name=measure_spec_properties.agg_time_dimension.semantic_model_name,
                 )
             )
+            semantic_model = self._semantic_model_lookup.get_semantic_model(
+                SemanticModelReference(measure_spec_properties.agg_time_dimension.semantic_model_name)
+            )
+            primary_entity = self._semantic_model_lookup.get_primary_entity_else_error(semantic_model)
             linkable_spec_sets_to_merge.append(
                 LinkableSpecSet.create_from_specs(
-                    measure_spec_properties.non_additive_dimension_spec.linkable_specs(non_additive_dimension_grain)
+                    measure_spec_properties.non_additive_dimension_spec.linkable_specs(
+                        non_additive_dimension_grain=non_additive_dimension_grain, primary_entity=primary_entity
+                    )
                 )
             )

@@ -1713,7 +1729,7 @@ def _build_aggregated_measure_from_measure_source_node(
             time_dimension_spec = TimeDimensionSpec(
                 # The NonAdditiveDimensionSpec name property is a plain element name
                 element_name=non_additive_dimension_spec.name,
-                entity_links=(),
+                entity_links=(),  # TODO
                 time_granularity=ExpandedTimeGranularity.from_time_granularity(non_additive_dimension_grain),
             )
             window_groupings = tuple(
diff --git a/metricflow/dataflow/builder/partitions.py b/metricflow/dataflow/builder/partitions.py
index 7222100f7a..00de6c8bd2 100644
--- a/metricflow/dataflow/builder/partitions.py
+++ b/metricflow/dataflow/builder/partitions.py
@@ -38,15 +38,16 @@ def __init__(self, semantic_model_lookup: SemanticModelLookup) -> None:  # noqa:
     def _get_partitions(self, spec_set: InstanceSpecSet) -> PartitionSpecSet:
         """Returns the specs from the instance set that correspond to partition specs."""
         partition_dimension_specs = tuple(
-            x
-            for x in spec_set.dimension_specs
-            if self._semantic_model_lookup.get_dimension(dimension_reference=x.reference).is_partition
+            dimension
+            for dimension in spec_set.dimension_specs
+            if self._semantic_model_lookup.dimension_is_partition(dimension)
         )
+
         partition_time_dimension_specs = tuple(
-            x
-            for x in spec_set.time_dimension_specs
-            if x.reference != DataSet.metric_time_dimension_reference()
-            and self._semantic_model_lookup.get_time_dimension(time_dimension_reference=x.reference).is_partition
+            time_dimension
+            for time_dimension in spec_set.time_dimension_specs
+            if time_dimension.reference != DataSet.metric_time_dimension_reference()
+            and self._semantic_model_lookup.dimension_is_partition(time_dimension)
         )

         return PartitionSpecSet(
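Note: partition resolution now routes through dimension_is_partition, which raises when a spec carries no entity links. Every dimension or time dimension spec reaching _get_partitions therefore has to arrive with at least its primary-entity link attached, which is what the converter change below guarantees. Sketch of the failure mode, assuming a populated SemanticModelLookup named lookup:

    from metricflow_semantics.specs.dimension_spec import DimensionSpec

    bare_spec = DimensionSpec(element_name="ds_partitioned", entity_links=())
    # lookup.dimension_is_partition(bare_spec)  # would raise ValueError: entity links are required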
diff --git a/metricflow/dataflow/builder/source_node.py b/metricflow/dataflow/builder/source_node.py
index 575668eabb..18fe67be5a 100644
--- a/metricflow/dataflow/builder/source_node.py
+++ b/metricflow/dataflow/builder/source_node.py
@@ -61,7 +61,10 @@ def __init__(  # noqa: D107
         semantic_manifest_lookup: SemanticManifestLookup,
     ) -> None:
         self._semantic_manifest_lookup = semantic_manifest_lookup
-        data_set_converter = SemanticModelToDataSetConverter(column_association_resolver)
+        data_set_converter = SemanticModelToDataSetConverter(
+            column_association_resolver=column_association_resolver,
+            semantic_model_lookup=semantic_manifest_lookup.semantic_model_lookup,
+        )
         self.time_spine_sources = TimeSpineSource.build_standard_time_spine_sources(
             semantic_manifest_lookup.semantic_manifest
         )
diff --git a/metricflow/dataset/convert_semantic_model.py b/metricflow/dataset/convert_semantic_model.py
index 1abee6ddaa..e597a5513a 100644
--- a/metricflow/dataset/convert_semantic_model.py
+++ b/metricflow/dataset/convert_semantic_model.py
@@ -76,8 +76,11 @@ class SemanticModelToDataSetConverter:
     # Regex for inferring whether an expression for an element is a column reference.
     _SQL_IDENTIFIER_REGEX = re.compile("^[a-zA-Z_][a-zA-Z_0-9]*$")

-    def __init__(self, column_association_resolver: ColumnAssociationResolver) -> None:  # noqa: D107
+    def __init__(  # noqa: D107
+        self, column_association_resolver: ColumnAssociationResolver, semantic_model_lookup: SemanticModelLookup
+    ) -> None:
         self._column_association_resolver = column_association_resolver
+        self._semantic_model_lookup = semantic_model_lookup

     def _create_dimension_instance(
         self,
@@ -437,49 +440,37 @@ def create_sql_source_data_set(self, semantic_model: SemanticModel) -> SemanticM
             all_select_columns.extend(select_columns)

         # Group by items in the semantic model can be accessed though a subset of the entities defined in the model.
-        possible_entity_links: List[Tuple[EntityReference, ...]] = [
-            (),
-        ]
-
-        for entity_link in SemanticModelLookup.entity_links_for_local_elements(semantic_model):
-            possible_entity_links.append((entity_link,))
-
-        # Handle dimensions
-        conversion_results = [
-            self._convert_dimensions(
+        # Is this actually right? Shouldn't it only be the primary entity since you can't use fan-out joins?
+        # possible_entity_links: List[Tuple[EntityReference, ...]] = [
+        #     (),  # TODO: is this where we lose entity links?
+        # ]
+        primary_entity = self._semantic_model_lookup.resolved_primary_entity(semantic_model)
+        # for entity_link in SemanticModelLookup.entity_links_for_local_elements(semantic_model):
+        #     possible_entity_links.append((entity_link,))
+
+        # Handle dimensions. Semantic models with dimensions always have a primary entity.
+        if primary_entity:
+            conversion_result = self._convert_dimensions(
                 semantic_model_name=semantic_model.name,
                 dimensions=semantic_model.dimensions,
-                entity_links=entity_links,
+                entity_links=(primary_entity,),
                 table_alias=from_source_alias,
             )
-            for entity_links in possible_entity_links
-        ]
-
-        all_dimension_instances.extend(
-            [
-                dimension_instance
-                for conversion_result in conversion_results
-                for dimension_instance in conversion_result.dimension_instances
-            ]
-        )
-        all_time_dimension_instances.extend(
-            [
-                time_dimension_instance
-                for conversion_result in conversion_results
-                for time_dimension_instance in conversion_result.time_dimension_instances
-            ]
-        )
+            all_dimension_instances.extend(
+                [dimension_instance for dimension_instance in conversion_result.dimension_instances]
+            )

-        all_select_columns.extend(
-            [
-                select_column
-                for conversion_result in conversion_results
-                for select_column in conversion_result.select_columns
-            ]
-        )
+            all_time_dimension_instances.extend(
+                [time_dimension_instance for time_dimension_instance in conversion_result.time_dimension_instances]
+            )
+
+            all_select_columns.extend([select_column for select_column in conversion_result.select_columns])

         # Handle entities
+        possible_entity_links: List[Tuple[EntityReference, ...]] = [()]
+        if primary_entity:
+            possible_entity_links.append((primary_entity,))
         for entity_links in possible_entity_links:
             entity_instances, select_columns = self._create_entity_instances(
                 semantic_model_name=semantic_model.name,
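Note: the converter now emits each dimension once, linked through the model's primary entity, instead of once per possible entity link; entities keep both the link-free form and the primary-entity-linked form. The inline question (only the primary entity, since fan-out joins are not allowed) is the assumption the rest of this diff leans on. Under the dunder naming convention, the effect on a hypothetical "bookings" model with primary entity "booking" and dimension "is_instant" would be roughly:

    before: is_instant, booking__is_instant, <other_entity>__is_instant, ...  (one column per possible link)
    after:  booking__is_instant                                               (primary-entity link only)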
diff --git a/metricflow/engine/metricflow_engine.py b/metricflow/engine/metricflow_engine.py
index 83099dd28f..57c62c4545 100644
--- a/metricflow/engine/metricflow_engine.py
+++ b/metricflow/engine/metricflow_engine.py
@@ -376,7 +376,10 @@ def __init__(
             semantic_manifest_lookup.semantic_manifest
         )
         self._source_data_sets: List[SemanticModelDataSet] = []
-        converter = SemanticModelToDataSetConverter(column_association_resolver=self._column_association_resolver)
+        converter = SemanticModelToDataSetConverter(
+            column_association_resolver=self._column_association_resolver,
+            semantic_model_lookup=self._semantic_manifest_lookup.semantic_model_lookup,
+        )
         for semantic_model in sorted(
             self._semantic_manifest_lookup.semantic_manifest.semantic_models, key=lambda model: model.name
         ):
@@ -472,12 +475,14 @@ def _create_execution_plan(self, mf_query_request: MetricFlowQueryRequest) -> Me
             raise InvalidQueryException("Group by items can't be specified with a saved query.")
         query_spec = self._query_parser.parse_and_validate_saved_query(
             saved_query_parameter=SavedQueryParameter(mf_query_request.saved_query_name),
-            where_filters=[
-                PydanticWhereFilter(where_sql_template=where_constraint)
-                for where_constraint in mf_query_request.where_constraints
-            ]
-            if mf_query_request.where_constraints is not None
-            else None,
+            where_filters=(
+                [
+                    PydanticWhereFilter(where_sql_template=where_constraint)
+                    for where_constraint in mf_query_request.where_constraints
+                ]
+                if mf_query_request.where_constraints is not None
+                else None
+            ),
             limit=mf_query_request.limit,
             time_constraint_start=mf_query_request.time_constraint_start,
             time_constraint_end=mf_query_request.time_constraint_end,
diff --git a/metricflow/validation/data_warehouse_model_validator.py b/metricflow/validation/data_warehouse_model_validator.py
index 58b1055d38..5d2541bc8e 100644
--- a/metricflow/validation/data_warehouse_model_validator.py
+++ b/metricflow/validation/data_warehouse_model_validator.py
@@ -69,7 +69,8 @@ def __init__(self, manifest: SemanticManifest) -> None:  # noqa: D107
         self.converter = SemanticModelToDataSetConverter(
             column_association_resolver=DunderColumnAssociationResolver(
                 semantic_manifest_lookup=self.semantic_manifest_lookup
-            )
+            ),
+            semantic_model_lookup=self.semantic_manifest_lookup.semantic_model_lookup,
         )
         self.plan_converter = DataflowToSqlQueryPlanConverter(
             column_association_resolver=DunderColumnAssociationResolver(self.semantic_manifest_lookup),
diff --git a/scripts/ci_tests/metricflow_package_test.py b/scripts/ci_tests/metricflow_package_test.py
index 81e055c24d..274a89ae34 100644
--- a/scripts/ci_tests/metricflow_package_test.py
+++ b/scripts/ci_tests/metricflow_package_test.py
@@ -65,7 +65,8 @@ def _create_data_sets(
     semantic_models = sorted(semantic_models, key=lambda x: x.name)

     converter = SemanticModelToDataSetConverter(
-        column_association_resolver=DunderColumnAssociationResolver(semantic_manifest_lookup)
+        column_association_resolver=DunderColumnAssociationResolver(semantic_manifest_lookup),
+        semantic_model_lookup=semantic_manifest_lookup.semantic_model_lookup,
     )

     for semantic_model in semantic_models:
diff --git a/tests_metricflow/examples/test_node_sql.py b/tests_metricflow/examples/test_node_sql.py
index c7ac034512..ab5fce7d5c 100644
--- a/tests_metricflow/examples/test_node_sql.py
+++ b/tests_metricflow/examples/test_node_sql.py
@@ -39,7 +39,10 @@ def test_view_sql_generated_at_a_node(
     column_association_resolver = DunderColumnAssociationResolver(
         semantic_manifest_lookup=simple_semantic_manifest_lookup,
     )
-    to_data_set_converter = SemanticModelToDataSetConverter(column_association_resolver)
+    to_data_set_converter = SemanticModelToDataSetConverter(
+        column_association_resolver=column_association_resolver,
+        semantic_model_lookup=simple_semantic_manifest_lookup.semantic_model_lookup,
+    )

     to_sql_plan_converter = DataflowToSqlQueryPlanConverter(
         column_association_resolver=DunderColumnAssociationResolver(simple_semantic_manifest_lookup),
diff --git a/tests_metricflow/fixtures/manifest_fixtures.py b/tests_metricflow/fixtures/manifest_fixtures.py
index 7bcde44470..0d758a0262 100644
--- a/tests_metricflow/fixtures/manifest_fixtures.py
+++ b/tests_metricflow/fixtures/manifest_fixtures.py
@@ -248,7 +248,8 @@ def _create_data_sets(
     semantic_models = sorted(semantic_models, key=lambda x: x.name)

     converter = SemanticModelToDataSetConverter(
-        column_association_resolver=DunderColumnAssociationResolver(multihop_semantic_manifest_lookup)
+        column_association_resolver=DunderColumnAssociationResolver(multihop_semantic_manifest_lookup),
+        semantic_model_lookup=multihop_semantic_manifest_lookup.semantic_model_lookup,
     )

     for semantic_model in semantic_models:
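Note: the remaining hunks are mechanical updates for the new SemanticModelToDataSetConverter signature; any construction site outside this diff needs the same change, roughly:

    converter = SemanticModelToDataSetConverter(
        column_association_resolver=column_association_resolver,
        semantic_model_lookup=semantic_manifest_lookup.semantic_model_lookup,
    )

where column_association_resolver and semantic_manifest_lookup are whatever the caller already has in scope.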