diff --git a/docs/src/dataio_3_migration.rst b/docs/src/dataio_3_migration.rst new file mode 100644 index 000000000..7bd3303e8 --- /dev/null +++ b/docs/src/dataio_3_migration.rst @@ -0,0 +1,294 @@ +fmu-dataio 3.0 migration guide +============================== + +This document contains a set of instructions on how to update your code to work +with ``fmu-dataio`` 3.0. Almost all changes that have taken place are related to +fmu-dataio's ``ExportData`` class. + + +ExportData +---------- +Changes to input arguments +^^^^^^^^^^^^^^^^^^^^^^^^^^ +The following arguments are deprecated, or have specific input types/values that are deprecated, +but with replacements in place. + + - ``access_ssdl`` is deprecated and replaced by the ``classification`` and ``rep_include`` arguments. + - ``classification='asset'`` is deprecated, use ``classification='restricted'`` instead. + - ``fmu_context='preprocessed'`` is deprecated, use argument ``preprocessed=True`` instead. + - ``vertical_domain`` now only supports string input with value either ``time`` / ``depth``. Using + a dictionary form to provide a reference together with the ``vertical_domain`` is deprecated, use + the ``domain_reference`` argument instead. + - ``workflow`` now only supports string input, example ``workflow='Structural modelling'``. + - ``content={'seismic': {'offset': '0-15'}}`` no longer works, use the key ``stacking_offset`` instead + of ``offset``. + + +Following is an example demonstrating several deprecated patterns: + +.. code-block:: python + + from fmu.dataio import ExportData + + ExportData( + fmu_context='preprocessed', # ⛔️ + access_ssdl={'access_level': 'asset', 'rep_include': True}, # ⛔️ + vertical_domain={'depth': 'msl'}, # ⛔️ + workflow={'reference': 'Structural modelling'}, # ⛔️ + ) + +Change to this instead 👇: + +.. 
code-block:: python + + from fmu.dataio import ExportData + + ExportData( + preprocessed=True, # ✅ + classification='restricted', # ✅ note the use of 'restricted' instead of 'asset' + rep_include=True, # ✅ + vertical_domain='depth', # ✅ + domain_reference='msl', # ✅ + workflow='Structural modelling', # ✅ + ) + + +The following arguments are deprecated, and have for a long time not had any effect. +They can safely be removed. + + - ``depth_reference`` is deprecated and was never used, use the new ``domain_reference`` argument instead. + - ``runpath`` is deprecated and picked up by ERT variables instead. + - ``reuse_metadata_rule`` never had more than one option, and is now deprecated. + - ``grid_model`` was intended to be used for linking a grid_property to a grid, this is now done through + the ``geometry`` argument instead. + - ``realization`` is deprecated, realization number is automatically picked up from environment variables. + - ``verbosity`` is deprecated, logging level should be set from client script in a standard manner instead. + + +The following arguments will be required if specific data types are exported. + + - ``geometry`` needs to be set if the object is of type ``xtgeo.GridProperty`` (see example + `here `_). + + +Additionally + + - ``fmu_context='case_symlink_realization'`` is no longer a valid argument value for ``fmu_context``. + If necessary to create symlinks from data stored at case level to the individual realizations, + use the ``SYMLINK`` forward model provided by ERT instead. + + +Changes to class variables +^^^^^^^^^^^^^^^^^^^^^^^^^^ +The following class variables are deprecated. For a while they've had no effect and can +safely be removed if present in the code. + + * ``ExportData.allow_forcefolder_absolute`` + * ``ExportData.createfolder`` + * ``ExportData.include_ertjobs`` + * ``ExportData.legacy_time_format`` + * ``ExportData.table_include_index`` + * ``ExportData.verifyfolder`` + + +.. 
code-block:: python + + from fmu.dataio import ExportData + + surface = xtgeo.surface_from_file('mysurf.gri') + + exd = ExportData( + config=CFG, + content='depth', + tagname='DS_final', + ) + exd.legacy_time_format = True # ⛔️ no longer allowed, simply remove the line! + exd.export(surface) + + +Providing arguments through export() / generate_metadata() +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +It is no longer possible to enter arguments inside the ``export()`` and ``generate_metadata()`` methods +to update the ExportData instance after creation. These methods now only accept the object to export. +To get your code running simply move your arguments from the methods up to the initialisation of the +ExportData instance, or if necessary create a new instance of the ExportData class. + + +Example with deprecated pattern: + +.. code-block:: python + + from fmu.dataio import ExportData + + surface = xtgeo.surface_from_file('mysurf.gri') + + exd = ExportData(config=CFG) + exd.export( + surface, + content='depth', # ⛔️ no longer allowed! + tagname='DS_final' # ⛔️ no longer allowed! + ) + +Change to this instead 👇: + +.. code-block:: python + + from fmu.dataio import ExportData + + surface = xtgeo.surface_from_file('mysurf.gri') + + exd = ExportData( + config=CFG, + content='depth', # ✅ + tagname='DS_final', # ✅ + ) + exd.export(surface) + +Note if you have a loop it might be necessary to move the creation of the +ExportData instance inside the loop. Example below: + +.. code-block:: python + + from fmu.dataio import ExportData + + SURFACE_FOLDER = 'TS_final' + SURFACES = ['TopVolantis', 'TopVolon'] + + def export_surfaces(): + + exd = ExportData( + config=CFG, + content='time', + tagname=SURFACE_FOLDER, + ) + + for surf_name in SURFACES: + surface = xtgeo.surface_from_roxar(project, surf_name, SURFACE_FOLDER) + exd.export(surface, name=surf_name) # ⛔️ no longer allowed! + + +Change to this instead 👇: + +.. 
code-block:: python + + from fmu.dataio import ExportData + + SURFACE_FOLDER = 'TS_final' + SURFACES = ['TopVolantis', 'TopVolon'] + + def export_surfaces(): + + for surf_name in SURFACES: + surface = xtgeo.surface_from_roxar(project, surf_name, SURFACE_FOLDER) + + exd = ExportData( + config=CFG, + content='time', + tagname=SURFACE_FOLDER, + name=surf_name, + ) + exd.export(surface) + + +Additionally + + - The ``return_symlink`` argument to ``export()`` is deprecated. It is redundant and can be removed. + + +Providing settings through environment +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +It was previously possible to have a yml-file specifying global input arguments to +the ``ExportData`` class, and have an environment variable ``FMU_DATAIO_CONFIG`` pointing +to that file. This is no longer possible and it will have no effect if provided. + + +Using ExportData to re-export preprocessed data +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Using the ``ExportData`` class for re-exporting preprocessed data is deprecated. Use the dedicated +``ExportPreprocessedData`` class instead. Main difference being that the config is no longer needed +as input argument, and redundant arguments are no longer accepted. + + +Example using ``ExportData`` to re-export preprocessed data: + +.. code-block:: python + + from fmu.dataio import ExportData + from fmu.config import utilities as utils + + config = utils.yaml_load('../../fmuconfig/output/global_variables.yml') + + preprocessed_seismic_cube = 'share/preprocessed/cubes/mycube.segy' + + exd = ExportData( + config=config, + is_observation=True, + casepath='/scratch/fmu/user/mycase', + ) + exd.export(preprocessed_seismic_cube) + + +Example using ``ExportPreprocessedData`` to re-export preprocessed data: + +.. 
code-block:: python + + from fmu.dataio import ExportPreprocessedData + + preprocessed_seismic_cube = 'share/preprocessed/cubes/mycube.segy' + + exd = ExportPreprocessedData( + is_observation=True, + casepath='/scratch/fmu/user/mycase', + ) + exd.export(preprocessed_seismic_cube) + +.. note:: + Preprocessed data refers to data that have previously been exported with the ``ExportData`` class, + i.e. it contains metadata and is stored in a ``share/preprocessed/`` folder typically on the project disk. + + +Changes affecting the global_variables.yml +------------------------------------------ +The ``access.ssdl`` block is deprecated; it is recommended to remove it entirely. Setting a global +classification for all your export jobs should now be done through the ``access.classification`` field +instead. Furthermore, setting a global ``rep_include`` value for all exports is no longer supported. +Instead, you must set it on a per-object basis using the ``rep_include`` argument in the ``ExportData`` instance. + + +Example of an old set-up: + +.. code-block:: yaml + + global: + access: + asset: + name: Drogon + ssdl: + access_level: internal # ⛔️ no longer allowed + rep_include: true # ⛔️ no longer in use, simply remove the line! + + +Example of a new set-up: + +.. code-block:: yaml + + global: + access: + asset: + name: Drogon + classification: internal # ✅ Correct way of entering security classification + +.. note:: + If the config contains both ``access.ssdl.access_level`` (deprecated) and ``access.classification``, + the value from ``access.classification`` will be used. + + + +AggregatedData +-------------- +Changes to input arguments + - ``verbosity`` is deprecated, logging level should be set from client script in a standard manner instead. + +Changes to method arguments + - The ``skip_null`` argument to ``generate_metadata()`` is deprecated. It is redundant and can be removed. 
+ diff --git a/docs/src/index.rst b/docs/src/index.rst index 05773b785..90d6bd473 100644 --- a/docs/src/index.rst +++ b/docs/src/index.rst @@ -41,5 +41,6 @@ post-processing services, new and improved cloud-only version of Webviz and much preparations examples rms_oneliners + dataio_3_migration apiref/modules datamodel/index diff --git a/pyproject.toml b/pyproject.toml index 4fa24577e..bdf8a0e42 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,7 +53,7 @@ dev = [ "hypothesis", "mypy", "pandas-stubs", - "pyarrow-stubs", + "pyarrow-stubs==10.0.1.9", "pydocstyle", "pytest-cov", "pytest-mock", diff --git a/schema/definitions/0.8.0/schema/fmu_results.json b/schema/definitions/0.8.0/schema/fmu_results.json index ddb0f7361..2fe69dcfe 100644 --- a/schema/definitions/0.8.0/schema/fmu_results.json +++ b/schema/definitions/0.8.0/schema/fmu_results.json @@ -951,6 +951,44 @@ "title": "DomainReference", "type": "string" }, + "Ert": { + "description": "The ``fmu.ert`` block contains information about the current ert run.", + "properties": { + "experiment": { + "anyOf": [ + { + "$ref": "#/$defs/Experiment" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "title": "Ert", + "type": "object" + }, + "Experiment": { + "description": "The ``fmu.ert.experiment`` block contains information about\nthe current ert experiment run.", + "properties": { + "id": { + "anyOf": [ + { + "format": "uuid", + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "title": "Id" + } + }, + "title": "Experiment", + "type": "object" + }, "FMU": { "dependencies": { "aggregation": { @@ -987,6 +1025,17 @@ "context": { "$ref": "#/$defs/Context" }, + "ert": { + "anyOf": [ + { + "$ref": "#/$defs/Ert" + }, + { + "type": "null" + } + ], + "default": null + }, "iteration": { "anyOf": [ { diff --git a/src/fmu/dataio/_metadata.py b/src/fmu/dataio/_metadata.py index bb390c972..58364c558 100644 --- a/src/fmu/dataio/_metadata.py +++ b/src/fmu/dataio/_metadata.py @@ -11,7 +11,7 @@ 
from ._definitions import SCHEMA, SOURCE, VERSION from ._logging import null_logger -from ._model import fields, internal +from ._model import fields, schema from .exceptions import InvalidMetadataError from .providers._filedata import FileDataProvider from .providers.objectdata._provider import objectdata_provider_factory @@ -42,7 +42,7 @@ def _get_meta_filedata( ).get_metadata() -def _get_meta_fmu(fmudata: FmuProvider) -> internal.FMUClassMetaData | None: +def _get_meta_fmu(fmudata: FmuProvider) -> schema.InternalFMU | None: try: return fmudata.get_metadata() except InvalidMetadataError: @@ -77,7 +77,7 @@ def generate_export_metadata( dataio: ExportData, fmudata: FmuProvider | None = None, compute_md5: bool = True, -) -> internal.DataClassMeta: +) -> schema.InternalObjectMetadata: """ Main function to generate the full metadata @@ -108,7 +108,7 @@ def generate_export_metadata( objdata = objectdata_provider_factory(obj, dataio) masterdata = dataio.config.get("masterdata") - return internal.DataClassMeta( + return schema.InternalObjectMetadata( schema_=TypeAdapter(AnyHttpUrl).validate_strings(SCHEMA), # type: ignore[call-arg] version=VERSION, source=SOURCE, diff --git a/src/fmu/dataio/_model/fields.py b/src/fmu/dataio/_model/fields.py index fd6795fc8..9770729be 100644 --- a/src/fmu/dataio/_model/fields.py +++ b/src/fmu/dataio/_model/fields.py @@ -208,6 +208,22 @@ class Case(BaseModel): """A free-text description of this case.""" +class Ert(BaseModel): + """The ``fmu.ert`` block contains information about the current ert run.""" + + experiment: Optional[Experiment] = Field(default=None) + """Reference to the ert experiment. 
+ See :class:`Experiment`.""" + + +class Experiment(BaseModel): + """The ``fmu.ert.experiment`` block contains information about + the current ert experiment run.""" + + id: Optional[UUID] = Field(default=None) + """The unique identifier of this ert experiment run.""" + + class Iteration(BaseModel): """ The ``fmu.iteration`` block contains information about the iteration this data @@ -581,6 +597,10 @@ class FMU(FMUBase): """The ``fmu.realization`` block contains information about the realization this data object belongs to. See :class:`Realization`.""" + ert: Optional[Ert] = Field(default=None) + """The ``fmu.ert`` block contains information about the current ert run + See :class:`Ert`.""" + @model_validator(mode="before") @classmethod def _dependencies_aggregation_realization(cls, values: Dict) -> Dict: diff --git a/src/fmu/dataio/_model/internal.py b/src/fmu/dataio/_model/schema.py similarity index 89% rename from src/fmu/dataio/_model/internal.py rename to src/fmu/dataio/_model/schema.py index 649d54f4a..b95ec2c68 100644 --- a/src/fmu/dataio/_model/internal.py +++ b/src/fmu/dataio/_model/schema.py @@ -1,5 +1,5 @@ """ -This module, `datastructure._internal`, contains internal data structures that +This module, `_model.schema`, contains internal data structures that are designed to depend on external modules, but not the other way around. This design ensures modularity and flexibility, allowing external modules to be potentially separated into their own repositories without dependencies @@ -108,21 +108,16 @@ class JsonSchemaMetadata(BaseModel, populate_by_name=True): source: str = Field(default=SOURCE) -class FMUModelCase(BaseModel): - model: fields.Model - case: fields.Case - - class Context(BaseModel, use_enum_values=True): stage: enums.FMUContext # Remove the two models below when content is required as input. 
-class UnsetContent(data.Data): +class InternalUnsetData(data.Data): content: Literal["unset"] # type: ignore @model_validator(mode="after") - def _deprecation_warning(self) -> UnsetContent: + def _deprecation_warning(self) -> InternalUnsetData: valid_contents = [m.value for m in enums.Content] warnings.warn( "The is not provided which will produce invalid metadata. " @@ -134,18 +129,18 @@ def _deprecation_warning(self) -> UnsetContent: return self -class UnsetAnyContent(data.AnyData): - root: UnsetContent # type: ignore +class InternalAnyData(data.AnyData): + root: InternalUnsetData # type: ignore -class FMUClassMetaData(fields.FMU): +class InternalFMU(fields.FMU): # This class is identical to the one used in the schema # exept for more fmu context values beeing allowed internally context: Context # type: ignore -class DataClassMeta(JsonSchemaMetadata): - # TODO: aim to use meta.FMUDataClassMeta as base +class InternalObjectMetadata(JsonSchemaMetadata): + # TODO: aim to use root.ObjectMetadata as base # class and disallow creating invalid metadata. 
class_: Literal[ enums.FMUClass.surface, @@ -158,20 +153,20 @@ class DataClassMeta(JsonSchemaMetadata): enums.FMUClass.points, enums.FMUClass.dictionary, ] = Field(alias="class") - fmu: Optional[FMUClassMetaData] + fmu: Optional[InternalFMU] masterdata: Optional[fields.Masterdata] access: Optional[fields.SsdlAccess] - data: Union[data.AnyData, UnsetAnyContent] + data: Union[data.AnyData, InternalAnyData] file: fields.File display: fields.Display tracklog: fields.Tracklog preprocessed: Optional[bool] = Field(alias="_preprocessed", default=None) -class CaseSchema(JsonSchemaMetadata): +class InternalCaseMetadata(JsonSchemaMetadata): class_: Literal["case"] = Field(alias="class", default="case") masterdata: fields.Masterdata access: fields.Access - fmu: FMUModelCase + fmu: fields.FMUBase description: Optional[List[str]] = Field(default=None) tracklog: fields.Tracklog diff --git a/src/fmu/dataio/aggregation.py b/src/fmu/dataio/aggregation.py index f853cae5a..56bca216b 100644 --- a/src/fmu/dataio/aggregation.py +++ b/src/fmu/dataio/aggregation.py @@ -12,7 +12,7 @@ from . 
import _utils, dataio, types from ._logging import null_logger -from ._model import internal +from ._model import schema from ._model.enums import FMUContext from .exceptions import InvalidMetadataError from .providers.objectdata._provider import objectdata_provider_factory @@ -65,7 +65,7 @@ class AggregatedData: tagname: str = "" verbosity: str = "DEPRECATED" # keep for while - _metadata: internal.DataClassMeta = field(init=False) + _metadata: schema.InternalObjectMetadata = field(init=False) _metafile: Path = field(default_factory=Path, init=False) def __post_init__(self) -> None: @@ -274,7 +274,7 @@ def _set_metadata( template["data"]["bbox"] = bbox try: - self._metadata = internal.DataClassMeta.model_validate(template) + self._metadata = schema.InternalObjectMetadata.model_validate(template) except ValidationError as err: raise InvalidMetadataError( f"The existing metadata for the aggregated data is invalid. " diff --git a/src/fmu/dataio/case.py b/src/fmu/dataio/case.py index bee65f075..5251be398 100644 --- a/src/fmu/dataio/case.py +++ b/src/fmu/dataio/case.py @@ -13,7 +13,7 @@ from . 
import _utils from ._logging import null_logger -from ._model import global_configuration, internal +from ._model import global_configuration, schema from ._model.fields import Access, Case, Masterdata, Model, User logger: Final = null_logger(__name__) @@ -118,10 +118,10 @@ def generate_metadata(self) -> dict: warnings.warn(exists_warning, UserWarning) return {} - self._metadata = internal.CaseSchema( + self._metadata = schema.InternalCaseMetadata( masterdata=Masterdata.model_validate(self.config["masterdata"]), access=Access.model_validate(self.config["access"]), - fmu=internal.FMUModelCase( + fmu=fields.FMUBase( model=Model.model_validate( self.config["model"], ), diff --git a/src/fmu/dataio/dataio.py b/src/fmu/dataio/dataio.py index 7f9d51f12..915e069b9 100644 --- a/src/fmu/dataio/dataio.py +++ b/src/fmu/dataio/dataio.py @@ -400,7 +400,7 @@ class ExportData: unit: Optional[str] = "" verbosity: str = "DEPRECATED" # remove in version 2 vertical_domain: Optional[Union[str, dict]] = None # dict input is deprecated - workflow: Optional[Union[str, Dict[str, str]]] = None + workflow: Optional[Union[str, Dict[str, str]]] = None # dict input is deprecated table_index: Optional[list] = None # some keys that are modified version of input, prepended with _use diff --git a/src/fmu/dataio/preprocessed.py b/src/fmu/dataio/preprocessed.py index 2a3909f17..b1151d085 100644 --- a/src/fmu/dataio/preprocessed.py +++ b/src/fmu/dataio/preprocessed.py @@ -10,7 +10,7 @@ from pydantic import ValidationError from ._logging import null_logger -from ._model import enums, internal +from ._model import enums, schema from ._model.enums import FMUContext from ._model.fields import File from ._utils import export_metadata_file, md5sum @@ -186,7 +186,9 @@ def _get_updated_metadata(self, meta_existing: dict, objfile: Path) -> dict: try: # TODO: Would like to use meta.Root.model_validate() here # but then the '$schema' field is dropped from the meta_existing - validated_metadata = 
internal.DataClassMeta.model_validate(meta_existing) + validated_metadata = schema.InternalObjectMetadata.model_validate( + meta_existing + ) validated_metadata.tracklog.extend(enums.TrackLogEventType.merged) return validated_metadata.model_dump( mode="json", exclude_none=True, by_alias=True diff --git a/src/fmu/dataio/providers/_fmu.py b/src/fmu/dataio/providers/_fmu.py index 979d85bce..460cf1607 100644 --- a/src/fmu/dataio/providers/_fmu.py +++ b/src/fmu/dataio/providers/_fmu.py @@ -29,6 +29,7 @@ from __future__ import annotations import os +import uuid from dataclasses import dataclass, field from enum import Enum, auto from pathlib import Path @@ -38,7 +39,7 @@ from fmu.config import utilities as ut from fmu.dataio import _utils from fmu.dataio._logging import null_logger -from fmu.dataio._model import fields, internal +from fmu.dataio._model import fields, schema from fmu.dataio._model.enums import FMUContext from fmu.dataio.exceptions import InvalidMetadataError @@ -164,7 +165,7 @@ def get_runpath(self) -> Path | None: """Return runpath for a FMU run.""" return self._runpath - def get_metadata(self) -> internal.FMUClassMetaData: + def get_metadata(self) -> schema.InternalFMU: """Construct the metadata FMU block for an ERT forward job.""" logger.debug("Generate ERT metadata...") @@ -174,23 +175,25 @@ def get_metadata(self) -> internal.FMUClassMetaData: case_meta = self._get_case_meta() if self.fmu_context != FMUContext.realization: - return internal.FMUClassMetaData( + return schema.InternalFMU( case=case_meta.fmu.case, context=self._get_fmucontext_meta(), model=self._get_fmumodel_meta() if self.model else case_meta.fmu.model, workflow=self._get_workflow_meta() if self.workflow else None, + ert=self._get_ert_meta(), ) iter_uuid, real_uuid = self._get_iteration_and_real_uuid( case_meta.fmu.case.uuid ) - return internal.FMUClassMetaData( + return schema.InternalFMU( case=case_meta.fmu.case, context=self._get_fmucontext_meta(), model=self._get_fmumodel_meta() if 
self.model else case_meta.fmu.model, workflow=self._get_workflow_meta() if self.workflow else None, iteration=self._get_iteration_meta(iter_uuid), realization=self._get_realization_meta(real_uuid), + ert=self._get_ert_meta(), ) @staticmethod @@ -198,6 +201,16 @@ def _get_runpath_from_env() -> Path | None: """get runpath as an absolute path if detected from the enviroment""" return Path(runpath).resolve() if (runpath := FmuEnv.RUNPATH.value) else None + @staticmethod + def _get_ert_meta() -> fields.Ert: + return fields.Ert( + experiment=fields.Experiment( + id=uuid.UUID(FmuEnv.EXPERIMENT_ID.value) + if FmuEnv.EXPERIMENT_ID.value + else None + ) + ) + def _validate_and_establish_casepath(self) -> Path | None: """If casepath is not given, then try update _casepath (if in realization). @@ -249,7 +262,7 @@ def _get_restart_data_uuid(self) -> UUID | None: ) return None - restart_metadata = internal.CaseSchema.model_validate( + restart_metadata = schema.InternalCaseMetadata.model_validate( ut.yaml_load(restart_case_metafile, loader="standard") ) return _utils.uuid_from_string( @@ -275,12 +288,12 @@ def _get_iteration_and_real_uuid(self, case_uuid: UUID) -> tuple[UUID, UUID]: real_uuid = _utils.uuid_from_string(f"{case_uuid}{iter_uuid}{self._real_id}") return iter_uuid, real_uuid - def _get_case_meta(self) -> internal.CaseSchema: + def _get_case_meta(self) -> schema.InternalCaseMetadata: """Parse and validate the CASE metadata.""" logger.debug("Loading case metadata file and return pydantic case model") assert self._casepath is not None case_metafile = self._casepath / ERT_RELATIVE_CASE_METADATA_FILE - return internal.CaseSchema.model_validate( + return schema.InternalCaseMetadata.model_validate( ut.yaml_load(case_metafile, loader="standard") ) @@ -302,8 +315,8 @@ def _get_iteration_meta(self, iter_uuid: UUID) -> fields.Iteration: else None, ) - def _get_fmucontext_meta(self) -> internal.Context: - return internal.Context(stage=self.fmu_context) + def 
_get_fmucontext_meta(self) -> schema.Context: + return schema.Context(stage=self.fmu_context) def _get_fmumodel_meta(self) -> fields.Model: return fields.Model.model_validate(self.model) diff --git a/src/fmu/dataio/providers/objectdata/_base.py b/src/fmu/dataio/providers/objectdata/_base.py index 5d9b48a9a..4bf6c09af 100644 --- a/src/fmu/dataio/providers/objectdata/_base.py +++ b/src/fmu/dataio/providers/objectdata/_base.py @@ -15,7 +15,7 @@ Timestamp, ) from fmu.dataio._model.enums import Content -from fmu.dataio._model.internal import AllowedContent, UnsetAnyContent +from fmu.dataio._model.schema import AllowedContent, InternalAnyData from fmu.dataio._utils import generate_description from fmu.dataio.providers._base import Provider @@ -65,7 +65,7 @@ class ObjectDataProvider(Provider): # result properties; the most important is metadata which IS the 'data' part in # the resulting metadata. But other variables needed later are also given # as instance properties in addition (for simplicity in other classes/functions) - _metadata: AnyData | UnsetAnyContent | None = field(default=None) + _metadata: AnyData | InternalAnyData | None = field(default=None) name: str = field(default="") time0: datetime | None = field(default=None) time1: datetime | None = field(default=None) @@ -118,7 +118,7 @@ def __post_init__(self) -> None: metadata["description"] = generate_description(self.dataio.description) self._metadata = ( - UnsetAnyContent.model_validate(metadata) + InternalAnyData.model_validate(metadata) if metadata["content"] == "unset" else AnyData.model_validate(metadata) ) @@ -166,7 +166,7 @@ def get_bbox(self) -> BoundingBox2D | BoundingBox3D | None: def get_spec(self) -> AnySpecification | None: raise NotImplementedError - def get_metadata(self) -> AnyData | UnsetAnyContent: + def get_metadata(self) -> AnyData | InternalAnyData: assert self._metadata is not None return self._metadata diff --git a/tests/test_units/test_dataio.py b/tests/test_units/test_dataio.py index 
75b71e13b..de171dfa8 100644 --- a/tests/test_units/test_dataio.py +++ b/tests/test_units/test_dataio.py @@ -1000,3 +1000,33 @@ def test_append_to_alias_list(globalconfig2, regsurf): # also check that the name input was added to the alias list assert name not in strat["alias"] assert name in meta["data"]["alias"] + + +def test_ert_experiment_id_present_in_generated_metadata( + fmurun_w_casemetadata, monkeypatch, globalconfig1, regsurf +): + """Test that the ert experiment id has been set correctly + in the generated metadata""" + + monkeypatch.chdir(fmurun_w_casemetadata) + + edata = ExportData(config=globalconfig1, content="depth") + meta = edata.generate_metadata(regsurf) + expected_id = "6a8e1e0f-9315-46bb-9648-8de87151f4c7" + assert meta["fmu"]["ert"]["experiment"]["id"] == expected_id + + +def test_ert_experiment_id_present_in_exported_metadata( + fmurun_w_casemetadata, monkeypatch, globalconfig1, regsurf +): + """Test that the ert experiment id has been set correctly + in the exported metadata""" + + monkeypatch.chdir(fmurun_w_casemetadata) + + edata = ExportData(config=globalconfig1, content="depth") + out = Path(edata.export(regsurf)) + with open(out.parent / f".{out.name}.yml", encoding="utf-8") as f: + export_meta = yaml.safe_load(f) + expected_id = "6a8e1e0f-9315-46bb-9648-8de87151f4c7" + assert export_meta["fmu"]["ert"]["experiment"]["id"] == expected_id diff --git a/tests/test_units/test_ert_context.py b/tests/test_units/test_ert_context.py index cc39d2698..db0327a29 100644 --- a/tests/test_units/test_ert_context.py +++ b/tests/test_units/test_ert_context.py @@ -37,7 +37,7 @@ def test_regsurf_generate_metadata(fmurun_w_casemetadata, rmsglobalconfig, regsu def test_incl_jobs_warning(rmsglobalconfig): - """Check that usning the deprecated class variable include_ertjobs gives warning.""" + """Check that using the deprecated class variable include_ertjobs gives warning.""" dataio.ExportData.include_ertjobs = True