diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a5bea1a3c8..e9c80469aa 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -61,7 +61,7 @@ repos: name: mypy with Python 3.8 files: src additional_dependencies: - ['numpy', 'types-tqdm', 'click', 'types-jsonpatch', 'types-pyyaml', 'types-jsonschema', 'importlib_metadata', 'packaging'] + ['numpy', 'types-tqdm', 'click', 'types-jsonpatch', 'types-pyyaml', 'types-jsonschema', 'importlib_metadata', 'packaging', 'importlib_resources'] args: ["--python-version=3.8"] - <<: *mypy name: mypy with Python 3.11 diff --git a/pyproject.toml b/pyproject.toml index e2a17df696..19f4c7b012 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,6 +46,7 @@ classifiers = [ dependencies = [ "click>=8.0.0", # for console scripts "importlib_resources>=1.4.0; python_version < '3.9'", # for resources in schema + "typing_extensions; python_version < '3.11'", # for typing "jsonpatch>=1.15", "jsonschema>=4.15.0", # for utils "pyyaml>=5.1", # for parsing CLI equal-delimited options @@ -264,7 +265,6 @@ module = [ 'pyhf.modifiers.*', 'pyhf.exceptions.*', 'pyhf.parameters.*', - 'pyhf.schema.*', 'pyhf.writexml', 'pyhf.workspace', 'pyhf.patchset', diff --git a/src/pyhf/cli/cli.py b/src/pyhf/cli/cli.py index a1a486fe54..c0113ff08e 100644 --- a/src/pyhf/cli/cli.py +++ b/src/pyhf/cli/cli.py @@ -4,7 +4,7 @@ import click from pyhf import __version__ -from pyhf.cli import rootio, spec, infer, patchset, complete +from pyhf.cli import rootio, spec, infer, patchset, complete, upgrade from pyhf.contrib import cli as contrib from pyhf import utils @@ -56,3 +56,5 @@ def pyhf(): pyhf.add_command(complete.cli) pyhf.add_command(contrib.cli) + +pyhf.add_command(upgrade.cli) diff --git a/src/pyhf/cli/upgrade.py b/src/pyhf/cli/upgrade.py new file mode 100644 index 0000000000..6fc8ba163c --- /dev/null +++ b/src/pyhf/cli/upgrade.py @@ -0,0 +1,72 @@ +"""The pyhf upgrade CLI subcommand.""" +import logging + +import click +import json + +from pyhf.schema.upgrader import upgrade + +log = logging.getLogger(__name__) + + +@click.group(name='upgrade') +def cli(): + """Operations for upgrading specifications.""" + + +@cli.command() +@click.argument('workspace', default='-') +@click.option( + '--version', + help='The version to upgrade to', + default=None, +) +@click.option( + '--output-file', + help='The location of the output json file. If not specified, prints to screen.', + default=None, +) +def workspace(workspace, version, output_file): + """ + Upgrade a HistFactory JSON workspace. + """ + with click.open_file(workspace, 'r', encoding="utf-8") as specstream: + spec = json.load(specstream) + + ws = upgrade(to_version=version).workspace(spec) + + if output_file is None: + click.echo(json.dumps(ws, indent=4, sort_keys=True)) + else: + with open(output_file, 'w+', encoding="utf-8") as out_file: + json.dump(ws, out_file, indent=4, sort_keys=True) + log.debug(f"Written to {output_file:s}") + + +@cli.command() +@click.argument('patchset', default='-') +@click.option( + '--version', + help='The version to upgrade to', + default=None, +) +@click.option( + '--output-file', + help='The location of the output json file. If not specified, prints to screen.', + default=None, +) +def patchset(patchset, version, output_file): + """ + Upgrade a pyhf JSON PatchSet. 
+ """ + with click.open_file(patchset, 'r', encoding="utf-8") as specstream: + spec = json.load(specstream) + + ps = upgrade(to_version=version).patchset(spec) + + if output_file is None: + click.echo(json.dumps(ps, indent=4, sort_keys=True)) + else: + with open(output_file, 'w+', encoding="utf-8") as out_file: + json.dump(ps, out_file, indent=4, sort_keys=True) + log.debug(f"Written to {output_file:s}") diff --git a/src/pyhf/readxml.py b/src/pyhf/readxml.py index a694dab292..cb45d40839 100644 --- a/src/pyhf/readxml.py +++ b/src/pyhf/readxml.py @@ -473,7 +473,7 @@ def parse( 'measurements': measurements, 'channels': channels, 'observations': observations, - 'version': schema.version, # type: ignore[typeddict-unknown-key] + 'version': schema.versions['workspace.json'], # type: ignore[attr-defined] } try: schema.validate(result, 'workspace.json') diff --git a/src/pyhf/schema/__init__.py b/src/pyhf/schema/__init__.py index 61bb01c78e..79c151b89b 100644 --- a/src/pyhf/schema/__init__.py +++ b/src/pyhf/schema/__init__.py @@ -1,25 +1,31 @@ """ See :class:`~pyhf.schema.Schema` for documentation. """ -import pathlib +from __future__ import annotations import sys +from typing import Any from pyhf.schema.loader import load_schema from pyhf.schema.validator import validate from pyhf.schema import variables +from pyhf.typing import Self, SchemaVersion, PathOrStr, Traversable +from pyhf.schema.upgrader import upgrade + +from pathlib import Path __all__ = [ "load_schema", "validate", "path", "version", + "upgrade", ] -def __dir__(): +def __dir__() -> list[str]: return __all__ -class Schema(sys.modules[__name__].__class__): +class Schema(sys.modules[__name__].__class__): # type: ignore[misc] """ A module-level wrapper around :mod:`pyhf.schema` which will provide additional functionality for interacting with schemas. @@ -61,7 +67,7 @@ class Schema(sys.modules[__name__].__class__): """ - def __call__(self, new_path: pathlib.Path): + def __call__(self, new_path: PathOrStr) -> Self: """ Change the local search path for finding schemas locally. @@ -71,15 +77,15 @@ def __call__(self, new_path: pathlib.Path): Returns: self (pyhf.schema.Schema): Returns itself (for contextlib management) """ - self.orig_path, variables.schemas = variables.schemas, new_path + self.orig_path, variables.schemas = variables.schemas, Path(new_path) self.orig_cache = dict(variables.SCHEMA_CACHE) variables.SCHEMA_CACHE.clear() return self - def __enter__(self): + def __enter__(self) -> None: pass - def __exit__(self, *args, **kwargs): + def __exit__(self, *args: Any, **kwargs: Any) -> None: """ Reset the local search path for finding schemas locally. @@ -90,14 +96,14 @@ def __exit__(self, *args, **kwargs): variables.SCHEMA_CACHE = self.orig_cache @property - def path(self): + def path(self) -> Traversable | Path: """ The local path for schemas. """ return variables.schemas @property - def version(self): + def versions(self) -> dict[str, SchemaVersion]: """ The default version used for finding schemas. """ diff --git a/src/pyhf/schema/loader.py b/src/pyhf/schema/loader.py index 920766c4dc..154a8b740f 100644 --- a/src/pyhf/schema/loader.py +++ b/src/pyhf/schema/loader.py @@ -3,6 +3,7 @@ import json import pyhf.exceptions from pyhf.schema import variables +from pyhf.typing import Schema # importlib.resources.as_file wasn't added until Python 3.9 # c.f. 
https://docs.python.org/3.9/library/importlib.html#importlib.resources.as_file @@ -12,7 +13,7 @@ import importlib_resources as resources -def load_schema(schema_id: str): +def load_schema(schema_id: str) -> Schema: """ Get a schema by relative path from cache, or load it into the cache and return. @@ -54,9 +55,3 @@ def load_schema(schema_id: str): schema = json.load(json_schema) variables.SCHEMA_CACHE[schema['$id']] = schema return variables.SCHEMA_CACHE[schema['$id']] - - -# pre-populate the cache to avoid network access -# on first validation in standard usage -# (not in pyhf.schema.variables to avoid circular imports) -load_schema(f'{variables.SCHEMA_VERSION}/defs.json') diff --git a/src/pyhf/schema/upgrader.py b/src/pyhf/schema/upgrader.py new file mode 100644 index 0000000000..4b7c459cc9 --- /dev/null +++ b/src/pyhf/schema/upgrader.py @@ -0,0 +1,61 @@ +from __future__ import annotations + +from pyhf.typing import Workspace, PatchSet, SchemaVersion, UpgradeProtocol +import copy + + +class Upgrade_1_0_1: + """ + Used for testing functionality of upgrade. + """ + + version: SchemaVersion = '1.0.1' + + @classmethod + def workspace(cls, spec: Workspace) -> Workspace: + """ + Upgrade the provided workspace specification. + + Args: + spec (dict): The specification to validate. + schema_name (str): The name of the schema to upgrade. + + Returns: + upgraded_spec (dict): Upgraded workspace specification. + + Raises: + pyhf.exceptions.InvalidSpecification: the specification is invalid + """ + + new_spec = copy.deepcopy(spec) + if spec['version'] == '1.0.0': + new_spec['version'] = cls.version + return new_spec + + @classmethod + def patchset(cls, spec: PatchSet) -> PatchSet: + """ + Upgrade the provided patchset specification. + + Args: + spec (dict): The specification to validate. + schema_name (str): The name of the schema to upgrade. + + Returns: + upgraded_spec (dict): Upgraded patchset specification. + + Raises: + pyhf.exceptions.InvalidSpecification: the specification is invalid + """ + + new_spec = copy.deepcopy(spec) + if spec['version'] == '1.0.0': + new_spec['version'] = cls.version + return new_spec + + +def upgrade(*, to_version: SchemaVersion | None = None) -> type[UpgradeProtocol]: + if to_version is None or to_version == '1.0.1': + return Upgrade_1_0_1 + + raise ValueError(f'{to_version} is not a valid version to upgrade to.') diff --git a/src/pyhf/schema/validator.py b/src/pyhf/schema/validator.py index 2540a3d002..24bde469f9 100644 --- a/src/pyhf/schema/validator.py +++ b/src/pyhf/schema/validator.py @@ -1,16 +1,28 @@ +from __future__ import annotations + import numbers from pathlib import Path -from typing import Mapping, Union - import jsonschema - +import logging import pyhf.exceptions from pyhf import tensor from pyhf.schema import variables from pyhf.schema.loader import load_schema +from pyhf.typing import Workspace, Model, Measurement, PatchSet +from typing import Any +import sys + +# importlib.resources.as_file wasn't added until Python 3.9 +# c.f. https://docs.python.org/3.9/library/importlib.html#importlib.resources.as_file +if sys.version_info >= (3, 9): + from importlib import resources +else: + import importlib_resources as resources +log = logging.getLogger(__name__) -def _is_array_or_tensor(checker, instance): + +def _is_array_or_tensor(checker: jsonschema.TypeChecker, instance: Any) -> bool: """ A helper function for allowing the validation of tensors as list types in schema validation. 
@@ -18,10 +30,12 @@ def _is_array_or_tensor(checker, instance): This will check for valid array types using any backends that have been loaded so far. """ - return isinstance(instance, (list, *tensor.array_types)) + return isinstance(instance, (list, *tensor.array_types)) # type: ignore[attr-defined] -def _is_number_or_tensor_subtype(checker, instance): +def _is_number_or_tensor_subtype( + checker: jsonschema.TypeChecker, instance: Any +) -> bool: """ A helper function for allowing the validation of tensor contents as number types in schema validation. @@ -31,16 +45,16 @@ def _is_number_or_tensor_subtype(checker, instance): is_number = jsonschema._types.is_number(checker, instance) if is_number: return True - return isinstance(instance, (numbers.Number, *tensor.array_subtypes)) + return isinstance(instance, (numbers.Number, *tensor.array_subtypes)) # type: ignore[attr-defined] def validate( - spec: Mapping, + spec: Workspace | Model | Measurement | PatchSet, schema_name: str, *, - version: Union[str, None] = None, + version: str | None = None, allow_tensors: bool = True, -): +) -> None: """ Validate the provided instance, ``spec``, against the schema associated with ``schema_name``. @@ -69,31 +83,47 @@ def validate( >>> """ - version = version or variables.SCHEMA_VERSION + latest_known_version = variables.SCHEMA_VERSION.get(schema_name) + + if latest_known_version is not None: + version = version or latest_known_version + if version != latest_known_version: + log.warning( + f"Specification requested version {version} but latest is {latest_known_version}. Upgrade your specification or downgrade pyhf." + ) + + if version is None: + msg = f'The version for {schema_name} is not set and could not be determined automatically as there is no default version specified for this schema. This could be due to using a schema that pyhf is not aware of, or a mistake.' + raise ValueError(msg) schema = load_schema(str(Path(version).joinpath(schema_name))) - # note: trailing slash needed for RefResolver to resolve correctly and by - # design, pathlib strips trailing slashes. See ref below: - # * https://bugs.python.org/issue21039 - # * https://github.com/python/cpython/issues/65238 - resolver = jsonschema.RefResolver( - base_uri=f"{Path(variables.schemas).joinpath(version).as_uri()}/", - referrer=schema_name, - store=variables.SCHEMA_CACHE, - ) - - Validator = jsonschema.Draft6Validator - - if allow_tensors: - type_checker = Validator.TYPE_CHECKER.redefine( - "array", _is_array_or_tensor - ).redefine("number", _is_number_or_tensor_subtype) - Validator = jsonschema.validators.extend(Validator, type_checker=type_checker) - - validator = Validator(schema, resolver=resolver, format_checker=None) - - try: - return validator.validate(spec) - except jsonschema.ValidationError as err: - raise pyhf.exceptions.InvalidSpecification(err, schema_name) + with resources.as_file(variables.schemas) as path: + # note: trailing slash needed for RefResolver to resolve correctly and by + # design, pathlib strips trailing slashes. 
See ref below: + # * https://bugs.python.org/issue21039 + # * https://github.com/python/cpython/issues/65238 + + # for type ignores below, see https://github.com/python-jsonschema/jsonschema/issues/997 + resolver = jsonschema.RefResolver( + base_uri=f"{path.joinpath(version).as_uri()}/", + referrer=schema_name, # type: ignore[arg-type] + store=variables.SCHEMA_CACHE, # type: ignore[arg-type] + ) + + Validator = jsonschema.Draft202012Validator + + if allow_tensors: + type_checker = Validator.TYPE_CHECKER.redefine( + "array", _is_array_or_tensor + ).redefine("number", _is_number_or_tensor_subtype) + Validator = jsonschema.validators.extend( + Validator, type_checker=type_checker + ) + + validator = Validator(schema, resolver=resolver, format_checker=None) + + try: + return validator.validate(spec) + except jsonschema.ValidationError as err: + raise pyhf.exceptions.InvalidSpecification(err, schema_name) # type: ignore[no-untyped-call] diff --git a/src/pyhf/schema/variables.py b/src/pyhf/schema/variables.py index 80c0a0dd06..fa7c8c61ad 100644 --- a/src/pyhf/schema/variables.py +++ b/src/pyhf/schema/variables.py @@ -1,4 +1,8 @@ +from __future__ import annotations import sys +from pyhf.typing import Schema, SchemaVersion, Traversable + +from pathlib import Path # importlib.resources.as_file wasn't added until Python 3.9 # c.f. https://docs.python.org/3.9/library/importlib.html#importlib.resources.as_file @@ -6,8 +10,12 @@ from importlib import resources else: import importlib_resources as resources -schemas = resources.files('pyhf') / "schemas" +schemas: Traversable | Path = resources.files('pyhf') / "schemas" -SCHEMA_CACHE = {} +SCHEMA_CACHE: dict[str, Schema] = {} SCHEMA_BASE = "https://scikit-hep.org/pyhf/schemas/" -SCHEMA_VERSION = '1.0.0' +SCHEMA_VERSION: dict[str, SchemaVersion] = { + 'model.json': '1.0.0', + 'workspace.json': '1.0.0', + 'patchset.json': '1.0.0', +} diff --git a/tests/test_schema/customschema/1.1.0/defs.json b/src/pyhf/schemas/1.1.0/defs.json similarity index 99% rename from tests/test_schema/customschema/1.1.0/defs.json rename to src/pyhf/schemas/1.1.0/defs.json index dd63e8d32a..02e316eb5c 100644 --- a/tests/test_schema/customschema/1.1.0/defs.json +++ b/src/pyhf/schemas/1.1.0/defs.json @@ -1,6 +1,6 @@ { - "$schema": "http://json-schema.org/draft-06/schema#", - "$id": "1.1.0/defs.json", + "$schema": "http://json-schema.org/draft/2020-12/schema#", + "$id": "https://scikit-hep.org/pyhf/schemas/1.1.0/defs.json", "definitions": { "workspace": { "type": "object", diff --git a/src/pyhf/schemas/1.1.0/jsonpatch.json b/src/pyhf/schemas/1.1.0/jsonpatch.json new file mode 100644 index 0000000000..93b7aba6b0 --- /dev/null +++ b/src/pyhf/schemas/1.1.0/jsonpatch.json @@ -0,0 +1,5 @@ +{ + "$schema": "http://json-schema.org/draft/2020-12/schema#", + "$id": "https://scikit-hep.org/pyhf/schemas/1.1.0/jsonpatch.json", + "$ref": "defs.json#/definitions/jsonpatch" +} diff --git a/src/pyhf/schemas/1.1.0/measurement.json b/src/pyhf/schemas/1.1.0/measurement.json new file mode 100644 index 0000000000..09db479667 --- /dev/null +++ b/src/pyhf/schemas/1.1.0/measurement.json @@ -0,0 +1,5 @@ +{ + "$schema": "http://json-schema.org/draft/2020-12/schema#", + "$id": "https://scikit-hep.org/pyhf/schemas/1.1.0/measurement.json", + "$ref": "defs.json#/definitions/measurement" +} diff --git a/src/pyhf/schemas/1.1.0/model.json b/src/pyhf/schemas/1.1.0/model.json new file mode 100644 index 0000000000..a3d6e6ae4a --- /dev/null +++ b/src/pyhf/schemas/1.1.0/model.json @@ -0,0 +1,5 @@ +{ + "$schema": 
"http://json-schema.org/draft/2020-12/schema#", + "$id": "https://scikit-hep.org/pyhf/schemas/1.1.0/model.json", + "$ref": "defs.json#/definitions/model" +} diff --git a/src/pyhf/schemas/1.1.0/patchset.json b/src/pyhf/schemas/1.1.0/patchset.json new file mode 100644 index 0000000000..f453981dad --- /dev/null +++ b/src/pyhf/schemas/1.1.0/patchset.json @@ -0,0 +1,5 @@ +{ + "$schema": "http://json-schema.org/draft/2020-12/schema#", + "$id": "https://scikit-hep.org/pyhf/schemas/1.1.0/patchset.json", + "$ref": "defs.json#/definitions/patchset" +} diff --git a/src/pyhf/schemas/1.1.0/workspace.json b/src/pyhf/schemas/1.1.0/workspace.json new file mode 100644 index 0000000000..22ee3e3736 --- /dev/null +++ b/src/pyhf/schemas/1.1.0/workspace.json @@ -0,0 +1,5 @@ +{ + "$schema": "http://json-schema.org/draft/2020-12/schema#", + "$id": "https://scikit-hep.org/pyhf/schemas/1.1.0/workspace.json", + "$ref": "defs.json#/definitions/workspace" +} diff --git a/src/pyhf/typing.py b/src/pyhf/typing.py index f012e3af22..0bea2a65ce 100644 --- a/src/pyhf/typing.py +++ b/src/pyhf/typing.py @@ -1,16 +1,29 @@ import os +import sys from typing import ( Any, Literal, + Mapping, MutableSequence, Protocol, Sequence, + SupportsFloat as Numeric, SupportsIndex, Tuple, TypedDict, Union, ) +if sys.version_info >= (3, 9): + from importlib.abc import Traversable +else: + from importlib_resources.abc import Traversable + +if sys.version_info >= (3, 11): + from typing import NotRequired, Self +else: + from typing_extensions import NotRequired, Self + __all__ = ( "PathOrStr", "ParameterBase", @@ -27,11 +40,15 @@ "Modifier", "Sample", "Channel", + "Model", "Observation", + "PatchSet", "Workspace", "Literal", - "TypedDict", "Protocol", + "Self", + "Traversable", + "TypedDict", ) @@ -41,6 +58,9 @@ Shape = Tuple[int, ...] ShapeLike = Union[SupportsIndex, Sequence[SupportsIndex]] +Schema = Mapping[str, Any] +SchemaVersion = Literal['1.0.1', '1.0.0'] + class ParameterBase(TypedDict, total=False): auxdata: Sequence[float] @@ -119,14 +139,11 @@ class LumiSys(TypedDict): ] -class SampleBase(TypedDict, total=False): - parameter_configs: Sequence[Parameter] - - -class Sample(SampleBase): +class Sample(TypedDict): name: str data: Sequence[float] modifiers: Sequence[Modifier] + parameter_configs: NotRequired[Sequence[Parameter]] class Channel(TypedDict): @@ -139,10 +156,39 @@ class Observation(TypedDict): data: Sequence[float] +class Model(TypedDict): + channels: Sequence[Channel] + parameters: NotRequired[Sequence[Parameter]] + + class Workspace(TypedDict): measurements: Sequence[Measurement] channels: Sequence[Channel] observations: Sequence[Observation] + version: SchemaVersion + + +class PatchMetadata(TypedDict): + name: str + values: Sequence[Union[Numeric, str]] + + +class Patch(TypedDict): + patch: Sequence[Mapping[str, Any]] + metadata: PatchMetadata + + +class PatchSetMetadata(TypedDict): + digests: Mapping[str, str] + labels: Sequence[str] + description: str + references: Mapping[str, str] + + +class PatchSet(TypedDict): + patches: Sequence[Patch] + metadata: PatchSetMetadata + version: SchemaVersion class TensorBackend(Protocol): @@ -164,3 +210,15 @@ def sample(self, sample_shape: Shape) -> Any: def log_prob(self, value: Any) -> Any: ... + + +class UpgradeProtocol(Protocol): + version: SchemaVersion + + @classmethod + def workspace(cls, spec: Workspace) -> Workspace: + ... + + @classmethod + def patchset(cls, spec: PatchSet) -> PatchSet: + ... 
diff --git a/src/pyhf/workspace.py b/src/pyhf/workspace.py index 7ce0bc486d..7c5eed77e0 100644 --- a/src/pyhf/workspace.py +++ b/src/pyhf/workspace.py @@ -819,7 +819,7 @@ def build(cls, model, data, name='measurement', validate: bool = True): """ workspace = copy.deepcopy(dict(channels=model.spec['channels'])) - workspace['version'] = schema.version + workspace['version'] = schema.versions['workspace.json'] workspace['measurements'] = [ { 'name': name, diff --git a/tests/test_public_api_repr.py b/tests/test_public_api_repr.py index f1482d8921..7bd4141276 100644 --- a/tests/test_public_api_repr.py +++ b/tests/test_public_api_repr.py @@ -260,6 +260,7 @@ def test_schema_public_api(): assert dir(pyhf.schema) == [ "load_schema", "path", + "upgrade", "validate", "version", ] diff --git a/tests/test_schema.py b/tests/test_schema.py index 384fcf0276..d41cfc16e1 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -8,12 +8,25 @@ import pyhf -@pytest.mark.parametrize('version', ['1.0.0']) @pytest.mark.parametrize( - 'schema', ['defs.json', 'measurement.json', 'model.json', 'workspace.json'] + 'schema', + [ + "1.0.0/model.json", + "1.0.0/workspace.json", + "1.0.0/defs.json", + "1.0.0/jsonpatch.json", + "1.0.0/measurement.json", + "1.0.0/patchset.json", + "1.1.0/model.json", + "1.1.0/workspace.json", + "1.1.0/defs.json", + "1.1.0/jsonpatch.json", + "1.1.0/measurement.json", + "1.1.0/patchset.json", + ], ) -def test_get_schema(version, schema): - assert pyhf.schema.load_schema(f'{version}/{schema}') +def test_get_schema(schema): + assert pyhf.schema.load_schema(schema) def test_load_missing_schema(): @@ -22,9 +35,9 @@ def test_load_missing_schema(): def test_schema_attributes(): - assert hasattr(pyhf.schema, 'version') + assert hasattr(pyhf.schema, 'versions') assert hasattr(pyhf.schema, 'path') - assert pyhf.schema.version + assert pyhf.schema.versions assert pyhf.schema.path @@ -61,7 +74,11 @@ def test_schema_changeable(datadir, monkeypatch, self_restoring_schema_globals): assert len(pyhf.schema.variables.SCHEMA_CACHE) == 0 with open(new_path / "custom.json", encoding="utf-8") as spec_file: assert pyhf.Workspace(json.load(spec_file)) - assert len(pyhf.schema.variables.SCHEMA_CACHE) == 1 + assert len(pyhf.schema.variables.SCHEMA_CACHE) == 2 + assert list(pyhf.schema.variables.SCHEMA_CACHE) == [ + 'https://scikit-hep.org/pyhf/schemas/0.1.0/workspace.json', + 'https://scikit-hep.org/pyhf/schemas/0.1.0/defs.json', + ] def test_schema_changeable_context(datadir, monkeypatch, self_restoring_schema_globals): @@ -79,7 +96,11 @@ def test_schema_changeable_context(datadir, monkeypatch, self_restoring_schema_g assert len(pyhf.schema.variables.SCHEMA_CACHE) == 0 with open(new_path / "custom.json", encoding="utf-8") as spec_file: assert pyhf.Workspace(json.load(spec_file)) - assert len(pyhf.schema.variables.SCHEMA_CACHE) == 1 + assert len(pyhf.schema.variables.SCHEMA_CACHE) == 2 + assert list(pyhf.schema.variables.SCHEMA_CACHE) == [ + 'https://scikit-hep.org/pyhf/schemas/0.1.0/workspace.json', + 'https://scikit-hep.org/pyhf/schemas/0.1.0/defs.json', + ] assert old_path == pyhf.schema.path assert old_cache == pyhf.schema.variables.SCHEMA_CACHE @@ -543,8 +564,9 @@ def test_normsys_additional_properties(): ], ids=['add', 'replace', 'test', 'remove', 'move', 'copy'], ) -def test_jsonpatch(patch): - pyhf.schema.validate([patch], 'jsonpatch.json') +@pytest.mark.parametrize('version', ['1.0.0']) +def test_jsonpatch(patch, version): + pyhf.schema.validate([patch], 'jsonpatch.json', version=version) 
@pytest.mark.parametrize( @@ -568,16 +590,18 @@ def test_jsonpatch(patch): 'move_nopath', ], ) -def test_jsonpatch_fail(patch): +@pytest.mark.parametrize('version', ['1.0.0']) +def test_jsonpatch_fail(patch, version): with pytest.raises(pyhf.exceptions.InvalidSpecification): - pyhf.schema.validate([patch], 'jsonpatch.json') + pyhf.schema.validate([patch], 'jsonpatch.json', version=version) @pytest.mark.parametrize('patchset_file', ['patchset_good.json']) -def test_patchset(datadir, patchset_file): +@pytest.mark.parametrize('version', ['1.0.0']) +def test_patchset(datadir, patchset_file, version): with open(datadir.joinpath(patchset_file), encoding="utf-8") as patch_file: patchset = json.load(patch_file) - pyhf.schema.validate(patchset, 'patchset.json') + pyhf.schema.validate(patchset, 'patchset.json', version=version) @pytest.mark.parametrize( diff --git a/tests/test_schema/customschema/0.1.0/defs.json b/tests/test_schema/customschema/0.1.0/defs.json new file mode 100644 index 0000000000..373c019fd9 --- /dev/null +++ b/tests/test_schema/customschema/0.1.0/defs.json @@ -0,0 +1,315 @@ +{ + "$schema": "http://json-schema.org/draft-06/schema#", + "$id": "https://scikit-hep.org/pyhf/schemas/0.1.0/defs.json", + "definitions": { + "workspace": { + "type": "object", + "properties": { + "channels": { "type": "array", "items": {"$ref": "#/definitions/channel"}, "minItems": 1 }, + "measurements": { "type": "array", "items": {"$ref": "#/definitions/measurement"}, "minItems": 1 }, + "observations": { "type": "array", "items": {"$ref": "#/definitions/observation" }, "minItems": 1 }, + "version": { "const": "0.1.0" } + }, + "additionalProperties": false, + "required": ["channels", "measurements", "observations", "version"] + }, + "model": { + "type": "object", + "properties": { + "channels": { "type": "array", "items": {"$ref": "#/definitions/channel"}, "minItems": 1 }, + "parameters": { "type": "array", "items": {"$ref": "#/definitions/parameter"} } + }, + "additionalProperties": false, + "required": ["channels"] + }, + "observation": { + "type": "object", + "properties": { + "name": { "type": "string" }, + "data": { "type": "array", "items": {"type": "number"}, "minItems": 1 } + }, + "required": ["name", "data"], + "additionalProperties": false + }, + "measurement": { + "type": "object", + "properties": { + "name": { "type": "string" }, + "config": { "$ref": "#/definitions/config" } + }, + "required": ["name", "config"], + "additionalProperties": false + }, + "config": { + "type": "object", + "properties": { + "poi": { "type" : "string" }, + "parameters": { "type": "array", "items": {"$ref": "#/definitions/parameter"} } + }, + "required": ["poi", "parameters"], + "additionalProperties": false + }, + "parameter": { + "type": "object", + "properties": { + "name": { "type": "string" }, + "inits": { "type": "array", "items": {"type": "number"}, "minItems": 1 }, + "bounds": { "type": "array", "items": {"type": "array", "items": {"type": "number", "minItems": 2, "maxItems": 2}}, "minItems": 1 }, + "auxdata": { "type": "array", "items": {"type": "number"}, "minItems": 1 }, + "factors": { "type": "array", "items": {"type": "number"}, "minItems": 1 }, + "sigmas": { "type": "array", "items": {"type": "number"}, "minItems": 1}, + "fixed": { "type": "boolean" } + }, + "required": ["name"], + "additionalProperties": false + }, + "channel": { + "type": "object", + "properties": { + "name": { "type": "string" }, + "samples": { "type": "array", "items": {"$ref": "#/definitions/sample"}, "minItems": 1 } + }, + 
"required": ["name", "samples"], + "additionalProperties": false + }, + "sample": { + "type": "object", + "properties": { + "name": { "type": "string" }, + "data": { "type": "array", "items": {"type": "number"}, "minItems": 1 }, + "modifiers": { + "type": "array", + "items": { + "anyOf": [ + { "$ref": "#/definitions/modifier/histosys" }, + { "$ref": "#/definitions/modifier/lumi" }, + { "$ref": "#/definitions/modifier/normfactor" }, + { "$ref": "#/definitions/modifier/normsys" }, + { "$ref": "#/definitions/modifier/shapefactor" }, + { "$ref": "#/definitions/modifier/shapesys" }, + { "$ref": "#/definitions/modifier/staterror" } + ] + } + } + }, + "required": ["name", "data", "modifiers"], + "additionalProperties": false + }, + "modifier": { + "histosys": { + "type": "object", + "properties": { + "name": { "type": "string" }, + "type": { "const": "histosys" }, + "data": { + "type": "object", + "properties": { + "lo_data": { "type": "array", "items": {"type": "number"}, "minItems": 1 }, + "hi_data": { "type": "array", "items": {"type": "number"}, "minItems": 1 } + }, + "required": ["lo_data", "hi_data"], + "additionalProperties": false + } + }, + "required": ["name", "type", "data"], + "additionalProperties": false + }, + "lumi": { + "type": "object", + "properties": { + "name": { "const": "lumi" }, + "type": { "const": "lumi" }, + "data": { "type": "null" } + }, + "required": ["name", "type", "data"], + "additionalProperties": false + }, + "normfactor": { + "type": "object", + "properties": { + "name": { "type": "string" }, + "type": { "const": "normfactor" }, + "data": { "type": "null" } + }, + "required": ["name", "type", "data"], + "additionalProperties": false + }, + "normsys": { + "type": "object", + "properties": { + "name": { "type": "string" }, + "type": { "const": "normsys" }, + "data": { + "type": "object", + "properties": { + "lo": { "type": "number" }, + "hi": { "type": "number"} + }, + "required": ["lo", "hi"], + "additionalProperties": false + } + }, + "required": ["name", "type", "data"], + "additionalProperties": false + }, + "shapefactor": { + "type": "object", + "properties": { + "name": { "type": "string" }, + "type": { "const": "shapefactor" }, + "data": { "type": "null" } + }, + "required": ["name", "type", "data"], + "additionalProperties": false + }, + "shapesys": { + "type": "object", + "properties": { + "name": { "type": "string" }, + "type": { "const": "shapesys" }, + "data": { "type": "array", "items": {"type": "number"}, "minItems": 1 } + }, + "required": ["name", "type", "data"], + "additionalProperties": false + }, + "staterror": { + "type": "object", + "properties": { + "name": { "type": "string" }, + "type": { "const": "staterror" }, + "data": { "type": "array", "items": {"type": "number"}, "minItems": 1 } + }, + "required": ["name", "type", "data"], + "additionalProperties": false + } + }, + "jsonpatch": { + "description": "an array of patch operations (copied from http://json.schemastore.org/json-patch)", + "type": "array", + "items": { + "$ref": "#/definitions/jsonpatch/operation" + }, + "operation": { + "type": "object", + "required": [ "op", "path" ], + "allOf": [ { "$ref": "#/definitions/jsonpatch/path" } ], + "oneOf": [ + { + "required": [ "value" ], + "properties": { + "op": { + "description": "The operation to perform.", + "type": "string", + "enum": [ "add", "replace", "test" ] + }, + "value": { + "description": "The value to add, replace or test." 
+ } + } + }, + { + "properties": { + "op": { + "description": "The operation to perform.", + "type": "string", + "enum": [ "remove" ] + } + } + }, + { + "required": [ "from" ], + "properties": { + "op": { + "description": "The operation to perform.", + "type": "string", + "enum": [ "move", "copy" ] + }, + "from": { + "description": "A JSON Pointer path pointing to the location to move/copy from.", + "type": "string" + } + } + } + ] + }, + "path": { + "properties": { + "path": { + "description": "A JSON Pointer path.", + "type": "string" + } + } + } + }, + "patchset": { + "description": "A set of JSONPatch patches which modify a pyhf workspace", + "type": "object", + "properties": { + "patches": { "$ref": "#/definitions/patchset/patches" }, + "metadata": { "$ref": "#/definitions/patchset/metadata" }, + "version": { "const": "0.1.0" } + }, + "additionalProperties": false, + "required": ["patches", "metadata", "version"], + "references": { + "type": "object", + "properties": { + "hepdata": { "type": "string", "pattern": "^ins[0-9]{7}$" } + }, + "additionalProperties": false, + "minProperties": 1 + }, + "digests": { + "type": "object", + "properties": { + "md5": { "type": "string", "pattern": "^[a-f0-9]{32}$" }, + "sha256": { "type": "string", "pattern": "^[a-fA-F0-9]{64}$" } + }, + "additionalProperties": false, + "minProperties": 1 + }, + "patches": { + "type": "array", + "items": { "$ref": "#/definitions/patchset/patch" }, + "minItems": 1 + }, + "patch": { + "type": "object", + "properties": { + "patch": { "$ref": "#/definitions/jsonpatch" }, + "metadata": { + "type": "object", + "properties": { + "name": { "type": "string", "pattern": "^[a-zA-Z0-9_]+$" }, + "values": { + "type": "array", + "items": { + "anyOf": [{"type": "number"}, {"type": "string"}] + } + } + }, + "required": ["name", "values"], + "additionalProperties": true + } + }, + "required": ["metadata", "patch"], + "additionalProperties": false + }, + "metadata": { + "type": "object", + "properties": { + "digests": { "$ref": "#/definitions/patchset/digests" }, + "labels": { + "type": "array", + "items": { "type": "string", "pattern": "^[a-zA-Z0-9_]+$" }, + "minItems": 1 + }, + "description": { "type": "string" }, + "references": { "$ref": "#/definitions/patchset/references" } + }, + "required": ["references", "digests", "labels", "description"], + "additionalProperties": true + } + } + } +} diff --git a/tests/test_schema/customschema/1.1.0/jsonpatch.json b/tests/test_schema/customschema/0.1.0/jsonpatch.json similarity index 60% rename from tests/test_schema/customschema/1.1.0/jsonpatch.json rename to tests/test_schema/customschema/0.1.0/jsonpatch.json index 63b4496bc7..fb0958733e 100644 --- a/tests/test_schema/customschema/1.1.0/jsonpatch.json +++ b/tests/test_schema/customschema/0.1.0/jsonpatch.json @@ -1,5 +1,5 @@ { "$schema": "http://json-schema.org/draft-06/schema#", - "$id": "1.1.0/jsonpatch.json", + "$id": "https://scikit-hep.org/pyhf/schemas/0.1.0/jsonpatch.json", "$ref": "defs.json#/definitions/jsonpatch" } diff --git a/tests/test_schema/customschema/1.1.0/measurement.json b/tests/test_schema/customschema/0.1.0/measurement.json similarity index 60% rename from tests/test_schema/customschema/1.1.0/measurement.json rename to tests/test_schema/customschema/0.1.0/measurement.json index 124d84a522..3a5043bfe5 100644 --- a/tests/test_schema/customschema/1.1.0/measurement.json +++ b/tests/test_schema/customschema/0.1.0/measurement.json @@ -1,5 +1,5 @@ { "$schema": "http://json-schema.org/draft-06/schema#", - "$id": 
"1.1.0/measurement.json", + "$id": "https://scikit-hep.org/pyhf/schemas/0.1.0/measurement.json", "$ref": "defs.json#/definitions/measurement" } diff --git a/tests/test_schema/customschema/1.1.0/model.json b/tests/test_schema/customschema/0.1.0/model.json similarity index 61% rename from tests/test_schema/customschema/1.1.0/model.json rename to tests/test_schema/customschema/0.1.0/model.json index f44f47edc6..b7d1d3601b 100644 --- a/tests/test_schema/customschema/1.1.0/model.json +++ b/tests/test_schema/customschema/0.1.0/model.json @@ -1,5 +1,5 @@ { "$schema": "http://json-schema.org/draft-06/schema#", - "$id": "1.1.0/model.json", + "$id": "https://scikit-hep.org/pyhf/schemas/0.1.0/model.json", "$ref": "defs.json#/definitions/model" } diff --git a/tests/test_schema/customschema/1.1.0/patchset.json b/tests/test_schema/customschema/0.1.0/patchset.json similarity index 60% rename from tests/test_schema/customschema/1.1.0/patchset.json rename to tests/test_schema/customschema/0.1.0/patchset.json index c7f5596dc5..5ced7126e9 100644 --- a/tests/test_schema/customschema/1.1.0/patchset.json +++ b/tests/test_schema/customschema/0.1.0/patchset.json @@ -1,5 +1,5 @@ { "$schema": "http://json-schema.org/draft-06/schema#", - "$id": "1.1.0/patchset.json", + "$id": "https://scikit-hep.org/pyhf/schemas/0.1.0/patchset.json", "$ref": "defs.json#/definitions/patchset" } diff --git a/tests/test_schema/customschema/1.1.0/workspace.json b/tests/test_schema/customschema/0.1.0/workspace.json similarity index 60% rename from tests/test_schema/customschema/1.1.0/workspace.json rename to tests/test_schema/customschema/0.1.0/workspace.json index 5e91630381..8f439118c4 100644 --- a/tests/test_schema/customschema/1.1.0/workspace.json +++ b/tests/test_schema/customschema/0.1.0/workspace.json @@ -1,5 +1,5 @@ { "$schema": "http://json-schema.org/draft-06/schema#", - "$id": "1.1.0/workspace.json", + "$id": "https://scikit-hep.org/pyhf/schemas/0.1.0/workspace.json", "$ref": "defs.json#/definitions/workspace" } diff --git a/tests/test_schema/customschema/custom.json b/tests/test_schema/customschema/custom.json index f35ea110aa..028f5ed112 100644 --- a/tests/test_schema/customschema/custom.json +++ b/tests/test_schema/customschema/custom.json @@ -70,5 +70,5 @@ "name": "singlechannel" } ], - "version": "1.1.0" + "version": "0.1.0" } diff --git a/tests/test_scripts.py b/tests/test_scripts.py index 9d00814c8e..c7dd694295 100644 --- a/tests/test_scripts.py +++ b/tests/test_scripts.py @@ -770,3 +770,39 @@ def test_sort_outfile(tmp_path, script_runner): ret = script_runner.run(shlex.split(command)) assert ret.success + + +@pytest.mark.parametrize('output_file', [False, True]) +def test_upgrade_workspace(tmpdir, datadir, script_runner, output_file): + temp = tmpdir.join("upgraded_output.json") + command = f'pyhf upgrade workspace {datadir.joinpath("workspace_1.0.0.json")} --version 1.0.1' + if output_file: + command += f" --output-file {temp.strpath}" + + ret = script_runner.run(*shlex.split(command)) + + assert ret.success + if output_file: + extracted_output = json.loads(temp.read()) + else: + extracted_output = json.loads(ret.stdout) + + assert extracted_output['version'] == '1.0.1' + + +@pytest.mark.parametrize('output_file', [False, True]) +def test_upgrade_patchset(tmpdir, datadir, script_runner, output_file): + temp = tmpdir.join("upgraded_output.json") + command = f'pyhf upgrade patchset {datadir.joinpath("patchset_1.0.0.json")} --version 1.0.1' + if output_file: + command += f" --output-file {temp.strpath}" + + ret = 
script_runner.run(*shlex.split(command)) + + assert ret.success + if output_file: + extracted_output = json.loads(temp.read()) + else: + extracted_output = json.loads(ret.stdout) + + assert extracted_output['version'] == '1.0.1' diff --git a/tests/test_scripts/patchset_1.0.0.json b/tests/test_scripts/patchset_1.0.0.json new file mode 120000 index 0000000000..962dd90d3a --- /dev/null +++ b/tests/test_scripts/patchset_1.0.0.json @@ -0,0 +1 @@ +../test_upgrade/patchset_1.0.0.json \ No newline at end of file diff --git a/tests/test_scripts/workspace_1.0.0.json b/tests/test_scripts/workspace_1.0.0.json new file mode 120000 index 0000000000..59100bc292 --- /dev/null +++ b/tests/test_scripts/workspace_1.0.0.json @@ -0,0 +1 @@ +../test_upgrade/workspace_1.0.0.json \ No newline at end of file diff --git a/tests/test_upgrade.py b/tests/test_upgrade.py new file mode 100644 index 0000000000..5295bcc19e --- /dev/null +++ b/tests/test_upgrade.py @@ -0,0 +1,78 @@ +import pyhf +import pyhf.schema +import json +import logging +import pytest + + +def test_upgrade_bad_version(datadir): + with pytest.raises(ValueError): + pyhf.schema.upgrade(to_version='0.9.0') + + +def test_upgrade_to_latest(datadir): + with datadir.joinpath("workspace_1.0.0.json").open(encoding="utf-8") as fp: + ws = json.load(fp) + pyhf.schema.upgrade().workspace(ws) + + with datadir.joinpath("workspace_1.0.0.json").open(encoding="utf-8") as fp: + ps = json.load(fp) + pyhf.schema.upgrade().patchset(ps) + + +def test_1_0_0_workspace(datadir, caplog, monkeypatch): + """ + Test upgrading a workspace from 1.0.0 to 1.0.1 + """ + with datadir.joinpath("workspace_1.0.0.json").open(encoding="utf-8") as fp: + spec = json.load(fp) + + monkeypatch.setitem(pyhf.schema.versions, 'workspace.json', '1.0.1') + with caplog.at_level(logging.INFO, 'pyhf.schema'): + pyhf.schema.validate(spec, 'workspace.json', version='1.0.0') + assert 'Specification requested version 1.0.0' in caplog.text + + caplog.clear() + + new_spec = pyhf.schema.upgrade(to_version='1.0.1').workspace(spec) + assert new_spec['version'] == '1.0.1' + + +def test_1_0_0_patchset(datadir, caplog, monkeypatch): + """ + Test upgrading a patchset from 1.0.0 to 1.0.1 + """ + with datadir.joinpath("patchset_1.0.0.json").open(encoding="utf-8") as fp: + spec = json.load(fp) + + monkeypatch.setitem(pyhf.schema.versions, 'patchset.json', '1.0.1') + with caplog.at_level(logging.INFO, 'pyhf.schema'): + pyhf.schema.validate(spec, 'patchset.json', version='1.0.0') + assert 'Specification requested version 1.0.0' in caplog.text + + caplog.clear() + + new_spec = pyhf.schema.upgrade(to_version='1.0.1').patchset(spec) + assert new_spec['version'] == '1.0.1' + + +def test_1_0_1_workspace(datadir): + """ + Test upgrading a workspace from 1.0.1 to 1.0.1 + """ + with datadir.joinpath("workspace_1.0.1.json").open(encoding="utf-8") as fp: + spec = json.load(fp) + + new_spec = pyhf.schema.upgrade(to_version='1.0.1').workspace(spec) + assert new_spec['version'] == '1.0.1' + + +def test_1_0_1_patchset(datadir): + """ + Test upgrading a patchset from 1.0.1 to 1.0.1 + """ + with datadir.joinpath("patchset_1.0.1.json").open(encoding="utf-8") as fp: + spec = json.load(fp) + + new_spec = pyhf.schema.upgrade(to_version='1.0.1').patchset(spec) + assert new_spec['version'] == '1.0.1' diff --git a/tests/test_upgrade/patchset_1.0.0.json b/tests/test_upgrade/patchset_1.0.0.json new file mode 100644 index 0000000000..04dcee3b50 --- /dev/null +++ b/tests/test_upgrade/patchset_1.0.0.json @@ -0,0 +1,29 @@ +{ + "metadata": { + 
"references": { "hepdata": "ins1234567" }, + "description": "patchset for validation/xmlimport_input/config/example.xml", + "digests": { "sha256": "7c32ca3b8db75cbafcf5cd7ed4672fa2b1fa69e391c9b89068dd947a521866ec" }, + "labels": ["x"] + }, + "patches": [ + { + "metadata": { + "name": "patch_channel1_signal_syst1", + "values": [0] + }, + "patch": [ + { + "op": "replace", + "path": "/channels/0/samples/0/modifiers/0/data/hi", + "value": 1.2 + }, + { + "op": "replace", + "path": "/channels/0/samples/0/modifiers/0/data/lo", + "value": 0.8 + } + ] + } + ], + "version": "1.0.0" +} diff --git a/tests/test_upgrade/patchset_1.0.1.json b/tests/test_upgrade/patchset_1.0.1.json new file mode 100644 index 0000000000..f4518bd0b9 --- /dev/null +++ b/tests/test_upgrade/patchset_1.0.1.json @@ -0,0 +1,29 @@ +{ + "metadata": { + "references": { "hepdata": "ins1234567" }, + "description": "patchset for validation/xmlimport_input/config/example.xml", + "digests": { "sha256": "7c32ca3b8db75cbafcf5cd7ed4672fa2b1fa69e391c9b89068dd947a521866ec" }, + "labels": ["x"] + }, + "patches": [ + { + "metadata": { + "name": "patch_channel1_signal_syst1", + "values": [0] + }, + "patch": [ + { + "op": "replace", + "path": "/channels/0/samples/0/modifiers/0/data/hi", + "value": 1.2 + }, + { + "op": "replace", + "path": "/channels/0/samples/0/modifiers/0/data/lo", + "value": 0.8 + } + ] + } + ], + "version": "1.0.1" +} diff --git a/tests/test_upgrade/workspace_1.0.0.json b/tests/test_upgrade/workspace_1.0.0.json new file mode 100644 index 0000000000..bda01fa940 --- /dev/null +++ b/tests/test_upgrade/workspace_1.0.0.json @@ -0,0 +1,82 @@ +{ + "channels": [ + { + "name": "singlechannel", + "samples": [ + { + "data": [ + 5 + ], + "modifiers": [ + { + "data": null, + "name": "mu", + "type": "normfactor" + } + ], + "name": "signal" + }, + { + "data": [ + 50 + ], + "modifiers": [ + { + "data": [ + 6 + ], + "name": "uncorr_bkguncrt", + "type": "shapesys" + } + ], + "name": "background" + } + ] + } + ], + "measurements": [ + { + "config": { + "parameters": [ + { + "bounds": [ + [ + 0, + 10 + ] + ], + "fixed": false, + "inits": [ + 1 + ], + "name": "mu" + }, + { + "bounds": [ + [ + 1e-10, + 10 + ] + ], + "fixed": false, + "inits": [ + 1 + ], + "name": "uncorr_bkguncrt" + } + ], + "poi": "mu" + }, + "name": "measurement" + } + ], + "observations": [ + { + "data": [ + 50 + ], + "name": "singlechannel" + } + ], + "version": "1.0.0" +} diff --git a/tests/test_upgrade/workspace_1.0.1.json b/tests/test_upgrade/workspace_1.0.1.json new file mode 100644 index 0000000000..72876811a2 --- /dev/null +++ b/tests/test_upgrade/workspace_1.0.1.json @@ -0,0 +1,82 @@ +{ + "channels": [ + { + "name": "singlechannel", + "samples": [ + { + "data": [ + 5 + ], + "modifiers": [ + { + "data": null, + "name": "mu", + "type": "normfactor" + } + ], + "name": "signal" + }, + { + "data": [ + 50 + ], + "modifiers": [ + { + "data": [ + 6 + ], + "name": "uncorr_bkguncrt", + "type": "shapesys" + } + ], + "name": "background" + } + ] + } + ], + "measurements": [ + { + "config": { + "parameters": [ + { + "bounds": [ + [ + 0, + 10 + ] + ], + "fixed": false, + "inits": [ + 1 + ], + "name": "mu" + }, + { + "bounds": [ + [ + 1e-10, + 10 + ] + ], + "fixed": false, + "inits": [ + 1 + ], + "name": "uncorr_bkguncrt" + } + ], + "poi": "mu" + }, + "name": "measurement" + } + ], + "observations": [ + { + "data": [ + 50 + ], + "name": "singlechannel" + } + ], + "version": "1.0.1" +} diff --git a/tests/test_workspace.py b/tests/test_workspace.py index e966d6c3f1..0bfb12758c 100644 --- 
a/tests/test_workspace.py +++ b/tests/test_workspace.py @@ -452,6 +452,7 @@ def test_combine_workspace_incompatible_poi(workspace_factory, join): def test_combine_workspace_diff_version(workspace_factory, join): ws = workspace_factory() ws.version = '1.0.0' + ws['version'] = '1.0.0' new_ws = ws.rename( channels={channel: f'renamed_{channel}' for channel in ws.channels}, samples={sample: f'renamed_{sample}' for sample in ws.samples},
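To round off the validator changes in `src/pyhf/schema/validator.py`, here is a short sketch (again, illustration only, not part of the diff) of how the per-schema default version is now resolved; the `workspace_1.0.0.json` filename is the same assumed fixture as above.

```python
# Sketch of the per-schema default version handling in the reworked validate().
# Assumes a local workspace_1.0.0.json like the fixture in tests/test_upgrade/.
import json

import pyhf.schema

with open("workspace_1.0.0.json", encoding="utf-8") as spec_file:
    spec = json.load(spec_file)

# The single SCHEMA_VERSION string is replaced by a mapping keyed by schema name,
# exposed through the renamed pyhf.schema.versions property.
assert pyhf.schema.versions["workspace.json"] == "1.0.0"

# With no explicit version, validate() falls back to that per-schema default.
# Requesting a version that differs from the default logs a warning telling the
# user to upgrade their specification or downgrade pyhf.
pyhf.schema.validate(spec, "workspace.json")
```

Because `load_schema` no longer pre-populates the cache with `defs.json`, schemas are cached lazily on first use; the `test_schema_changeable*` tests above now expect two cache entries (`workspace.json` and `defs.json`) after validating a single workspace.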