From 6a306d3c57f01e2930b75a476b99f78f87e8fca5 Mon Sep 17 00:00:00 2001
From: Giordon Stark
Date: Thu, 8 Sep 2022 08:10:52 -0700
Subject: [PATCH 01/17] add in the notebook from lukas

---
 pyproject.toml                     |   2 +
 src/pyhf/experimental/__init__.py  |   0
 src/pyhf/experimental/modifiers.py | 160 +++++++++++++++++++++++++++++
 tests/test_experimental.py         |  77 ++++++++++++++
 4 files changed, 239 insertions(+)
 create mode 100644 src/pyhf/experimental/__init__.py
 create mode 100644 src/pyhf/experimental/modifiers.py
 create mode 100644 tests/test_experimental.py

diff --git a/pyproject.toml b/pyproject.toml
index 1e50c697ce..04d2942e9e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -244,6 +244,7 @@ warn_unreachable = true
 module = [
     'jax.*',
     'matplotlib.*',
+    'numexpr.*',
     'scipy.*',
     'tensorflow.*',
     'tensorflow_probability.*',
@@ -262,6 +263,7 @@ module = [
     'pyhf.cli.*',
     'pyhf.modifiers.*',
     'pyhf.exceptions.*',
+    'pyhf.experimental.*',
     'pyhf.parameters.*',
     'pyhf.schema.*',
     'pyhf.writexml',
diff --git a/src/pyhf/experimental/__init__.py b/src/pyhf/experimental/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/src/pyhf/experimental/modifiers.py b/src/pyhf/experimental/modifiers.py
new file mode 100644
index 0000000000..cf4c0dc7e2
--- /dev/null
+++ b/src/pyhf/experimental/modifiers.py
@@ -0,0 +1,160 @@
+from __future__ import annotations
+import pyhf
+from pyhf.parameters import ParamViewer
+from pyhf import get_backend
+from pyhf import events
+
+from typing import Sequence, Callable, Any
+
+
+class BaseApplier:
+    ...
+
+
+class BaseBuilder:
+    ...
+
+
+def _allocate_new_param(
+    p: dict[str, Sequence[float]]
+) -> dict[str, str | bool | int | Sequence[float]]:
+    return {
+        'paramset_type': 'unconstrained',
+        'n_parameters': 1,
+        'is_shared': True,
+        'inits': p['inits'],
+        'bounds': p['bounds'],
+        'is_scalar': True,
+        'fixed': False,
+    }
+
+
+def make_func(expression: str, deps: list[str]) -> Callable[[Sequence[float]], Any]:
+    def func(d: Sequence[float]) -> Any:
+        import numexpr as ne
+
+        return ne.evaluate(expression, local_dict=dict(zip(deps, d)))
+
+    return func
+
+
+def make_builder(
+    funcname: str, deps: list[str], newparams: dict[str, dict[str, Sequence[float]]]
+) -> BaseBuilder:
+    class _builder(BaseBuilder):
+        def __init__(self, config):
+            self.builder_data = {'funcs': {}}
+            self.config = config
+
+        def collect(self, thismod, nom):
+            maskval = True if thismod else False
+            mask = [maskval] * len(nom)
+            return {'mask': mask}
+
+        def append(self, key, channel, sample, thismod, defined_samp):
+            self.builder_data.setdefault(key, {}).setdefault(sample, {}).setdefault(
+                'data', {'mask': []}
+            )
+            nom = (
+                defined_samp['data']
+                if defined_samp
+                else [0.0] * self.config.channel_nbins[channel]
+            )
+            moddata = self.collect(thismod, nom)
+            self.builder_data[key][sample]['data']['mask'] += moddata['mask']
+            if thismod:
+                if thismod['name'] != funcname:
+                    print(thismod)
+                    self.builder_data['funcs'].setdefault(
+                        thismod['name'], thismod['data']['expr']
+                    )
+                self.required_parsets = {
+                    k: [_allocate_new_param(v)] for k, v in newparams.items()
+                }
+
+        def finalize(self):
+            return self.builder_data
+
+    return _builder
+
+
+def make_applier(
+    funcname: str, deps: list[str], newparams: dict[str, dict[str, Sequence[float]]]
+) -> BaseApplier:
+    class _applier(BaseApplier):
+        name = funcname
+        op_code = 'multiplication'
+
+        def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None):
+            self.funcs = [make_func(v, deps) for v in builder_data['funcs'].values()]
+
+            self.batch_size = batch_size
+            pars_for_applier = deps
+            _modnames = [f'{mtype}/{m}' for m, mtype in modifiers]
+
+            parfield_shape = (
+                (self.batch_size, pdfconfig.npars)
+                if self.batch_size
+                else (pdfconfig.npars,)
+            )
+            self.param_viewer = ParamViewer(
+                parfield_shape, pdfconfig.par_map, pars_for_applier
+            )
+            self._custommod_mask = [
+                [[builder_data[modname][s]['data']['mask']] for s in pdfconfig.samples]
+                for modname in _modnames
+            ]
+            self._precompute()
+            events.subscribe('tensorlib_changed')(self._precompute)
+
+        def _precompute(self):
+            tensorlib, _ = get_backend()
+            if not self.param_viewer.index_selection:
+                return
+            self.custommod_mask = tensorlib.tile(
+                tensorlib.astensor(self._custommod_mask),
+                (1, 1, self.batch_size or 1, 1),
+            )
+            self.custommod_mask_bool = tensorlib.astensor(
+                self.custommod_mask, dtype="bool"
+            )
+            self.custommod_default = tensorlib.ones(self.custommod_mask.shape)
+
+        def apply(self, pars):
+            """
+            Returns:
+                modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin)
+            """
+            if not self.param_viewer.index_selection:
+                return
+            tensorlib, _ = get_backend()
+            if self.batch_size is None:
+                deps = self.param_viewer.get(pars)
+                print('deps', deps.shape)
+                results = tensorlib.astensor([f(deps) for f in self.funcs])
+                results = tensorlib.einsum('msab,m->msab', self.custommod_mask, results)
+            else:
+                deps = self.param_viewer.get(pars)
+                print('deps', deps.shape)
+                results = tensorlib.astensor([f(deps) for f in self.funcs])
+                results = tensorlib.einsum(
+                    'msab,ma->msab', self.custommod_mask, results
+                )
+            results = tensorlib.where(
+                self.custommod_mask_bool, results, self.custommod_default
+            )
+            return results
+
+    return _applier
+
+
+def add_custom_modifier(
+    funcname: str, deps: list[str], newparams: dict[str, dict[str, Sequence[float]]]
+) -> dict[str, tuple[BaseBuilder, BaseApplier]]:
+
+    _builder = make_builder(funcname, deps, newparams)
+    _applier = make_applier(funcname, deps, newparams)
+
+    modifier_set = {_applier.name: (_builder, _applier)}
+    modifier_set.update(**pyhf.modifiers.histfactory_set)
+    return modifier_set
diff --git a/tests/test_experimental.py b/tests/test_experimental.py
new file mode 100644
index 0000000000..32ca49861c
--- /dev/null
+++ b/tests/test_experimental.py
@@ -0,0 +1,77 @@
+import pyhf
+import pyhf.experimental.modifiers
+
+
+def test_add_custom_modifier(backend):
+    tensorlib, _ = backend
+
+    new_params = {
+        'm1': {'inits': (1.0,), 'bounds': ((-5.0, 5.0),)},
+        'm2': {'inits': (1.0,), 'bounds': ((-5.0, 5.0),)},
+    }
+
+    expanded_pyhf = pyhf.experimental.modifiers.add_custom_modifier(
+        'customfunc', ['m1', 'm2'], new_params
+    )
+    model = pyhf.Model(
+        {
+            'channels': [
+                {
+                    'name': 'singlechannel',
+                    'samples': [
+                        {
+                            'name': 'signal',
+                            'data': [10] * 20,
+                            'modifiers': [
+                                {
+                                    'name': 'f2',
+                                    'type': 'customfunc',
+                                    'data': {'expr': 'm1'},
+                                },
+                            ],
+                        },
+                        {
+                            'name': 'background',
+                            'data': [100] * 20,
+                            'modifiers': [
+                                {
+                                    'name': 'f1',
+                                    'type': 'customfunc',
+                                    'data': {'expr': 'm1+(m2**2)'},
+                                },
+                            ],
+                        },
+                    ],
+                }
+            ]
+        },
+        modifier_set=expanded_pyhf,
+        poi_name='m1',
+        validate=False,
+        batch_size=1,
+    )
+
+    assert tensorlib.tolist(model.expected_actualdata([[1.0, 2.0]])) == [
+        [
+            510.0,
+            510.0,
+            510.0,
+            510.0,
+            510.0,
+            510.0,
+            510.0,
+            510.0,
+            510.0,
+            510.0,
+            510.0,
+            510.0,
+            510.0,
+            510.0,
+            510.0,
+            510.0,
+            510.0,
+            510.0,
+            510.0,
+            510.0,
+        ]
+    ]

From dffe901097e139f3eea51d1ba8f08e65b8f27b4e Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Wed, 25 Jan 2023 11:18:30 -0600
Subject: [PATCH 02/17] Add 'experimental' extra to pyproject.toml

---
 pyproject.toml | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 04d2942e9e..7a22e56f71 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -82,8 +82,9 @@ contrib = [
     "matplotlib>=3.0.0",
     "requests>=2.22.0",
 ]
+experimental = ["numexpr>=2.8.0"]
 backends = ["pyhf[tensorflow,torch,jax,minuit]"]
-all = ["pyhf[backends,xmlio,contrib,shellcomplete]"]
+all = ["pyhf[backends,xmlio,contrib,experimental,shellcomplete]"]

 # Developer extras
 test = [
@@ -105,7 +106,7 @@ test = [
     "pytest-socket>=0.2.0",  # c.f. PR #1917
 ]
 docs = [
-    "pyhf[xmlio,contrib]",
+    "pyhf[xmlio,contrib,experimental]",
     "sphinx>=7.0.0",  # c.f. https://github.com/scikit-hep/pyhf/pull/2271
     "sphinxcontrib-bibtex~=2.1",
     "sphinx-click",

From d572c67843f38f1c605108e232243c9e43c82cb7 Mon Sep 17 00:00:00 2001
From: Giordon Stark
Date: Thu, 8 Sep 2022 08:14:55 -0700
Subject: [PATCH 03/17] add is_shared

---
 src/pyhf/experimental/modifiers.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/pyhf/experimental/modifiers.py b/src/pyhf/experimental/modifiers.py
index cf4c0dc7e2..42793575f7 100644
--- a/src/pyhf/experimental/modifiers.py
+++ b/src/pyhf/experimental/modifiers.py
@@ -42,6 +42,8 @@ def make_builder(
     funcname: str, deps: list[str], newparams: dict[str, dict[str, Sequence[float]]]
 ) -> BaseBuilder:
     class _builder(BaseBuilder):
+        is_shared = False
+
         def __init__(self, config):
             self.builder_data = {'funcs': {}}
             self.config = config

From c1dcf8120aab534f57a466e8bce04586c5fbf59a Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Wed, 25 Jan 2023 11:22:25 -0600
Subject: [PATCH 04/17] format

---
 src/pyhf/experimental/modifiers.py | 48 +++++++++++++++---------------
 tests/test_experimental.py         | 38 +++++++++++------------
 2 files changed, 43 insertions(+), 43 deletions(-)

diff --git a/src/pyhf/experimental/modifiers.py b/src/pyhf/experimental/modifiers.py
index 42793575f7..167a446d25 100644
--- a/src/pyhf/experimental/modifiers.py
+++ b/src/pyhf/experimental/modifiers.py
@@ -19,13 +19,13 @@ def _allocate_new_param(
     p: dict[str, Sequence[float]]
 ) -> dict[str, str | bool | int | Sequence[float]]:
     return {
-        'paramset_type': 'unconstrained',
-        'n_parameters': 1,
-        'is_shared': True,
-        'inits': p['inits'],
-        'bounds': p['bounds'],
-        'is_scalar': True,
-        'fixed': False,
+        "paramset_type": "unconstrained",
+        "n_parameters": 1,
+        "is_shared": True,
+        "inits": p["inits"],
+        "bounds": p["bounds"],
+        "is_scalar": True,
+        "fixed": False,
     }
@@ -45,30 +45,30 @@ class _builder(BaseBuilder):
         is_shared = False

         def __init__(self, config):
-            self.builder_data = {'funcs': {}}
+            self.builder_data = {"funcs": {}}
             self.config = config

         def collect(self, thismod, nom):
             maskval = True if thismod else False
             mask = [maskval] * len(nom)
-            return {'mask': mask}
+            return {"mask": mask}

         def append(self, key, channel, sample, thismod, defined_samp):
             self.builder_data.setdefault(key, {}).setdefault(sample, {}).setdefault(
-                'data', {'mask': []}
+                "data", {"mask": []}
             )
             nom = (
-                defined_samp['data']
+                defined_samp["data"]
                 if defined_samp
                 else [0.0] * self.config.channel_nbins[channel]
             )
             moddata = self.collect(thismod, nom)
-            self.builder_data[key][sample]['data']['mask'] += moddata['mask']
+            self.builder_data[key][sample]["data"]["mask"] += moddata["mask"]
             if thismod:
-                if thismod['name'] != funcname:
+                if thismod["name"] != funcname:
                     print(thismod)
-                    self.builder_data['funcs'].setdefault(
-                        thismod['name'], thismod['data']['expr']
+                    self.builder_data["funcs"].setdefault(
+                        thismod["name"], thismod["data"]["expr"]
                     )
                 self.required_parsets = {
                     k: [_allocate_new_param(v)] for k, v in newparams.items()
                 }
@@ -85,14 +85,14 @@ def make_applier(
 ) -> BaseApplier:
     class _applier(BaseApplier):
         name = funcname
-        op_code = 'multiplication'
+        op_code = "multiplication"

         def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None):
-            self.funcs = [make_func(v, deps) for v in builder_data['funcs'].values()]
+            self.funcs = [make_func(v, deps) for v in builder_data["funcs"].values()]

             self.batch_size = batch_size
             pars_for_applier = deps
-            _modnames = [f'{mtype}/{m}' for m, mtype in modifiers]
+            _modnames = [f"{mtype}/{m}" for m, mtype in modifiers]

             parfield_shape = (
                 (self.batch_size, pdfconfig.npars)
@@ -103,11 +103,11 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None):
                 parfield_shape, pdfconfig.par_map, pars_for_applier
             )
             self._custommod_mask = [
-                [[builder_data[modname][s]['data']['mask']] for s in pdfconfig.samples]
+                [[builder_data[modname][s]["data"]["mask"]] for s in pdfconfig.samples]
                 for modname in _modnames
             ]
             self._precompute()
-            events.subscribe('tensorlib_changed')(self._precompute)
+            events.subscribe("tensorlib_changed")(self._precompute)

         def _precompute(self):
             tensorlib, _ = get_backend()
@@ -132,15 +132,15 @@ def apply(self, pars):
             tensorlib, _ = get_backend()
             if self.batch_size is None:
                 deps = self.param_viewer.get(pars)
-                print('deps', deps.shape)
+                print("deps", deps.shape)
                 results = tensorlib.astensor([f(deps) for f in self.funcs])
-                results = tensorlib.einsum('msab,m->msab', self.custommod_mask, results)
+                results = tensorlib.einsum("msab,m->msab", self.custommod_mask, results)
             else:
                 deps = self.param_viewer.get(pars)
-                print('deps', deps.shape)
+                print("deps", deps.shape)
                 results = tensorlib.astensor([f(deps) for f in self.funcs])
                 results = tensorlib.einsum(
-                    'msab,ma->msab', self.custommod_mask, results
+                    "msab,ma->msab", self.custommod_mask, results
                 )
             results = tensorlib.where(
                 self.custommod_mask_bool, results, self.custommod_default
             )
diff --git a/tests/test_experimental.py b/tests/test_experimental.py
index 32ca49861c..3f83e52aa0 100644
--- a/tests/test_experimental.py
+++ b/tests/test_experimental.py
@@ -6,38 +6,38 @@ def test_add_custom_modifier(backend):
     tensorlib, _ = backend

     new_params = {
-        'm1': {'inits': (1.0,), 'bounds': ((-5.0, 5.0),)},
-        'm2': {'inits': (1.0,), 'bounds': ((-5.0, 5.0),)},
+        "m1": {"inits": (1.0,), "bounds": ((-5.0, 5.0),)},
+        "m2": {"inits": (1.0,), "bounds": ((-5.0, 5.0),)},
     }

     expanded_pyhf = pyhf.experimental.modifiers.add_custom_modifier(
-        'customfunc', ['m1', 'm2'], new_params
+        "customfunc", ["m1", "m2"], new_params
     )
     model = pyhf.Model(
         {
-            'channels': [
+            "channels": [
                 {
-                    'name': 'singlechannel',
-                    'samples': [
+                    "name": "singlechannel",
+                    "samples": [
                         {
-                            'name': 'signal',
-                            'data': [10] * 20,
-                            'modifiers': [
+                            "name": "signal",
+                            "data": [10] * 20,
+                            "modifiers": [
                                 {
-                                    'name': 'f2',
-                                    'type': 'customfunc',
-                                    'data': {'expr': 'm1'},
+                                    "name": "f2",
+                                    "type": "customfunc",
+                                    "data": {"expr": "m1"},
                                 },
                             ],
                         },
                         {
-                            'name': 'background',
-                            'data': [100] * 20,
-                            'modifiers': [
+                            "name": "background",
+                            "data": [100] * 20,
+                            "modifiers": [
                                 {
-                                    'name': 'f1',
-                                    'type': 'customfunc',
-                                    'data': {'expr': 'm1+(m2**2)'},
+                                    "name": "f1",
+                                    "type": "customfunc",
+                                    "data": {"expr": "m1+(m2**2)"},
                                 },
                             ],
                         },
@@ -46,7 +46,7 @@
             ]
         },
         modifier_set=expanded_pyhf,
-        poi_name='m1',
+        poi_name="m1",
         validate=False,
         batch_size=1,
     )

From 10be48a38715a5ddd31c3d00ef15c394b7d9497a Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Wed, 8 Feb 2023 05:42:44 +0000
Subject: [PATCH 05/17] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 src/pyhf/experimental/modifiers.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/pyhf/experimental/modifiers.py b/src/pyhf/experimental/modifiers.py
index 167a446d25..d6b510237d 100644
--- a/src/pyhf/experimental/modifiers.py
+++ b/src/pyhf/experimental/modifiers.py
@@ -153,7 +153,6 @@ def apply(self, pars):
 def add_custom_modifier(
     funcname: str, deps: list[str], newparams: dict[str, dict[str, Sequence[float]]]
 ) -> dict[str, tuple[BaseBuilder, BaseApplier]]:
-
     _builder = make_builder(funcname, deps, newparams)
     _applier = make_applier(funcname, deps, newparams)

From f4f3c771f35d8b415a09439c0861691942245c5d Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Tue, 10 Oct 2023 13:25:36 -0500
Subject: [PATCH 06/17] Apply isort

---
 src/pyhf/experimental/modifiers.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/pyhf/experimental/modifiers.py b/src/pyhf/experimental/modifiers.py
index d6b510237d..b7d868575d 100644
--- a/src/pyhf/experimental/modifiers.py
+++ b/src/pyhf/experimental/modifiers.py
@@ -1,10 +1,10 @@
 from __future__ import annotations
+
+from typing import Any, Callable, Sequence
+
 import pyhf
+from pyhf import events, get_backend
 from pyhf.parameters import ParamViewer
-from pyhf import get_backend
-from pyhf import events
-
-from typing import Sequence, Callable, Any

From d2d3497352c2b391ec99cb07a0d177a173b23488 Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Tue, 10 Oct 2023 13:40:50 -0500
Subject: [PATCH 07/17] Make add_custom_modifier the only API and check if
 numexpr is installed

---
 src/pyhf/experimental/modifiers.py | 21 +++++++++++++++++++--
 1 file changed, 19 insertions(+), 2 deletions(-)

diff --git a/src/pyhf/experimental/modifiers.py b/src/pyhf/experimental/modifiers.py
index b7d868575d..889de1e4fc 100644
--- a/src/pyhf/experimental/modifiers.py
+++ b/src/pyhf/experimental/modifiers.py
@@ -1,11 +1,30 @@
 from __future__ import annotations

+import logging
 from typing import Any, Callable, Sequence

 import pyhf
 from pyhf import events, get_backend
 from pyhf.parameters import ParamViewer

+log = logging.getLogger(__name__)
+
+__all__ = ["add_custom_modifier"]
+
+
+def __dir__():
+    return __all__
+
+
+try:
+    import numexpr as ne
+except ModuleNotFoundError:
+    log.error(
+        "\nInstallation of the experimental extra is required to use pyhf.experimental.modifiers"
+        + "\nPlease install with: python -m pip install 'pyhf[experimental]'\n",
+        exc_info=True,
+    )
+
+
 class BaseApplier:
     ...
@@ -31,8 +50,6 @@ def make_func(expression: str, deps: list[str]) -> Callable[[Sequence[float]], Any]:
     def func(d: Sequence[float]) -> Any:
-        import numexpr as ne
-
         return ne.evaluate(expression, local_dict=dict(zip(deps, d)))

     return func

From 109a3c3c56d22cc4ed0aa75124e5bf63e903e9ef Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Tue, 10 Oct 2023 14:04:34 -0500
Subject: [PATCH 08/17] Add docstrings to experimental init

---
 src/pyhf/experimental/__init__.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/src/pyhf/experimental/__init__.py b/src/pyhf/experimental/__init__.py
index e69de29bb2..f8c3474586 100644
--- a/src/pyhf/experimental/__init__.py
+++ b/src/pyhf/experimental/__init__.py
@@ -0,0 +1,5 @@
+"""
+Experimental features for pyhf.
+
+Modules in experimental may rapidly change with API breaking changes.
+"""

From cfad34a201dee100b3772898e7ca557df59cce4e Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Tue, 10 Oct 2023 14:21:08 -0500
Subject: [PATCH 09/17] test experimental extras error if not installed

---
 src/pyhf/experimental/modifiers.py |  1 +
 tests/test_experimental.py         | 20 ++++++++++++++++++++
 2 files changed, 21 insertions(+)

diff --git a/src/pyhf/experimental/modifiers.py b/src/pyhf/experimental/modifiers.py
index 889de1e4fc..2afc19ad9f 100644
--- a/src/pyhf/experimental/modifiers.py
+++ b/src/pyhf/experimental/modifiers.py
@@ -24,6 +24,7 @@ def __dir__():
         + "\nPlease install with: python -m pip install 'pyhf[experimental]'\n",
         exc_info=True,
     )
+    raise


 class BaseApplier:
diff --git a/tests/test_experimental.py b/tests/test_experimental.py
index 3f83e52aa0..f1726b4206 100644
--- a/tests/test_experimental.py
+++ b/tests/test_experimental.py
@@ -1,7 +1,27 @@
+import sys
+from importlib import import_module, reload
+from unittest import mock
+
+import pytest
+
 import pyhf
 import pyhf.experimental.modifiers


+def test_missing_experimental_extra():
+    """
+    Verify ModuleNotFoundError if dependencies required by the experimental
+    extra are not installed.
+ """ + with mock.patch.dict(sys.modules): + sys.modules["numexpr"] = None + with pytest.raises(ModuleNotFoundError): + if "pyhf.experimental.modifiers" in sys.modules: + reload(sys.modules["pyhf.experimental.modifiers"]) + else: + import_module("pyhf.experimental.modifiers") + + def test_add_custom_modifier(backend): tensorlib, _ = backend From fabdf479b533c571aefbb2873ab1e123ca1f6949 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Tue, 10 Oct 2023 14:35:10 -0500 Subject: [PATCH 10/17] Simplify reload test --- tests/test_experimental.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/tests/test_experimental.py b/tests/test_experimental.py index f1726b4206..eff9512faa 100644 --- a/tests/test_experimental.py +++ b/tests/test_experimental.py @@ -1,5 +1,5 @@ import sys -from importlib import import_module, reload +from importlib import reload from unittest import mock import pytest @@ -16,10 +16,7 @@ def test_missing_experimental_extra(): with mock.patch.dict(sys.modules): sys.modules["numexpr"] = None with pytest.raises(ModuleNotFoundError): - if "pyhf.experimental.modifiers" in sys.modules: - reload(sys.modules["pyhf.experimental.modifiers"]) - else: - import_module("pyhf.experimental.modifiers") + reload(sys.modules["pyhf.experimental.modifiers"]) def test_add_custom_modifier(backend): From 8e1c3da50d5aff91cbd14e373248205eeed9534d Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Tue, 10 Oct 2023 14:37:58 -0500 Subject: [PATCH 11/17] Apply suggestion from Sourcery --- src/pyhf/experimental/modifiers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pyhf/experimental/modifiers.py b/src/pyhf/experimental/modifiers.py index 2afc19ad9f..ae5a273214 100644 --- a/src/pyhf/experimental/modifiers.py +++ b/src/pyhf/experimental/modifiers.py @@ -67,7 +67,7 @@ def __init__(self, config): self.config = config def collect(self, thismod, nom): - maskval = True if thismod else False + maskval = bool(thismod) mask = [maskval] * len(nom) return {"mask": mask} From c1a5f08b96620f602e6de260a1a640c9bc08c646 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Tue, 10 Oct 2023 14:43:13 -0500 Subject: [PATCH 12/17] split names with with _ --- src/pyhf/experimental/modifiers.py | 36 ++++++++++++++++-------------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/src/pyhf/experimental/modifiers.py b/src/pyhf/experimental/modifiers.py index ae5a273214..a16a2680aa 100644 --- a/src/pyhf/experimental/modifiers.py +++ b/src/pyhf/experimental/modifiers.py @@ -57,7 +57,7 @@ def func(d: Sequence[float]) -> Any: def make_builder( - funcname: str, deps: list[str], newparams: dict[str, dict[str, Sequence[float]]] + func_name: str, deps: list[str], new_params: dict[str, dict[str, Sequence[float]]] ) -> BaseBuilder: class _builder(BaseBuilder): is_shared = False @@ -83,13 +83,13 @@ def append(self, key, channel, sample, thismod, defined_samp): moddata = self.collect(thismod, nom) self.builder_data[key][sample]["data"]["mask"] += moddata["mask"] if thismod: - if thismod["name"] != funcname: + if thismod["name"] != func_name: print(thismod) self.builder_data["funcs"].setdefault( thismod["name"], thismod["data"]["expr"] ) self.required_parsets = { - k: [_allocate_new_param(v)] for k, v in newparams.items() + k: [_allocate_new_param(v)] for k, v in new_params.items() } def finalize(self): @@ -99,10 +99,10 @@ def finalize(self): def make_applier( - funcname: str, deps: list[str], newparams: dict[str, dict[str, Sequence[float]]] + func_name: str, deps: 
+    func_name: str, deps: list[str], new_params: dict[str, dict[str, Sequence[float]]]
 ) -> BaseApplier:
     class _applier(BaseApplier):
-        name = funcname
+        name = func_name
         op_code = "multiplication"

         def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None):
@@ -120,7 +120,7 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None):
             self.param_viewer = ParamViewer(
                 parfield_shape, pdfconfig.par_map, pars_for_applier
             )
-            self._custommod_mask = [
+            self._custom_mod_mask = [
                 [[builder_data[modname][s]["data"]["mask"]] for s in pdfconfig.samples]
                 for modname in _modnames
             ]
@@ -131,14 +131,14 @@ def _precompute(self):
             tensorlib, _ = get_backend()
             if not self.param_viewer.index_selection:
                 return
-            self.custommod_mask = tensorlib.tile(
-                tensorlib.astensor(self._custommod_mask),
+            self.custom_mod_mask = tensorlib.tile(
+                tensorlib.astensor(self._custom_mod_mask),
                 (1, 1, self.batch_size or 1, 1),
             )
-            self.custommod_mask_bool = tensorlib.astensor(
-                self.custommod_mask, dtype="bool"
+            self.custom_mod_mask_bool = tensorlib.astensor(
+                self.custom_mod_mask, dtype="bool"
             )
-            self.custommod_default = tensorlib.ones(self.custommod_mask.shape)
+            self.custom_mod_default = tensorlib.ones(self.custom_mod_mask.shape)

         def apply(self, pars):
             """
@@ -152,16 +152,18 @@ def apply(self, pars):
                 deps = self.param_viewer.get(pars)
                 print("deps", deps.shape)
                 results = tensorlib.astensor([f(deps) for f in self.funcs])
-                results = tensorlib.einsum("msab,m->msab", self.custommod_mask, results)
+                results = tensorlib.einsum(
+                    "msab,m->msab", self.custom_mod_mask, results
+                )
             else:
                 deps = self.param_viewer.get(pars)
                 print("deps", deps.shape)
                 results = tensorlib.astensor([f(deps) for f in self.funcs])
                 results = tensorlib.einsum(
-                    "msab,ma->msab", self.custommod_mask, results
+                    "msab,ma->msab", self.custom_mod_mask, results
                 )
             results = tensorlib.where(
-                self.custommod_mask_bool, results, self.custommod_default
+                self.custom_mod_mask_bool, results, self.custom_mod_default
             )
             return results
@@ -169,10 +171,10 @@ def apply(self, pars):
 def add_custom_modifier(
-    funcname: str, deps: list[str], newparams: dict[str, dict[str, Sequence[float]]]
+    func_name: str, deps: list[str], new_params: dict[str, dict[str, Sequence[float]]]
 ) -> dict[str, tuple[BaseBuilder, BaseApplier]]:
-    _builder = make_builder(funcname, deps, newparams)
-    _applier = make_applier(funcname, deps, newparams)
+    _builder = make_builder(func_name, deps, new_params)
+    _applier = make_applier(func_name, deps, new_params)

     modifier_set = {_applier.name: (_builder, _applier)}
     modifier_set.update(**pyhf.modifiers.histfactory_set)
     return modifier_set

From 520df697eab059036964648f96d4c0bd503fdea5 Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Tue, 10 Oct 2023 14:45:10 -0500
Subject: [PATCH 13/17] Install experimental extra in Docker image

---
 docker/Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker/Dockerfile b/docker/Dockerfile
index 50c2f31e95..c18f61b373 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -16,7 +16,7 @@ RUN apt-get -qq -y update && \
     python -m venv /usr/local/venv && \
     cd /code && \
     python -m pip --no-cache-dir install --upgrade pip setuptools wheel && \
-    python -m pip --no-cache-dir install '.[xmlio,contrib]' && \
+    python -m pip --no-cache-dir install '.[xmlio,contrib,experimental]' && \
     python -m pip list

 FROM base

From 0bf30b863f3cca648e3e69f1109f80118194fa1b Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Tue, 10 Oct 2023 14:53:24 -0500
Subject: [PATCH 14/17] split name on _ again

---
 src/pyhf/experimental/modifiers.py | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/src/pyhf/experimental/modifiers.py b/src/pyhf/experimental/modifiers.py
index a16a2680aa..daf2471dca 100644
--- a/src/pyhf/experimental/modifiers.py
+++ b/src/pyhf/experimental/modifiers.py
@@ -71,17 +71,17 @@ def collect(self, thismod, nom):
             mask = [maskval] * len(nom)
             return {"mask": mask}

-        def append(self, key, channel, sample, thismod, defined_samp):
+        def append(self, key, channel, sample, thismod, defined_sample):
             self.builder_data.setdefault(key, {}).setdefault(sample, {}).setdefault(
                 "data", {"mask": []}
             )
             nom = (
-                defined_samp["data"]
-                if defined_samp
+                defined_sample["data"]
+                if defined_sample
                 else [0.0] * self.config.channel_nbins[channel]
             )
-            moddata = self.collect(thismod, nom)
-            self.builder_data[key][sample]["data"]["mask"] += moddata["mask"]
+            mod_data = self.collect(thismod, nom)
+            self.builder_data[key][sample]["data"]["mask"] += mod_data["mask"]
             if thismod:
                 if thismod["name"] != func_name:
                     print(thismod)
@@ -110,7 +110,7 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None):
             self.batch_size = batch_size
             pars_for_applier = deps
-            _modnames = [f"{mtype}/{m}" for m, mtype in modifiers]
+            _mod_names = [f"{mtype}/{m}" for m, mtype in modifiers]

             parfield_shape = (
                 (self.batch_size, pdfconfig.npars)
@@ -121,8 +121,8 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None):
                 parfield_shape, pdfconfig.par_map, pars_for_applier
             )
             self._custom_mod_mask = [
-                [[builder_data[modname][s]["data"]["mask"]] for s in pdfconfig.samples]
-                for modname in _modnames
+                [[builder_data[mod_name][s]["data"]["mask"]] for s in pdfconfig.samples]
+                for mod_name in _mod_names
             ]
             self._precompute()
             events.subscribe("tensorlib_changed")(self._precompute)

From d9ccf670cf279660923852338627f3e330a122fc Mon Sep 17 00:00:00 2001
From: Matthew Feickert
Date: Tue, 10 Oct 2023 15:39:16 -0500
Subject: [PATCH 15/17] docs: Add docstring for public method

---
 docs/api.rst                       | 11 +++++
 src/pyhf/experimental/modifiers.py | 66 ++++++++++++++++++++++++++++++
 2 files changed, 77 insertions(+)

diff --git a/docs/api.rst b/docs/api.rst
index 56f65a211a..f35903af05 100644
--- a/docs/api.rst
+++ b/docs/api.rst
@@ -226,6 +226,17 @@ Utilities
     digest
     citation

+Experimental
+------------
+
+.. currentmodule:: pyhf.experimental
+
+.. autosummary::
+   :toctree: _generated/
+   :nosignatures:
+
+   modifiers
+
 Contrib
 -------

diff --git a/src/pyhf/experimental/modifiers.py b/src/pyhf/experimental/modifiers.py
index daf2471dca..620fcf2641 100644
--- a/src/pyhf/experimental/modifiers.py
+++ b/src/pyhf/experimental/modifiers.py
@@ -173,6 +173,72 @@ def apply(self, pars):
 def add_custom_modifier(
     func_name: str, deps: list[str], new_params: dict[str, dict[str, Sequence[float]]]
 ) -> dict[str, tuple[BaseBuilder, BaseApplier]]:
+    r"""
+    Add a custom modifier type with the modifier data defined through a custom
+    numexpr string expression.
+
+    Example:
+
+        >>> import pyhf
+        >>> import pyhf.experimental.modifiers
+        >>> pyhf.set_backend("numpy")
+        >>> new_params = {
+        ...     "m1": {"inits": (1.0,), "bounds": ((-5.0, 5.0),)},
+        ...     "m2": {"inits": (1.0,), "bounds": ((-5.0, 5.0),)},
+        ... }
+        >>> expanded_pyhf = pyhf.experimental.modifiers.add_custom_modifier(
+        ...     "custom", ["m1", "m2"], new_params
+        ... )
+        >>> model = pyhf.Model(
+        ...     {
+        ...         "channels": [
+        ...             {
+        ...                 "name": "singlechannel",
+        ...                 "samples": [
+        ...                     {
+        ...                         "name": "signal",
+        ...                         "data": [10, 20],
"modifiers": [ + ... { + ... "name": "f2", + ... "type": "custom", + ... "data": {"expr": "m1"}, + ... }, + ... ], + ... }, + ... { + ... "name": "background", + ... "data": [100, 150], + ... "modifiers": [ + ... { + ... "name": "f1", + ... "type": "custom", + ... "data": {"expr": "m1+(m2**2)"}, + ... }, + ... ], + ... }, + ... ], + ... } + ... ] + ... }, + ... modifier_set=expanded_pyhf, + ... poi_name="m1", + ... validate=False, + ... batch_size=1, + ... ) + >>> model.config.modifiers + [('f1', 'custom'), ('f2', 'custom')] + + Args: + func_name (:obj:`str`): The name of the custom modifier type. + deps (:obj:`list`): The names of the new parameters of the modifier + function. + new_params (:obj:`dict`): The new parameters. + + Returns: + :obj:`dict`: The updated ``pyhf.modifiers.histfactory_set`` with the added + custom modifier type. + """ _builder = make_builder(func_name, deps, new_params) _applier = make_applier(func_name, deps, new_params) From 5137845ce5de81028acc889d98f0df49262b55c6 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Tue, 10 Oct 2023 19:13:43 -0500 Subject: [PATCH 16/17] Add versionadded tag to docstring --- src/pyhf/experimental/modifiers.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/pyhf/experimental/modifiers.py b/src/pyhf/experimental/modifiers.py index 620fcf2641..829d3a461d 100644 --- a/src/pyhf/experimental/modifiers.py +++ b/src/pyhf/experimental/modifiers.py @@ -238,6 +238,8 @@ def add_custom_modifier( Returns: :obj:`dict`: The updated ``pyhf.modifiers.histfactory_set`` with the added custom modifier type. + + .. versionadded:: 0.8.0 """ _builder = make_builder(func_name, deps, new_params) _applier = make_applier(func_name, deps, new_params) From bb302c2423f702a489a42c8311dc412388e42bd9 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Tue, 10 Oct 2023 19:15:16 -0500 Subject: [PATCH 17/17] remove print commands (REVERT IF NEEDED FOR TESTING) --- src/pyhf/experimental/modifiers.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/pyhf/experimental/modifiers.py b/src/pyhf/experimental/modifiers.py index 829d3a461d..19b29ac3ed 100644 --- a/src/pyhf/experimental/modifiers.py +++ b/src/pyhf/experimental/modifiers.py @@ -84,7 +84,6 @@ def append(self, key, channel, sample, thismod, defined_sample): self.builder_data[key][sample]["data"]["mask"] += mod_data["mask"] if thismod: if thismod["name"] != func_name: - print(thismod) self.builder_data["funcs"].setdefault( thismod["name"], thismod["data"]["expr"] ) @@ -150,14 +149,12 @@ def apply(self, pars): tensorlib, _ = get_backend() if self.batch_size is None: deps = self.param_viewer.get(pars) - print("deps", deps.shape) results = tensorlib.astensor([f(deps) for f in self.funcs]) results = tensorlib.einsum( "msab,m->msab", self.custom_mod_mask, results ) else: deps = self.param_viewer.get(pars) - print("deps", deps.shape) results = tensorlib.astensor([f(deps) for f in self.funcs]) results = tensorlib.einsum( "msab,ma->msab", self.custom_mod_mask, results