-
Notifications
You must be signed in to change notification settings - Fork 85
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
feat: Experimental implementation of custom modifiers #1991
Draft
kratsg
wants to merge
17
commits into
main
Choose a base branch
from
feat/addCustomModifierPython
base: main
Could not load branches
Branch not found: {{ refName }}
Loading
Could not load tags
Nothing to show
Loading
Are you sure you want to change the base?
Some commits from the old base branch may be removed from the timeline,
and old review comments may become outdated.
Draft
Changes from 12 commits
Commits
Show all changes
17 commits
Select commit
Hold shift + click to select a range
6a306d3
add in the notebook from lukas
kratsg dffe901
Add 'experimental' extra to pyproject.toml
matthewfeickert d572c67
add is_shared
kratsg c1dcf81
format
matthewfeickert 10be48a
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] f4f3c77
Apply isort
matthewfeickert d2d3497
Make add_custom_modifier the only API and check if numexpr is installed
matthewfeickert 109a3c3
Add docstrints to experimental init
matthewfeickert cfad34a
test experimental extras error if not installed
matthewfeickert fabdf47
Simplify reload test
matthewfeickert 8e1c3da
Apply suggestion from Sourcery
matthewfeickert c1a5f08
split names with with _
matthewfeickert 520df69
Install experimental extra in Docker image
matthewfeickert 0bf30b8
split name on _ again
matthewfeickert d9ccf67
docs: Add docstring for public method
matthewfeickert 5137845
Add versionadded tag to docstring
matthewfeickert bb302c2
remove print commands (REVERT IF NEEDED FOR TESTING)
matthewfeickert File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
""" | ||
Experimental features for pyhf. | ||
|
||
Modules in experimental may rapidly change with API breaking changes. | ||
""" |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,181 @@ | ||
from __future__ import annotations | ||
|
||
import logging | ||
from typing import Any, Callable, Sequence | ||
|
||
import pyhf | ||
from pyhf import events, get_backend | ||
from pyhf.parameters import ParamViewer | ||
|
||
log = logging.getLogger(__name__) | ||
|
||
__all__ = ["add_custom_modifier"] | ||
|
||
|
||
def __dir__():
    """Return the public names exposed by this module (see ``__all__``)."""
    return __all__
|
||
|
||
try:
    import numexpr as ne
except ModuleNotFoundError:
    # Fail loudly at import time: this module is unusable without numexpr,
    # which ships in the 'experimental' extra.
    log.error(
        "\nInstallation of the experimental extra is required to use pyhf.experimental.modifiers"
        "\nPlease install with: python -m pip install 'pyhf[experimental]'\n",
        exc_info=True,
    )
    raise
|
||
|
||
class BaseApplier:
    """Marker base class for dynamically generated modifier appliers."""
|
||
|
||
class BaseBuilder:
    """Marker base class for dynamically generated modifier builders."""
|
||
|
||
def _allocate_new_param( | ||
p: dict[str, Sequence[float]] | ||
) -> dict[str, str | bool | int | Sequence[float]]: | ||
return { | ||
"paramset_type": "unconstrained", | ||
"n_parameters": 1, | ||
"is_shared": True, | ||
"inits": p["inits"], | ||
"bounds": p["bounds"], | ||
"is_scalar": True, | ||
"fixed": False, | ||
} | ||
|
||
|
||
def make_func(expression: str, deps: list[str]) -> Callable[[Sequence[float]], Any]:
    """
    Compile ``expression`` into a callable evaluated lazily with numexpr.

    Args:
        expression: A numexpr-compatible expression string.
        deps: Parameter names, positionally matched to the callable's input.

    Returns:
        A function mapping a sequence of parameter values (in ``deps`` order)
        to the evaluated expression result.
    """

    def _evaluate(values: Sequence[float]) -> Any:
        local_dict = dict(zip(deps, values))
        return ne.evaluate(expression, local_dict=local_dict)

    return _evaluate
|
||
|
||
def make_builder(
    func_name: str, deps: list[str], new_params: dict[str, dict[str, Sequence[float]]]
) -> BaseBuilder:
    """
    Construct the builder class for a custom modifier type.

    Args:
        func_name: Name of the custom modifier type.
        deps: Parameter names the modifier expressions depend on.
            NOTE(review): unused in the builder itself; kept for signature
            parity with ``make_applier``.
        new_params: Mapping of new parameter name to its ``inits``/``bounds``.

    Returns:
        A ``BaseBuilder`` subclass that accumulates per-sample bin masks and
        the expression string of each modifier instance.
    """

    class _builder(BaseBuilder):
        is_shared = False

        def __init__(self, config):
            # "funcs" maps modifier instance name -> expression string.
            self.builder_data = {"funcs": {}}
            self.config = config

        def collect(self, thismod, nom):
            # Mask every bin on (True) when the modifier is present on this
            # sample, off (False) otherwise.
            maskval = bool(thismod)
            mask = [maskval] * len(nom)
            return {"mask": mask}

        def append(self, key, channel, sample, thismod, defined_samp):
            self.builder_data.setdefault(key, {}).setdefault(sample, {}).setdefault(
                "data", {"mask": []}
            )
            # Fall back to a zeroed nominal when the sample is not defined,
            # so the mask still covers every bin of the channel.
            nom = (
                defined_samp["data"]
                if defined_samp
                else [0.0] * self.config.channel_nbins[channel]
            )
            moddata = self.collect(thismod, nom)
            self.builder_data[key][sample]["data"]["mask"] += moddata["mask"]
            if thismod:
                # Leftover debug print of mismatched modifier names removed.
                # Record each modifier instance's expression only once.
                self.builder_data["funcs"].setdefault(
                    thismod["name"], thismod["data"]["expr"]
                )
                self.required_parsets = {
                    k: [_allocate_new_param(v)] for k, v in new_params.items()
                }

        def finalize(self):
            return self.builder_data

    return _builder
|
||
|
||
def make_applier(
    func_name: str, deps: list[str], new_params: dict[str, dict[str, Sequence[float]]]
) -> BaseApplier:
    """
    Construct the applier class for a custom multiplicative modifier type.

    Args:
        func_name: Name of the custom modifier type.
        deps: Parameter names the modifier expressions depend on; these are
            the parameters extracted from the fit parameters for evaluation.
        new_params: Mapping of new parameter name to its ``inits``/``bounds``.
            NOTE(review): unused in the applier itself; kept for signature
            parity with ``make_builder``.

    Returns:
        A ``BaseApplier`` subclass that evaluates the collected expressions
        and applies them multiplicatively to masked sample bins.
    """

    class _applier(BaseApplier):
        name = func_name
        op_code = "multiplication"

        def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None):
            # One compiled evaluator per recorded modifier expression.
            self.funcs = [make_func(v, deps) for v in builder_data["funcs"].values()]

            self.batch_size = batch_size
            pars_for_applier = deps
            _modnames = [f"{mtype}/{m}" for m, mtype in modifiers]

            parfield_shape = (
                (self.batch_size, pdfconfig.npars)
                if self.batch_size
                else (pdfconfig.npars,)
            )
            self.param_viewer = ParamViewer(
                parfield_shape, pdfconfig.par_map, pars_for_applier
            )
            # Nested mask: (modifier, sample, 1, bins) built from builder data.
            self._custom_mod_mask = [
                [[builder_data[modname][s]["data"]["mask"]] for s in pdfconfig.samples]
                for modname in _modnames
            ]
            self._precompute()
            # Rebuild tensors whenever the active backend changes.
            events.subscribe("tensorlib_changed")(self._precompute)

        def _precompute(self):
            tensorlib, _ = get_backend()
            if not self.param_viewer.index_selection:
                return
            # Tile the mask along the batch axis (1 when unbatched).
            self.custom_mod_mask = tensorlib.tile(
                tensorlib.astensor(self._custom_mod_mask),
                (1, 1, self.batch_size or 1, 1),
            )
            self.custom_mod_mask_bool = tensorlib.astensor(
                self.custom_mod_mask, dtype="bool"
            )
            # Multiplicative identity for bins the modifier does not touch.
            self.custom_mod_default = tensorlib.ones(self.custom_mod_mask.shape)

        def apply(self, pars):
            """
            Returns:
                modification tensor: Shape (n_modifiers, n_global_samples, n_alphas, n_global_bin)
            """
            if not self.param_viewer.index_selection:
                return
            tensorlib, _ = get_backend()
            # Leftover debug prints of deps.shape removed from both branches.
            if self.batch_size is None:
                deps = self.param_viewer.get(pars)
                results = tensorlib.astensor([f(deps) for f in self.funcs])
                results = tensorlib.einsum(
                    "msab,m->msab", self.custom_mod_mask, results
                )
            else:
                deps = self.param_viewer.get(pars)
                results = tensorlib.astensor([f(deps) for f in self.funcs])
                results = tensorlib.einsum(
                    "msab,ma->msab", self.custom_mod_mask, results
                )
            # Unmasked bins fall back to the multiplicative identity.
            results = tensorlib.where(
                self.custom_mod_mask_bool, results, self.custom_mod_default
            )
            return results

    return _applier
|
||
|
||
def add_custom_modifier(
    func_name: str, deps: list[str], new_params: dict[str, dict[str, Sequence[float]]]
) -> dict[str, tuple[BaseBuilder, BaseApplier]]:
    """
    Create a pyhf modifier set extended with one custom modifier type.

    Args:
        func_name: Name of the new custom modifier type.
        deps: Parameter names the custom expressions depend on.
        new_params: Mapping of new parameter name to its ``inits``/``bounds``.

    Returns:
        The standard HistFactory modifier set plus the custom
        (builder, applier) pair, suitable for ``pyhf.Model(modifier_set=...)``.
    """
    builder = make_builder(func_name, deps, new_params)
    applier = make_applier(func_name, deps, new_params)

    # HistFactory entries take precedence on any name collision, matching
    # the original update() semantics of the custom entry then the standard set.
    return {applier.name: (builder, applier), **pyhf.modifiers.histfactory_set}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,94 @@ | ||
import sys | ||
from importlib import reload | ||
from unittest import mock | ||
|
||
import pytest | ||
|
||
import pyhf | ||
import pyhf.experimental.modifiers | ||
|
||
|
||
def test_missing_experimental_extra():
    """
    Verify ModuleNotFoundError if dependencies required of the experimental
    extra are not installed.
    """
    # Shadow numexpr with None so its import fails, then force re-import.
    with mock.patch.dict(sys.modules, {"numexpr": None}):
        with pytest.raises(ModuleNotFoundError):
            reload(sys.modules["pyhf.experimental.modifiers"])
|
||
|
||
def test_add_custom_modifier(backend):
    """Custom 'customfunc' modifiers evaluate their expressions per sample."""
    tensorlib, _ = backend

    new_params = {
        "m1": {"inits": (1.0,), "bounds": ((-5.0, 5.0),)},
        "m2": {"inits": (1.0,), "bounds": ((-5.0, 5.0),)},
    }

    expanded_pyhf = pyhf.experimental.modifiers.add_custom_modifier(
        "customfunc", ["m1", "m2"], new_params
    )

    signal_sample = {
        "name": "signal",
        "data": [10] * 20,
        "modifiers": [
            {
                "name": "f2",
                "type": "customfunc",
                "data": {"expr": "m1"},
            },
        ],
    }
    background_sample = {
        "name": "background",
        "data": [100] * 20,
        "modifiers": [
            {
                "name": "f1",
                "type": "customfunc",
                "data": {"expr": "m1+(m2**2)"},
            },
        ],
    }
    model = pyhf.Model(
        {
            "channels": [
                {
                    "name": "singlechannel",
                    "samples": [signal_sample, background_sample],
                }
            ]
        },
        modifier_set=expanded_pyhf,
        poi_name="m1",
        validate=False,
        batch_size=1,
    )

    # m1=1.0, m2=2.0: signal scales by m1=1, background by m1+m2**2=5,
    # so every bin is 10*1 + 100*5 = 510.
    assert tensorlib.tolist(model.expected_actualdata([[1.0, 2.0]])) == [[510.0] * 20]
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I've removed the prints in bb302c2, but if we need them back for debugging quickly, just revert it.