From b119fd18487d3b4cd7e72e4f7ab303cad3ce2836 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Tue, 25 Apr 2023 16:15:26 -0500 Subject: [PATCH 01/42] ci: Use PyPI Trusted Publisher for publishing package (#2183) * Split the build and publish steps into two separate jobs. The 'build' job builds and checks the distributions and then uploads them as a job artifact. The 'publish' job downloads the required artifact from the 'build' job and the publishes them to TestPyPI or PyPI if the typical publishing requirements are met. * Use the OpenID Connect (OIDC) standard to publish to PyPI and TestPyPI using PyPI's "Trusted Publisher" implementation to publish without using API tokens stored as GitHub Actions secrets. Use an optional GitHub Actions environment to further restrict publishing to selected branches ('main' and 'release/*') for additional security. - c.f. https://blog.pypi.org/posts/2023-04-20-introducing-trusted-publishers/ - c.f. https://docs.pypi.org/trusted-publishers/ --- .github/workflows/publish-package.yml | 35 ++++++++++++++++++++++++--- 1 file changed, 31 insertions(+), 4 deletions(-) diff --git a/.github/workflows/publish-package.yml b/.github/workflows/publish-package.yml index 2b5aa67ffa..e43342d035 100644 --- a/.github/workflows/publish-package.yml +++ b/.github/workflows/publish-package.yml @@ -25,8 +25,8 @@ concurrency: cancel-in-progress: true jobs: - build-and-publish: - name: Build and publish Python distro to (Test)PyPI + build: + name: Build Python distribution runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 @@ -88,6 +88,35 @@ jobs: - name: List contents of wheel run: python -m zipfile --list dist/pyhf-*.whl + - name: Upload distribution artifact + uses: actions/upload-artifact@v3 + with: + name: dist-artifact + path: dist + + publish: + name: Publish Python distribution to (Test)PyPI + if: github.event_name != 'pull_request' + needs: build + runs-on: ubuntu-latest + # Mandatory for publishing with a trusted publisher + # c.f. https://docs.pypi.org/trusted-publishers/using-a-publisher/ + permissions: + id-token: write + # Restrict to the environment set for the trusted publisher + environment: + name: publish-package + + steps: + - name: Download distribution artifact + uses: actions/download-artifact@v3 + with: + name: dist-artifact + path: dist + + - name: List all files + run: ls -lh dist + - name: Publish distribution 📦 to Test PyPI # Publish to TestPyPI on tag events of if manually triggered # Compare to 'true' string as booleans get turned into strings in the console @@ -96,7 +125,6 @@ jobs: || (github.event_name == 'workflow_dispatch' && github.event.inputs.publish == 'true' && github.repository == 'scikit-hep/pyhf') uses: pypa/gh-action-pypi-publish@v1.8.5 with: - password: ${{ secrets.test_pypi_password }} repository-url: https://test.pypi.org/legacy/ print-hash: true @@ -104,5 +132,4 @@ jobs: if: github.event_name == 'release' && github.event.action == 'published' && github.repository == 'scikit-hep/pyhf' uses: pypa/gh-action-pypi-publish@v1.8.5 with: - password: ${{ secrets.pypi_password }} print-hash: true From 3b685914fe711c3c996bf3c422909dd7a7a1b460 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Wed, 26 Apr 2023 10:49:28 -0500 Subject: [PATCH 02/42] docs: Use Plausible for page visit statistics (#2186) * Use the Scientific Python org's Plausible instance to collect page visit statistics for the pyhf docs by adding it to the html_js_files in the Sphinx config. - c.f. 
https://views.scientific-python.org/pyhf.readthedocs.io/ --- docs/conf.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/conf.py b/docs/conf.py index e48765b18b..51bec49927 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -274,6 +274,10 @@ def setup(app): html_js_files = [ 'js/custom.js', + ( + 'https://views.scientific-python.org/js/plausible.js', + {"data-domain": "pyhf.readthedocs.io", "defer": "defer"}, + ), ] # Add any extra paths that contain custom files (such as robots.txt or From b0e8f93f2413ab087f61ec723ffdc524c393704b Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Thu, 27 Apr 2023 15:14:50 -0500 Subject: [PATCH 03/42] chore: Remove GKE mybinder build (#2189) * The GKE Binder Federation instance is being taken offline on 2023-04-28 and so requests made to gke.mybinder.org will fail. To keep the GHA workflow from failing remove the GKE build triggers. - c.f. https://blog.jupyter.org/mybinder-org-reducing-capacity-c93ccfc6413f --- .github/workflows/merged.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/merged.yml b/.github/workflows/merged.yml index 2f6342cf1e..3ea8a5cc4d 100644 --- a/.github/workflows/merged.yml +++ b/.github/workflows/merged.yml @@ -15,5 +15,4 @@ jobs: - name: Trigger Binder build run: | # Use Binder build API to trigger repo2docker to build image on Google Cloud and Turing Institute Binder Federation clusters - bash binder/trigger_binder.sh https://gke.mybinder.org/build/gh/scikit-hep/pyhf/main bash binder/trigger_binder.sh https://turing.mybinder.org/build/gh/scikit-hep/pyhf/main From b8e942aa6d9428ddeb880284c4cffd446537c577 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Fri, 28 Apr 2023 12:04:57 -0500 Subject: [PATCH 04/42] docs: Use jupyterlite-sphinx to embed JupyterLite in docs (#2187) * Add `jupyterlite-sphinx`, `jupyterlite-pyodide-kernel`, and `jupytext` to 'docs' extra. * Add docs/lite/jupyterlite.py as jupytext 'py:percent' textfile representation of jupyterlite demo notebook. * Use jupyterlite-sphinx extension to embed JupyterLite in the docs. - Use jupytext to convert the jupyterlite.py Python file into a Jupyter notebook at docs build time. - Use the retrolite directive to embed the RetroLab Lite JupyterLab distribution in the docs and load the converted notebook at launch time. This also nicely requires the user to request installing and loading as opposed to doing so on page load. - Use the jupyterlite memoryStorageDriver to disable local storage to avoid users having to manually clear their browser cache to reset to a default state. * Remove docs/generate_jupyterlite_iframe.py in favor of embedding JupyterLite. * Add ignores for pre-commit hooks for the docs/lite/jupyterlite.py and add check in docs building that the built html/lite directory is not empty. 
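For reference, the build-time conversion described above reduces to a few lines of jupytext; this is only a condensed sketch of the mechanism (the real implementation is the docs/conf.py hunk below, which resolves the paths relative to conf.py):

    from pathlib import Path

    import jupytext

    # Convert the py:percent text representation into a real notebook file
    lite_dir = Path("docs") / "lite"  # illustrative path; conf.py computes its own
    notebook = jupytext.read(lite_dir / "jupyterlite.py")
    jupytext.write(notebook, lite_dir / "jupyterlite.ipynb", fmt="ipynb")

The jupyterlite-sphinx retrolite directive then embeds the generated notebook, so only the py:percent source needs to be kept in version control.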
Co-authored-by: Angus Hollands --- .github/workflows/docs.yml | 2 ++ docs/conf.py | 16 ++++++++++++++- docs/generate_jupyterlite_iframe.py | 30 ----------------------------- docs/jupyterlite.rst | 28 ++++++--------------------- docs/lite/jupyter-lite.json | 8 ++++++++ docs/lite/jupyterlite.py | 27 ++++++++++++++++++++++++++ docs/lite/jupytext.toml | 3 +++ noxfile.py | 2 +- pyproject.toml | 6 ++++++ tbump.toml | 5 +---- 10 files changed, 69 insertions(+), 58 deletions(-) delete mode 100644 docs/generate_jupyterlite_iframe.py create mode 100644 docs/lite/jupyter-lite.json create mode 100644 docs/lite/jupyterlite.py create mode 100644 docs/lite/jupytext.toml diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 65a49e5039..4ca72e4ff4 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -78,6 +78,8 @@ jobs: [ ! -L "docs/_build/html/schemas" ] # is not empty [ "$(ls -A docs/_build/html/schemas)" ] + # is not empty + [ "$(ls -A docs/_build/html/lite)" ] - name: Upload artifact uses: actions/upload-pages-artifact@v1 diff --git a/docs/conf.py b/docs/conf.py index 51bec49927..e23a9d1744 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -15,12 +15,21 @@ # add these directories to sys.path here. If the directory is relative to the # documentation root, use Path('../relative_path_to_dir').resolve() to make it absolute, like shown here. -from pathlib import Path import sys +from pathlib import Path + +import jupytext from pkg_resources import get_distribution sys.path.insert(0, str(Path('./exts').resolve())) +# Convert jupyterlite example to ipynb +docs_dir = Path(__file__).resolve().parent +py_percent_as_notebook = jupytext.read(docs_dir / "lite" / "jupyterlite.py") +jupytext.write( + py_percent_as_notebook, docs_dir / "lite" / "jupyterlite.ipynb", fmt="ipynb" +) + def setup(app): app.add_css_file( @@ -54,6 +63,7 @@ def setup(app): 'sphinx_copybutton', 'sphinx_togglebutton', 'xref', + 'jupyterlite_sphinx', ] bibtex_bibfiles = [ "bib/docs.bib", @@ -177,6 +187,7 @@ def setup(app): exclude_patterns = [ '_build', 'JOSS', + 'lite', '**.ipynb_checkpoints', 'examples/experiments/edwardpyhf.ipynb', 'examples/notebooks/ImpactPlot.ipynb', @@ -510,3 +521,6 @@ def setup(app): r'https://pyhf.readthedocs.io/en/.*', ] linkcheck_retries = 50 + +# JupyterLite configuration +jupyterlite_dir = "lite" diff --git a/docs/generate_jupyterlite_iframe.py b/docs/generate_jupyterlite_iframe.py deleted file mode 100644 index 31a49feac8..0000000000 --- a/docs/generate_jupyterlite_iframe.py +++ /dev/null @@ -1,30 +0,0 @@ -import urllib.parse - - -def main(): - code = """\ -import piplite -await piplite.install(["pyhf==0.7.1", "matplotlib>=3.0.0"]) -%matplotlib inline -import pyhf\ -""" - - parsed_url = urllib.parse.quote(code) - url_base = "https://jupyterlite.github.io/demo/repl/index.html" - jupyterlite_options = "?kernel=python&toolbar=1&code=" - jupyterlite_url = url_base + jupyterlite_options + parsed_url - - print(f"# jupyterlite URL:\n{jupyterlite_url}") - - jupyterlite_iframe_rst = f"""\ - \ -""" - print(f"\n# RST for iframe for jupyterlite.rst:\n{jupyterlite_iframe_rst}") - - -if __name__ == "__main__": - raise SystemExit(main()) diff --git a/docs/jupyterlite.rst b/docs/jupyterlite.rst index 88b942bb58..2fc7e4ba35 100644 --- a/docs/jupyterlite.rst +++ b/docs/jupyterlite.rst @@ -1,30 +1,14 @@ Try out now with JupyterLite_ ----------------------------- -.. admonition:: To try pyhf code in the browser with the Pyolite JupyterLite kernel: - :class: dropdown - - #. 
Type (or copy and paste) code in the input cell. - #. To execute the code, press ``Shift + Enter`` or click on the (Run) ▶ button in the toolbar. - - To get going try copying and pasting the "Hello World" example below! - - .. admonition:: Kernel not loading? - :class: dropdown - - If the kernel isn't loading and you just see a blank iframe, you will need your - browser to accept cookies from jupyterlite.github.io and then reload the page. - .. - Comment: iframe below generated by docs/generate_jupyterlite_iframe.py - -.. raw:: html + Comment: Use https://github.com/jupyterlite/jupyterlite-sphinx - +.. retrolite:: lite/jupyterlite.ipynb + :width: 100% + :height: 600px + :prompt: Try pyhf! + :prompt_color: #3a77b0 .. Comment: Add an extra blank line as a spacer diff --git a/docs/lite/jupyter-lite.json b/docs/lite/jupyter-lite.json new file mode 100644 index 0000000000..db3ecaa59d --- /dev/null +++ b/docs/lite/jupyter-lite.json @@ -0,0 +1,8 @@ +{ + "jupyter-lite-schema-version": 0, + "jupyter-config-data": { + "enableMemoryStorage": true, + "settingsStorageDrivers": ["memoryStorageDriver"], + "contentsStorageDrivers": ["memoryStorageDriver"] + } +} diff --git a/docs/lite/jupyterlite.py b/docs/lite/jupyterlite.py new file mode 100644 index 0000000000..62c8e70ab6 --- /dev/null +++ b/docs/lite/jupyterlite.py @@ -0,0 +1,27 @@ +# --- +# jupyter: +# kernelspec: +# display_name: Python (Pyodide) +# language: python +# name: python +# --- + +# %% [markdown] +# # `pyhf` in the browser + +# %% [markdown] +# * To run the code, click on the first cell (gray box) and press Shift+Enter or click on the (Run) ▶ button to run each cell. +# * Alternatively, from the `Run` menu select `Run All Cells`. +# * Feel free to experiment, and if you need to restore the original code reload this browser page. Any changes you make will be lost when you reload. +# +# To get going try copying and pasting the "Hello World" example below! + +# %% +import piplite + +# Install pyhf in the browser +await piplite.install(["pyhf==0.7.1", "matplotlib>=3.0.0"]) +# %matplotlib inline +import pyhf + +# You can now use pyhf! diff --git a/docs/lite/jupytext.toml b/docs/lite/jupytext.toml new file mode 100644 index 0000000000..2e5c093f23 --- /dev/null +++ b/docs/lite/jupytext.toml @@ -0,0 +1,3 @@ +# Always pair ipynb notebooks in the current directory to py:percent files +formats = ["ipynb", "py:percent"] +notebook_metadata_filter = "-all,kernelspec" diff --git a/noxfile.py b/noxfile.py index a01049cbfc..ba75fae440 100644 --- a/noxfile.py +++ b/noxfile.py @@ -130,7 +130,7 @@ def docs(session): Example: - $ nox --session docs -- serve + $ nox --session docs -- serve # Need for local jupyterlite preview $ nox --session docs -- clean """ session.install("--upgrade", "--editable", ".[backends,contrib,docs]") diff --git a/pyproject.toml b/pyproject.toml index ba5659fe6f..011c566e30 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -114,6 +114,9 @@ docs = [ "sphinx-issues", "sphinx-copybutton>=0.3.2", "sphinx-togglebutton>=0.3.0", + "jupyterlite-sphinx>=0.8.0", + "jupyterlite-pyodide-kernel>=0.0.7", + "jupytext>=1.14.0", "ipython!=8.7.0", # c.f. 
https://github.com/scikit-hep/pyhf/pull/2068 ] develop = [ @@ -296,3 +299,6 @@ unfixable = [ "F841", # Removes unused variables ] flake8-tidy-imports.ban-relative-imports = "all" + +[tool.ruff.per-file-ignores] +"docs/lite/jupyterlite.py" = ["F401", "F704"] diff --git a/tbump.toml b/tbump.toml index 01560be47c..f424ed8803 100644 --- a/tbump.toml +++ b/tbump.toml @@ -55,10 +55,7 @@ src = "codemeta.json" src = "CITATION.cff" [[file]] -src = "docs/generate_jupyterlite_iframe.py" - -[[file]] -src = "docs/jupyterlite.rst" +src = "docs/lite/jupyterlite.py" [[field]] # the name of the field From 0cac4583ba33de33450daba6ab901ee712c74e73 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Sun, 30 Apr 2023 01:26:12 -0500 Subject: [PATCH 05/42] build: Remove sphinx-togglebutton from 'docs' extra (#2190) * Remove sphinx-togglebutton dependency from the 'docs' extra as sphinx-togglebutton was used only to make a instructional admonition into a toggleable dropdown. The admonition was removed in PR #2187 and so sphinx-togglebutton is no longer needed. * Remove use of sphinx_togglebutton extension from docs/conf.py. * Amends PR https://github.com/scikit-hep/pyhf/pull/2187. --- docs/conf.py | 1 - pyproject.toml | 1 - 2 files changed, 2 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index e23a9d1744..30b9f2c6aa 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -61,7 +61,6 @@ def setup(app): 'nbsphinx', 'sphinx_issues', 'sphinx_copybutton', - 'sphinx_togglebutton', 'xref', 'jupyterlite_sphinx', ] diff --git a/pyproject.toml b/pyproject.toml index 011c566e30..9d1318d18d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -113,7 +113,6 @@ docs = [ "ipywidgets", "sphinx-issues", "sphinx-copybutton>=0.3.2", - "sphinx-togglebutton>=0.3.0", "jupyterlite-sphinx>=0.8.0", "jupyterlite-pyodide-kernel>=0.0.7", "jupytext>=1.14.0", From bc801cac0387d860f9e5ee46b514c939fe7a7b00 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Sun, 30 Apr 2023 02:56:54 -0500 Subject: [PATCH 06/42] fix: Disallow sphinx-copybutton v0.5.1 to avoid output in copy (#2192) * sphinx-copybutton v0.5.1 has a bug that will place lines of output in the copy contents, so explicitly disallow v0.5.1. This bug is fixed in sphinx-copybutton v0.5.2. --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 9d1318d18d..717dc3b121 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -112,7 +112,7 @@ docs = [ "nbsphinx!=0.8.8", # c.f. 
https://github.com/spatialaudio/nbsphinx/issues/620 "ipywidgets", "sphinx-issues", - "sphinx-copybutton>=0.3.2", + "sphinx-copybutton>=0.3.2,!=0.5.1", "jupyterlite-sphinx>=0.8.0", "jupyterlite-pyodide-kernel>=0.0.7", "jupytext>=1.14.0", From c477637bab5484aa671319b5910dd7fa001c973c Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 1 May 2023 23:52:59 -0500 Subject: [PATCH 07/42] chore: [pre-commit.ci] pre-commit autoupdate (#2193) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update pre-commit hooks: - github.com/charliermarsh/ruff-pre-commit: v0.0.260 → v0.0.263 - github.com/pre-commit/mirrors-mypy: v1.1.1 → v1.2.0 --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b5f99da3cf..c81ca55b45 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -27,7 +27,7 @@ repos: exclude: ^validation/|\.dtd$|\.xml$ - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: "v0.0.260" + rev: "v0.0.263" hooks: - id: ruff args: ["--fix", "--show-fixes"] @@ -44,7 +44,7 @@ repos: additional_dependencies: [black==23.3.0] - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.1.1 + rev: v1.2.0 # check the oldest and newest supported Pythons hooks: - &mypy @@ -62,7 +62,7 @@ repos: rev: 1.7.0 hooks: - id: nbqa-ruff - additional_dependencies: [ruff==0.0.260] + additional_dependencies: [ruff==0.0.263] args: ["--extend-ignore=F821,F401,F841,F811"] - repo: https://github.com/codespell-project/codespell From 5161749fe8509dde6f5f9c1d6fcb7d7b8a796a9f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 10 May 2023 17:57:14 -0400 Subject: [PATCH 08/42] build(deps): bump pypa/gh-action-pypi-publish from 1.8.5 to 1.8.6 (#2195) Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.5 to 1.8.6. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.5...v1.8.6) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- .github/workflows/publish-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish-package.yml b/.github/workflows/publish-package.yml index e43342d035..d39034a63c 100644 --- a/.github/workflows/publish-package.yml +++ b/.github/workflows/publish-package.yml @@ -123,13 +123,13 @@ jobs: if: >- (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && github.repository == 'scikit-hep/pyhf') || (github.event_name == 'workflow_dispatch' && github.event.inputs.publish == 'true' && github.repository == 'scikit-hep/pyhf') - uses: pypa/gh-action-pypi-publish@v1.8.5 + uses: pypa/gh-action-pypi-publish@v1.8.6 with: repository-url: https://test.pypi.org/legacy/ print-hash: true - name: Publish distribution 📦 to PyPI if: github.event_name == 'release' && github.event.action == 'published' && github.repository == 'scikit-hep/pyhf' - uses: pypa/gh-action-pypi-publish@v1.8.5 + uses: pypa/gh-action-pypi-publish@v1.8.6 with: print-hash: true From 71fd07688ce774256867e050a1934f246fc3c6d6 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Thu, 11 May 2023 15:24:29 -0400 Subject: [PATCH 09/42] docs: Correct 'tensorflow' typo in v0.7.1 release notes (#2198) * Fix 'tensorlfow' -> 'tensorflow'. --- docs/release-notes/v0.7.1.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/release-notes/v0.7.1.rst b/docs/release-notes/v0.7.1.rst index 6682a642e0..bd8680952f 100644 --- a/docs/release-notes/v0.7.1.rst +++ b/docs/release-notes/v0.7.1.rst @@ -9,7 +9,7 @@ Important Notes * All backends are now fully compatible and tested with `Python 3.11 `_. (PR :pr:`2145`) -* The ``tensorflow`` extra (``'pyhf[tensorlfow]'``) now automatically installs +* The ``tensorflow`` extra (``'pyhf[tensorflow]'``) now automatically installs ``tensorflow-macos`` for Apple silicon machines. (PR :pr:`2119`) From b031bb9e748186bac84f33cb18981f2148388184 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Mon, 15 May 2023 19:10:04 -0500 Subject: [PATCH 10/42] docs: Add use citation from Belle II lepton-flavor-violating decays paper (#2199) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add use citation from 'Search for lepton-flavor-violating τ−→ℓ−ϕ decays in 2019-2021 Belle II data'. - c.f. https://inspirehep.net/literature/2657628 --- docs/bib/use_citations.bib | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/docs/bib/use_citations.bib b/docs/bib/use_citations.bib index 2b7276593f..162d4aa471 100644 --- a/docs/bib/use_citations.bib +++ b/docs/bib/use_citations.bib @@ -1,3 +1,16 @@ +% 2023-05-08 +@article{Belle-II:2023bnh, + author = "Belle II Collaboration", + title = "{Search for lepton-flavor-violating $\tau^- \to \ell^-\phi$ decays in 2019-2021 Belle II data}", + eprint = "2305.04759", + archivePrefix = "arXiv", + primaryClass = "hep-ex", + reportNumber = "BELLE2-CONF-2023-004", + month = "5", + year = "2023", + journal = "" +} + % 2023-04-11 @article{Shadura:2023zks, author = "Shadura, Oksana and Held, Alexander", From 22c1699f044f798f2d0fe904d336f3ce9ee50b92 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Tue, 16 May 2023 00:09:27 -0500 Subject: [PATCH 11/42] docs: Update citations references to include journal pubs through 2023-04 (#2201) * Update use citations references to include their journal publication information. - 'Simplified likelihoods using linearized systematic uncertainties' is published as JHEP 04 (2023) 084. 
https://doi.org/10.1007/JHEP04(2023)084 - 'HighPT: A tool for high-pT Drell-Yan tails beyond the standard model' is published as Comput.Phys.Commun. 289 (2023) 108749. https://doi.org/10.1016/j.cpc.2023.108749 --- docs/bib/use_citations.bib | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/docs/bib/use_citations.bib b/docs/bib/use_citations.bib index 162d4aa471..8a47a1e04f 100644 --- a/docs/bib/use_citations.bib +++ b/docs/bib/use_citations.bib @@ -79,9 +79,11 @@ @article{Berger:2023bat eprint = "2301.05676", archivePrefix = "arXiv", primaryClass = "hep-ex", - month = "1", - year = "2023", - journal = "" + doi = "10.1007/JHEP04(2023)084", + journal = "JHEP", + volume = "04", + pages = "084", + year = "2023" } % 2022-12-06 @@ -167,14 +169,16 @@ @article{Buanes:2022wgm % 2022-07-21 @article{Allwicher:2022mcg, author = "Allwicher, Lukas and Faroughy, Darius. A. and Jaffredo, Florentin and Sumensari, Olcyr and Wilsch, Felix", - title = "{HighPT: A Tool for high-$p_T$ Drell-Yan Tails Beyond the Standard Model}", + title = "{HighPT: A tool for high-pT Drell-Yan tails beyond the standard model}", eprint = "2207.10756", archivePrefix = "arXiv", primaryClass = "hep-ph", reportNumber = "ZU-TH-29/22", - month = "7", - year = "2022", - journal = "" + doi = "10.1016/j.cpc.2023.108749", + journal = "Comput. Phys. Commun.", + volume = "289", + pages = "108749", + year = "2023" } % 2022-07-15 From 30dc7565c9a972732dcb25032db1cd6ae8b91118 Mon Sep 17 00:00:00 2001 From: Alexander Held <45009355+alexander-held@users.noreply.github.com> Date: Wed, 17 May 2023 19:11:33 -0400 Subject: [PATCH 12/42] fix: Catch use of multi-component parameters as POI with error message (#2197) * Raise exceptions.InvalidModel for multiple component parameter of interest. This guards against modifiers like 'shapefactor', 'shapesys', and 'staterror' from being used as POIs. - Remove use of 'assert' to check the same information. * Add test to test_pdf.py to validate. --- src/pyhf/pdf.py | 9 ++++++--- tests/test_pdf.py | 30 ++++++++++++++++++++++++++++++ 2 files changed, 36 insertions(+), 3 deletions(-) diff --git a/src/pyhf/pdf.py b/src/pyhf/pdf.py index f6cf54b362..3b55f2369a 100644 --- a/src/pyhf/pdf.py +++ b/src/pyhf/pdf.py @@ -464,10 +464,13 @@ def set_poi(self, name): raise exceptions.InvalidModel( f"The parameter of interest '{name:s}' cannot be fit as it is not declared in the model specification." ) - s = self.par_slice(name) - assert s.stop - s.start == 1 + if self.param_set(name).n_parameters > 1: + # multi-parameter modifiers are not supported as POIs + raise exceptions.InvalidModel( + f"The parameter '{name:s}' contains multiple components and is not currently supported as parameter of interest." 
+ ) self._poi_name = name - self._poi_index = s.start + self._poi_index = self.par_slice(name).start def _create_and_register_paramsets(self, required_paramsets): next_index = 0 diff --git a/tests/test_pdf.py b/tests/test_pdf.py index 045575d725..e680dad3f6 100644 --- a/tests/test_pdf.py +++ b/tests/test_pdf.py @@ -1329,3 +1329,33 @@ def test_is_shared_paramset_shapesys_same_sample_same_channel(): with pytest.raises(pyhf.exceptions.InvalidModel): pyhf.Workspace(spec).model() + + +def test_multi_component_poi(): + spec = { + "channels": [ + { + "name": "SR", + "samples": [ + { + "data": [5.0, 10.0], + "modifiers": [ + {"data": None, "name": "mu", "type": "shapefactor"} + ], + "name": "Signal", + } + ], + } + ], + "measurements": [ + {"config": {"parameters": [], "poi": "mu"}, "name": "example"} + ], + "observations": [{"data": [5.0, 10.0], "name": "SR"}], + "version": "1.0.0", + } + + with pytest.raises( + pyhf.exceptions.InvalidModel, + match="The parameter 'mu' contains multiple components and is not currently supported as parameter of interest.", + ): + pyhf.Workspace(spec).model() From 0efb2ee2ef018317025acb2872321a8916de7700 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Wed, 17 May 2023 19:00:59 -0500 Subject: [PATCH 13/42] fix: Add TYPE_CHECKING guard for numpy.typing (#2208) * The addition of typing.TYPE_CHECKING avoids situations in which the version of NumPy (whose lower bounds is enforced by SciPy) doesn't have numpy.typing, which was added in NumPy v1.21.0. * Exclude lines that match 'if TYPE_CHECKING:' from the coverage report to avoid artificial drops in code coverage. - c.f. https://coverage.readthedocs.io/en/stable/excluding.html#advanced-exclusion Co-authored-by: Giordon Stark --- pyproject.toml | 3 +++ src/pyhf/tensor/numpy_backend.py | 10 ++++++++-- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 717dc3b121..15124029f5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -225,6 +225,9 @@ omit = ["*/pyhf/typing.py"] precision = 1 sort = "cover" show_missing = true +exclude_also = [ + "if TYPE_CHECKING:" +] [tool.mypy] files = "src" diff --git a/src/pyhf/tensor/numpy_backend.py b/src/pyhf/tensor/numpy_backend.py index fe60abd4ee..b1bdcf7880 100644 --- a/src/pyhf/tensor/numpy_backend.py +++ b/src/pyhf/tensor/numpy_backend.py @@ -2,10 +2,16 @@ from __future__ import annotations import logging -from typing import Callable, Generic, Mapping, Sequence, TypeVar, Union +from typing import TYPE_CHECKING, Callable, Generic, Mapping, Sequence, TypeVar, Union import numpy as np -from numpy.typing import ArrayLike, DTypeLike, NBitBase, NDArray + +# Needed while numpy lower bound is older than v1.21.0 +if TYPE_CHECKING: + from numpy.typing import ArrayLike, DTypeLike, NBitBase, NDArray +else: + NBitBase = "NBitBase" + from scipy import special from scipy.special import gammaln, xlogy from scipy.stats import norm, poisson From 2e7f2c02307c20dcfb97cbc8611f70587ef5f637 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Wed, 17 May 2023 23:14:46 -0500 Subject: [PATCH 14/42] docs: Update release checklist (#2209) * Update the maintainer release checklist to reflect the current release workflow. - Add 'After Release Tag Pushed To GitHub' section. - Add checks for the container images being published to multiple container image registries. - Add additional example Software Process and Infrastructure JIRA tickets. * Add selection of the branch for pushing the tag back to the developer docs. 
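As an aside on the new container image checks: they are manual checklist items, but a scripted spot check is possible. The sketch below is illustrative only — it assumes the public Docker Hub v2 tag-listing endpoint, and the OSG and CERN Harbor registries would need their own equivalent queries:

    import requests

    release_tag = "0.7.2"  # hypothetical tag to look for
    response = requests.get(
        "https://hub.docker.com/v2/repositories/pyhf/pyhf/tags", timeout=10
    )
    published = [tag["name"] for tag in response.json().get("results", [])]
    print(f"{release_tag} on Docker Hub:", release_tag in published)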
--- .github/ISSUE_TEMPLATE/~release-checklist.md | 36 ++++++++++++++------ docs/development.rst | 1 + 2 files changed, 27 insertions(+), 10 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/~release-checklist.md b/.github/ISSUE_TEMPLATE/~release-checklist.md index 12de456ab8..36a7426c85 100644 --- a/.github/ISSUE_TEMPLATE/~release-checklist.md +++ b/.github/ISSUE_TEMPLATE/~release-checklist.md @@ -10,23 +10,40 @@ about: Checklist for core developers to complete as part of making a release * [ ] Migrate any unresolved Issues or PRs from the [release GitHub project board](https://github.com/scikit-hep/pyhf/projects/) to a new project board. * [ ] Verify that there is a release notes file for the release under [``docs/release-notes``](https://github.com/scikit-hep/pyhf/tree/main/docs/release-notes). * [ ] Verify that the release notes files correctly summarize all development changes since the last release. -* [ ] Draft email to [``pyhf-announcements`` mailing list](https://groups.google.com/group/pyhf-announcements/subscribe) that summarizes the main points of the release notes and circulate it for development team approval. * [ ] Update the checklist Issue template in the [``.github/ISSUE_TEMPLATE``](https://github.com/scikit-hep/pyhf/tree/main/.github/ISSUE_TEMPLATE) directory if there are revisions. -* [ ] Make a release to [TestPyPI][TestPyPI_pyhf] using the [workflow dispatch event trigger](https://github.com/scikit-hep/pyhf/actions/workflows/publish-package.yml). -* [ ] Verify that the project README is displaying correctly on [TestPyPI][TestPyPI_pyhf]. * [ ] Add any new use citations or published statistical models to the [Use and Citations page][citations_page]. -* [ ] Verify that the citations on the [Use and Citations page][citations_page] are up to date with their current [INSPIRE](https://inspirehep.net/) record. -* [ ] Update the [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) GitHub Action used for deployment to TestPyPI and PyPI to the latest stable release. +* [ ] Verify that the citations on the [Use and Citations page][citations_page] are up to date with their current [INSPIRE](https://inspirehep.net/) record. Checking the [Dimensions listing of publication citations](https://app.dimensions.ai/discover/publication?or_subset_publication_citations=pub.1135154020) can be helpful to catch citations that are now journal publications. * [ ] Update the ``codemeta.json`` file in the release PR if its requirements have updated. +* [ ] Update the [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) GitHub Action used for deployment to TestPyPI and PyPI to the latest stable release. +* [ ] Make a release to [TestPyPI][TestPyPI_pyhf] using the [workflow dispatch event trigger](https://github.com/scikit-hep/pyhf/actions/workflows/publish-package.yml). +* [ ] Verify that the project README is displaying correctly on [TestPyPI][TestPyPI_pyhf]. +* [ ] Draft email to [``pyhf-announcements`` mailing list](https://groups.google.com/group/pyhf-announcements/subscribe) that summarizes the main points of the release notes and circulate it for development team approval. [TestPyPI_pyhf]: https://test.pypi.org/project/pyhf/ [citations_page]: https://scikit-hep.org/pyhf/citations.html -## Once Release PR is Merged +## Create Release Tag -* [ ] Watch the CI to ensure that the deployment to [PyPI](https://pypi.org/project/pyhf/) is successful. 
-* [ ] Create a [GitHub release](https://github.com/scikit-hep/pyhf/releases) from the generated PR tag and copy the release notes published to the GitHub release page. The creation of the GitHub release triggers all other release related activities. +For a video walkthrough consult the [``pyhf`` ``v0.7.1`` release recording](https://youtu.be/ZV20tr3EpTw) on YouTube. + +* [ ] Use the [bump version](https://github.com/scikit-hep/pyhf/actions/workflows/bump-version.yml) GitHub Actions workflow perform a [dry run](https://scikit-hep.org/pyhf/development.html#release-tags) of the bump version to the new release tag. +* [ ] Check the annotated tag in the dry run workflow logs to make sure it looks correct. +* [ ] If the dry run passes as expected, run the same workflow with the dry run option set to ``false`` to bump the release tag version and push the new tag back to GitHub. +* [ ] Verify the release tag was pushed to the correct branch. +* [ ] Verify the release tag commit has bumped the correct versions. +* [ ] Watch the CI to verify all tag based jobs finish correctly. +* [ ] Verify the release for the tag on [TestPyPI][TestPyPI_pyhf] looks correct. + +## After Release Tag Pushed To GitHub + +* [ ] Create a [GitHub release](https://github.com/scikit-hep/pyhf/releases) from the new release tag and copy the release notes published to the GitHub release page. The creation of the GitHub release triggers all other release related activities. - [ ] Before pasting in the release notes copy the changes that the GitHub bot has already queued up and pasted into the tag and place them in the "Changes" section of the release notes. If the release notes are published before these are copied then they will be overwritten and you'll have to add them back in by hand. + - [ ] Create a corresponding [announcement GitHub Discussion](https://github.com/scikit-hep/pyhf/discussions/categories/announcements) for the release. +* [ ] Watch the CI to ensure that the deployment to [PyPI](https://pypi.org/project/pyhf/) is successful. +* [ ] Verify Docker images with the correct tags have been deployed to all container image registries. + - [ ] [Docker Hub](https://hub.docker.com/r/pyhf/pyhf/tags) + - [ ] [OSG Harbor](https://hub.opensciencegrid.org/harbor/projects/866/repositories/pyhf/) + - [ ] [CERN Harbor](https://registry.cern.ch/harbor/projects/3550/repositories/pyhf/artifacts-tab) * [ ] Verify there is a new [Zenodo DOI](https://doi.org/10.5281/zenodo.1169739) minted for the release. - [ ] Verify that the new release archive metadata on Zenodo matches is being picked up as expected from [`CITATION.cff`](https://github.com/scikit-hep/pyhf/blob/main/CITATION.cff). * [ ] Verify that a Binder has properly built for the new release. @@ -44,6 +61,5 @@ about: Checklist for core developers to complete as part of making a release * [ ] Update the [tutorial](https://github.com/pyhf/pyhf-tutorial) to use the new release number and API. * [ ] Make a PR to use the new release in the [CUDA enabled Docker images](https://github.com/pyhf/cuda-images). * [ ] Open a ticket on the CERN [Software Process and Infrastructure JIRA](https://sft.its.cern.ch/jira/browse/SPI) to update the version of `pyhf` available in the next LCG release. - - c.f. the [`v0.6.3` request ticket](https://sft.its.cern.ch/jira/browse/SPI-2086) as an example. 
-* [ ] If the release is a **major** or **minor** release, open a [GitHub Release Radar](https://github.com/github/release-radar) Issue for the release to potentially get featured on GitHub's [Release Radar blog](https://github.blog/?s=release+radar). + - c.f. the [`v0.6.3` request ticket](https://sft.its.cern.ch/jira/browse/SPI-2086) and the [`v0.7.1` request ticket](https://sft.its.cern.ch/jira/browse/SPI-2333) as examples. * [ ] Close the [release GitHub Project board](https://github.com/scikit-hep/pyhf/projects/). diff --git a/docs/development.rst b/docs/development.rst index a35b2cba96..324b8110bf 100644 --- a/docs/development.rst +++ b/docs/development.rst @@ -200,6 +200,7 @@ The maintainer needs to: * Select the semantic versioning (SemVer) type (major, minor, patch) of the release tag. * Select if the release tag is a release candidate or not. * Input the SemVer version number of the release tag. +* Select the branch to push the new release tag to. * Select if to override the SemVer compatibility of the previous options (default is to run checks). * Select if a dry run should be performed (default is to do a dry run to avoid accidental From 179424acc6f97aaa79c5cc9e0d6be20987bc8a13 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Thu, 18 May 2023 00:39:47 -0500 Subject: [PATCH 15/42] docs: Update talk list through May 2023 (#2210) * Update pyhf talk and tutorial list through May 2023. * Fix DESY Indico URL for 'Traditional inference with machine learning tools' to avoid timeout error during linkcheck. --- docs/bib/talks.bib | 94 +++++++++++++++++++++++++++++++++++++++++- docs/bib/tutorials.bib | 11 +++++ 2 files changed, 104 insertions(+), 1 deletion(-) diff --git a/docs/bib/talks.bib b/docs/bib/talks.bib index b6538e9b8e..efe3363da9 100644 --- a/docs/bib/talks.bib +++ b/docs/bib/talks.bib @@ -1,6 +1,98 @@ % NB: entries with same author-title-year are not picked up: % https://github.com/mcmtroffaes/sphinxcontrib-bibtex/issues/117 +@unpublished{Feickert_20230412, + title = {{pyhf: pure-Python implementation of HistFactory with tensors and automatic differentiation}}, + author = {Matthew Feickert}, + year = {2023}, + month = {Apr}, + day = {12}, + note = {(Internal) CMS Common Analysis Tools General Meeting (April 2023)}, + url = {https://indico.cern.ch/event/1264029/contributions/5308065/}, +} + +@unpublished{Feickert_20230303, + title = {{How to contribute to pyhf development}}, + author = {Matthew Feickert}, + year = {2023}, + month = {Mar}, + day = {3}, + note = {Belle II pyhf workshop 2023}, + url = {https://indico.belle2.org/event/8470/contributions/55871/}, +} + +@unpublished{Feickert_20220906, + title = {{pyhf and analysis optimization with automatic differentiation}}, + author = {Matthew Feickert}, + year = {2022}, + month = {Sep}, + day = {6}, + note = {(Internal) ATLAS HDBS Workshop 2022}, + url = {https://indico.cern.ch/event/1132691/contributions/4994710/}, +} + +@unpublished{Feickert_20220708, + title = {{pyhf: pure-Python statistical fitting library with tensors and automatic differentiation}}, + author = {Matthew Feickert}, + year = {2022}, + month = {Jul}, + day = {8}, + note = {International Conference on High Energy Physics (ICHEP) 2022}, + url = {https://agenda.infn.it/event/28874/contributions/169217/}, +} + +@unpublished{Feickert_20220425, + title = {{Statistical inference: pyhf and cabinetry}}, + author = {Matthew Feickert}, + year = {2022}, + month = {Apr}, + day = {25}, + note = {IRIS-HEP Analysis Grand Challenge Tools 2022 Workshop}, + url = 
{https://indico.cern.ch/event/1126109/contributions/4780155/}, +} + +@unpublished{Feickert_20211201, + title = {{pyhf: pure-Python implementation of HistFactory with tensors and automatic differentiation}}, + author = {Matthew Feickert}, + year = {2021}, + month = {Dec}, + day = {1}, + note = {CMS Analysis Tools Task Force}, + url = {https://indico.cern.ch/event/1100873/contributions/4631656/}, +} + +@unpublished{Feickert_20210715, + title = {{Distributed statistical inference with pyhf powered by funcX}}, + author = {Matthew Feickert}, + year = {2021}, + month = {Jul}, + day = {15}, + note = {20th Python in Science Conference (SciPy 2021)}, + doi = {10.25080/majora-1b6fd038-023}, + url = {https://conference.scipy.org/proceedings/scipy2021/slides.html}, +} + +@unpublished{Feickert_20210706, + title = {{Distributed statistical inference with pyhf}}, + author = {Matthew Feickert}, + year = {2021}, + month = {Jul}, + day = {6}, + note = {PyHEP 2021 (virtual) Workshop}, + doi = {10.5281/zenodo.5136819}, + url = {https://indico.cern.ch/event/1019958/contributions/4418598/}, +} + +@unpublished{Feickert_20210520, + title = {{Distributed statistical inference with pyhf enabled through funcX}}, + author = {Matthew Feickert}, + year = {2021}, + month = {May}, + day = {20}, + note = {vCHEP 2021 Conference}, + url = {https://indico.cern.ch/event/948465/contributions/4324013/}, +} + @unpublished{Feickert_20201103, title = {{pyhf: pure-Python implementation of HistFactory with tensors and automatic differentiation}}, author = {Matthew Feickert}, @@ -53,7 +145,7 @@ @unpublished{Heinrich20191030 day = {30}, note = {1st Pan-European Advanced School on Statistics in High Energy Physics}, organization = {DESY}, - url = {https://indico.desy.de/indico/event/22731/session/4/contribution/19}, + url = {https://indico.desy.de/event/22731/contributions/47953/}, } @unpublished{Stark20191023, diff --git a/docs/bib/tutorials.bib b/docs/bib/tutorials.bib index 5960a29bb7..94329a2eb0 100644 --- a/docs/bib/tutorials.bib +++ b/docs/bib/tutorials.bib @@ -1,6 +1,17 @@ % NB: entries with same author-title-year are not picked up: % https://github.com/mcmtroffaes/sphinxcontrib-bibtex/issues/117 +@unpublished{Feickert_20210407, + title = {{Tutorial on pyhf}}, + author = {Matthew Feickert}, + year = {2021}, + month = {Apr}, + day = {7}, + note = {PyHEP Python Module of the Month (April 2021)}, + doi = {10.5281/zenodo.4670322}, + url = {https://indico.cern.ch/event/985425/}, +} + @unpublished{GStark20200925, title = {{ATLAS Exotics + SUSY Workshop 2020 pyhf Tutorial}}, author = {Giordon Stark}, From e99733fec94b52b20f2303844f6f8b0fd4b9b41b Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Thu, 18 May 2023 03:29:24 -0500 Subject: [PATCH 16/42] =?UTF-8?q?docs:=20Bump=20version:=200.7.1=20?= =?UTF-8?q?=E2=86=92=200.7.2=20(#2217)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Forward port the version bump information from release v0.7.2 on the release/v0.7.x branch to main. 
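A quick sanity check of the forward-ported bump (purely illustrative, not part of the commit itself) is to confirm that the installed package and its bundled citation metadata both report the new version:

    import pyhf

    print(pyhf.__version__)  # expect "0.7.2"
    print(pyhf.utils.citation(oneline=True))  # BibTeX entry should reference v0.7.2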
--- .zenodo.json | 6 +++--- CITATION.cff | 8 ++++---- README.rst | 10 +++++----- codemeta.json | 2 +- docs/lite/jupyterlite.py | 2 +- src/pyhf/data/citation.bib | 6 +++--- src/pyhf/utils.py | 2 +- tbump.toml | 4 ++-- 8 files changed, 20 insertions(+), 20 deletions(-) diff --git a/.zenodo.json b/.zenodo.json index 12e93a79a9..642133318a 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -1,8 +1,8 @@ { "description": "pure-Python HistFactory implementation with tensors and autodiff", "license": "Apache-2.0", - "title": "scikit-hep/pyhf: v0.7.1", - "version": "v0.7.1", + "title": "scikit-hep/pyhf: v0.7.2", + "version": "v0.7.2", "upload_type": "software", "creators": [ { @@ -36,7 +36,7 @@ "related_identifiers": [ { "scheme": "url", - "identifier": "https://github.com/scikit-hep/pyhf/tree/v0.7.1", + "identifier": "https://github.com/scikit-hep/pyhf/tree/v0.7.2", "relation": "isSupplementTo" } ] diff --git a/CITATION.cff b/CITATION.cff index 9f10a88534..19396e412d 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -14,11 +14,11 @@ authors: given-names: "Giordon" orcid: "https://orcid.org/0000-0001-6616-3433" affiliation: "SCIPP, University of California, Santa Cruz" -title: "pyhf: v0.7.1" -version: 0.7.1 +title: "pyhf: v0.7.2" +version: 0.7.2 doi: 10.5281/zenodo.1169739 -repository-code: "https://github.com/scikit-hep/pyhf/releases/tag/v0.7.1" -url: "https://pyhf.readthedocs.io/en/v0.7.1/" +repository-code: "https://github.com/scikit-hep/pyhf/releases/tag/v0.7.2" +url: "https://pyhf.readthedocs.io/en/v0.7.2/" keywords: - python - physics diff --git a/README.rst b/README.rst index c2dc4cdbc0..14d38d62aa 100644 --- a/README.rst +++ b/README.rst @@ -309,11 +309,11 @@ the preferred BibTeX entry for citation of ``pyhf`` includes both the @software{pyhf, author = {Lukas Heinrich and Matthew Feickert and Giordon Stark}, - title = "{pyhf: v0.7.1}", - version = {0.7.1}, + title = "{pyhf: v0.7.2}", + version = {0.7.2}, doi = {10.5281/zenodo.1169739}, url = {https://doi.org/10.5281/zenodo.1169739}, - note = {https://github.com/scikit-hep/pyhf/releases/tag/v0.7.1} + note = {https://github.com/scikit-hep/pyhf/releases/tag/v0.7.2} } @article{pyhf_joss, @@ -365,7 +365,7 @@ and grant `OAC-1450377 =3.0.0"]) +await piplite.install(["pyhf==0.7.2", "matplotlib>=3.0.0"]) # %matplotlib inline import pyhf diff --git a/src/pyhf/data/citation.bib b/src/pyhf/data/citation.bib index c7f94dc057..5cef8add01 100644 --- a/src/pyhf/data/citation.bib +++ b/src/pyhf/data/citation.bib @@ -1,10 +1,10 @@ @software{pyhf, author = {Lukas Heinrich and Matthew Feickert and Giordon Stark}, - title = "{pyhf: v0.7.1}", - version = {0.7.1}, + title = "{pyhf: v0.7.2}", + version = {0.7.2}, doi = {10.5281/zenodo.1169739}, url = {https://doi.org/10.5281/zenodo.1169739}, - note = {https://github.com/scikit-hep/pyhf/releases/tag/v0.7.1} + note = {https://github.com/scikit-hep/pyhf/releases/tag/v0.7.2} } @article{pyhf_joss, diff --git a/src/pyhf/utils.py b/src/pyhf/utils.py index 8e24ac191e..e6da9f3de2 100644 --- a/src/pyhf/utils.py +++ b/src/pyhf/utils.py @@ -111,7 +111,7 @@ def citation(oneline=False): >>> import pyhf >>> pyhf.utils.citation(oneline=True) - '@software{pyhf, author = {Lukas Heinrich and Matthew Feickert and Giordon Stark}, title = "{pyhf: v0.7.1}", version = {0.7.1}, doi = {10.5281/zenodo.1169739}, url = {https://doi.org/10.5281/zenodo.1169739}, note = {https://github.com/scikit-hep/pyhf/releases/tag/v0.7.1}}@article{pyhf_joss, doi = {10.21105/joss.02823}, url = {https://doi.org/10.21105/joss.02823}, year = {2021}, publisher = {The 
Open Journal}, volume = {6}, number = {58}, pages = {2823}, author = {Lukas Heinrich and Matthew Feickert and Giordon Stark and Kyle Cranmer}, title = {pyhf: pure-Python implementation of HistFactory statistical models}, journal = {Journal of Open Source Software}}' + '@software{pyhf, author = {Lukas Heinrich and Matthew Feickert and Giordon Stark}, title = "{pyhf: v0.7.2}", version = {0.7.2}, doi = {10.5281/zenodo.1169739}, url = {https://doi.org/10.5281/zenodo.1169739}, note = {https://github.com/scikit-hep/pyhf/releases/tag/v0.7.2}}@article{pyhf_joss, doi = {10.21105/joss.02823}, url = {https://doi.org/10.21105/joss.02823}, year = {2021}, publisher = {The Open Journal}, volume = {6}, number = {58}, pages = {2823}, author = {Lukas Heinrich and Matthew Feickert and Giordon Stark and Kyle Cranmer}, title = {pyhf: pure-Python implementation of HistFactory statistical models}, journal = {Journal of Open Source Software}}' Keyword Args: oneline (:obj:`bool`): Whether to provide citation with new lines (default) or as a one-liner. diff --git a/tbump.toml b/tbump.toml index f424ed8803..d3f25bbd58 100644 --- a/tbump.toml +++ b/tbump.toml @@ -1,7 +1,7 @@ github_url = "https://github.com/scikit-hep/pyhf/" [version] -current = "0.7.1" +current = "0.7.2" # Example of a semver regexp. # Make sure this matches current_version before @@ -19,7 +19,7 @@ regex = ''' [git] # The current version will get updated when tbump is run -message_template = "Bump version: 0.7.1 → {new_version}" +message_template = "Bump version: 0.7.2 → {new_version}" tag_template = "v{new_version}" # For each file to patch, add a [[file]] config From a7e06c09d71f7bae6d0bcd7ff6efc543b0a97731 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Thu, 18 May 2023 08:09:34 -0500 Subject: [PATCH 17/42] docs: Add v0.7.2 release notes (#2218) * Forward port PR #2214 from release/v0.7.x to main. * Add release notes for pyhf v0.7.2. --- docs/release-notes.rst | 1 + docs/release-notes/v0.7.2.rst | 43 +++++++++++++++++++++++++++++++++++ 2 files changed, 44 insertions(+) create mode 100644 docs/release-notes/v0.7.2.rst diff --git a/docs/release-notes.rst b/docs/release-notes.rst index 8698351460..ec60c92997 100644 --- a/docs/release-notes.rst +++ b/docs/release-notes.rst @@ -2,6 +2,7 @@ Release Notes ============= +.. include:: release-notes/v0.7.2.rst .. include:: release-notes/v0.7.1.rst .. include:: release-notes/v0.7.0.rst .. include:: release-notes/v0.6.3.rst diff --git a/docs/release-notes/v0.7.2.rst b/docs/release-notes/v0.7.2.rst new file mode 100644 index 0000000000..35cf8e095c --- /dev/null +++ b/docs/release-notes/v0.7.2.rst @@ -0,0 +1,43 @@ +|release v0.7.2|_ +================= + +This is a patch release from ``v0.7.1`` → ``v0.7.2``. + +Important Notes +--------------- + +* ``pyhf`` became a `NumFOCUS Affiliated Project + `__ on + 2022-12-19. |NumFOCUS Affiliated Project| + ``v0.7.1`` is the first release to appear in a NumFOCUS Newsletter and + ``v0.7.2`` is the first release to appear as part of the Affiliated Projects + page. + (PR :pr:`2179`) + +Fixes +----- + +* If a multiple component parameter of interest is used raise + :class:`~pyhf.exceptions.InvalidModel`. + This guards against modifiers like :class:`~pyhf.modifiers.shapefactor`, + :class:`~pyhf.modifiers.shapesys`, and :class:`~pyhf.modifiers.staterror` + from being used as POIs. 
+ (PR :pr:`2197`) +* Use :data:`typing.TYPE_CHECKING` guard to avoid causing a + :class:`ModuleNotFoundError` when the version of NumPy installed is older + than ``v1.21.0``, which is the first NumPy release to include + :mod:`numpy.typing`. + (PR :pr:`2208`) + +Contributors +------------ + +``v0.7.2`` benefited from contributions from: + +* Alexander Held + +.. |release v0.7.2| replace:: ``v0.7.2`` +.. _`release v0.7.2`: https://github.com/scikit-hep/pyhf/releases/tag/v0.7.2 + +.. |NumFOCUS Affiliated Project| image:: https://img.shields.io/badge/NumFOCUS-Affiliated%20Project-orange.svg?style=flat&colorA=E1523D&colorB=007D8A + :target: https://numfocus.org/sponsored-projects/affiliated-projects From 2219abf4f2f3e453d23c38bdbdceb42b6aef87b5 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Mon, 22 May 2023 19:52:37 -0500 Subject: [PATCH 18/42] fix: Pass script_runner commands as a single sequence (#2221) * Require pytest-console-scripts v1.4.0+ in 'test' extra. pytest-console-scripts v1.4.0 introduces a DeprecationWarning: > script_runner commands should be passed as a single sequence, not as multiple arguments. > Replace `script_runner.run(a, b, c)` calls with `script_runner.run([a, b, c])` * Remove unpacking of the command to be passed to script_runner with shlex.split to pass a list and not multiple arguments. --- pyproject.toml | 2 +- tests/test_examples.py | 2 +- tests/test_scripts.py | 144 ++++++++++++++++++++--------------------- 3 files changed, 74 insertions(+), 74 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 15124029f5..ffb42daaf1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -94,7 +94,7 @@ test = [ "pytest-mock", "requests-mock>=1.9.0", "pytest-benchmark[histogram]", - "pytest-console-scripts", + "pytest-console-scripts>=1.4.0", "pytest-mpl", "pydocstyle", "papermill~=2.3.4", diff --git a/tests/test_examples.py b/tests/test_examples.py index 8891384932..9d4c2a1e1c 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -3,5 +3,5 @@ def test_2bin_1channel(tmpdir, script_runner): command = f"pyhf inspect {'docs/examples/json/2-bin_1-channel.json':s}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success diff --git a/tests/test_scripts.py b/tests/test_scripts.py index f51dbaee61..0dd88e9b8a 100644 --- a/tests/test_scripts.py +++ b/tests/test_scripts.py @@ -30,7 +30,7 @@ def tarfile_path(tmpdir): def test_version(script_runner): command = 'pyhf --version' start = time.time() - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) end = time.time() elapsed = end - start assert ret.success @@ -44,7 +44,7 @@ def test_version(script_runner): def test_citation(script_runner, flag): command = f'pyhf {flag}' start = time.time() - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) end = time.time() elapsed = end - start assert ret.success @@ -60,7 +60,7 @@ def test_citation(script_runner, flag): def test_import_prepHistFactory(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success assert ret.stdout == '' assert ret.stderr == '' @@ -73,7 +73,7 @@ def test_import_prepHistFactory(tmpdir, script_runner): def 
test_import_prepHistFactory_withProgress(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success assert ret.stdout == '' assert ret.stderr != '' @@ -81,7 +81,7 @@ def test_import_prepHistFactory_withProgress(tmpdir, script_runner): def test_import_prepHistFactory_stdout(tmpdir, script_runner): command = 'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success assert ret.stdout != '' assert ret.stderr != '' @@ -92,10 +92,10 @@ def test_import_prepHistFactory_stdout(tmpdir, script_runner): def test_import_prepHistFactory_and_fit(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) command = f"pyhf fit {temp.strpath:s}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success ret_json = json.loads(ret.stdout) @@ -110,7 +110,7 @@ def test_import_prepHistFactory_and_fit(tmpdir, script_runner): "ConstExample", ]: command = f"pyhf fit {temp.strpath:s} --value --measurement {measurement:s}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success ret_json = json.loads(ret.stdout) @@ -121,7 +121,7 @@ def test_import_prepHistFactory_and_fit(tmpdir, script_runner): tmp_out = tmpdir.join(f"{measurement:s}_output.json") # make sure output file works too command += f" --output-file {tmp_out.strpath:s}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success ret_json = json.load(tmp_out) assert "mle_parameters" in ret_json @@ -131,10 +131,10 @@ def test_import_prepHistFactory_and_fit(tmpdir, script_runner): def test_import_prepHistFactory_and_cls(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) command = f'pyhf cls {temp.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success d = json.loads(ret.stdout) @@ -149,7 +149,7 @@ def test_import_prepHistFactory_and_cls(tmpdir, script_runner): 'ConstExample', ]: command = f'pyhf cls {temp.strpath:s} --measurement {measurement:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success d = json.loads(ret.stdout) @@ -160,7 +160,7 @@ def test_import_prepHistFactory_and_cls(tmpdir, script_runner): tmp_out = tmpdir.join(f'{measurement:s}_output.json') # make sure output file works too command += f' --output-file {tmp_out.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success d = json.load(tmp_out) assert 'CLs_obs' in d @@ -173,7 +173,7 @@ def test_import_usingMounts(datadir, 
tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json --hide-progress -v {data}:/absolute/path/to -v {data}:/another/absolute/path/to --output-file {temp.strpath:s} {data.joinpath("config/example.xml")}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success assert ret.stdout == '' assert ret.stderr == '' @@ -189,7 +189,7 @@ def test_import_usingMounts_badDelimitedPaths(datadir, tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json --hide-progress -v {data}::/absolute/path/to -v {data}/another/absolute/path/to --output-file {temp.strpath:s} {data.joinpath("config/example.xml")}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert not ret.success assert ret.stdout == '' assert 'is not a valid colon-separated option' in ret.stderr @@ -199,10 +199,10 @@ def test_import_usingMounts_badDelimitedPaths(datadir, tmpdir, script_runner): def test_fit_backend_option(tmpdir, script_runner, backend): temp = tmpdir.join("parsed_output.json") command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) command = f"pyhf fit --backend {backend:s} {temp.strpath:s}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success ret_json = json.loads(ret.stdout) @@ -214,10 +214,10 @@ def test_fit_backend_option(tmpdir, script_runner, backend): def test_cls_backend_option(tmpdir, script_runner, backend): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) command = f'pyhf cls --backend {backend:s} {temp.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success d = json.loads(ret.stdout) @@ -229,10 +229,10 @@ def test_cls_backend_option(tmpdir, script_runner, backend): def test_import_and_export(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmpdir.mkdir('output').strpath:s}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success @@ -247,23 +247,23 @@ def test_patch(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) command = f'pyhf cls {temp.strpath:s} --patch {patch.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmpdir.mkdir('output_1').strpath:s} --patch {patch.strpath:s}" - ret = script_runner.run(*shlex.split(command)) + ret = 
script_runner.run(shlex.split(command)) assert ret.success command = f'pyhf cls {temp.strpath:s} --patch -' - ret = script_runner.run(*shlex.split(command), stdin=patch) + ret = script_runner.run(shlex.split(command), stdin=patch) assert ret.success command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmpdir.mkdir('output_2').strpath:s} --patch -" - ret = script_runner.run(*shlex.split(command), stdin=patch) + ret = script_runner.run(shlex.split(command), stdin=patch) assert ret.success @@ -274,24 +274,24 @@ def test_patch_fail(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) command = f'pyhf cls {temp.strpath:s} --patch {patch.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert not ret.success command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmpdir.mkdir('output').strpath:s} --patch {patch.strpath:s}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert not ret.success def test_bad_measurement_name(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) command = f'pyhf cls {temp.strpath:s} --measurement "a-fake-measurement-name"' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert not ret.success # assert 'no measurement by name' in ret.stderr # numpy swallows the log.error() here, dunno why @@ -299,14 +299,14 @@ def test_bad_measurement_name(tmpdir, script_runner): def test_testpoi(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) pois = [1.0, 0.5, 0.001] results_exp = [] results_obs = [] for test_poi in pois: command = f'pyhf cls {temp.strpath:s} --test-poi {test_poi:f}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success d = json.loads(ret.stdout) @@ -334,11 +334,11 @@ def test_testpoi(tmpdir, script_runner): def test_fit_optimizer(tmpdir, script_runner, optimizer, opts, success): temp = tmpdir.join("parsed_output.json") command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) optconf = " ".join(f"--optconf {opt}" for opt in opts) command = f"pyhf fit --optimizer {optimizer} {optconf} {temp.strpath}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success == success @@ -350,11 +350,11 @@ def test_fit_optimizer(tmpdir, script_runner, optimizer, opts, success): def test_cls_optimizer(tmpdir, script_runner, optimizer, opts, success): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir 
validation/xmlimport_input/ --output-file {temp.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) optconf = " ".join(f"--optconf {opt}" for opt in opts) command = f'pyhf cls {temp.strpath} --optimizer {optimizer} {optconf}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success == success @@ -362,21 +362,21 @@ def test_cls_optimizer(tmpdir, script_runner, optimizer, opts, success): def test_inspect(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) command = f'pyhf inspect {temp.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success def test_inspect_outfile(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) tempout = tmpdir.join("inspect_output.json") command = f'pyhf inspect {temp.strpath:s} --output-file {tempout.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success summary = json.loads(tempout.read()) @@ -399,23 +399,23 @@ def test_inspect_outfile(tmpdir, script_runner): def test_prune(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) command = ( f"pyhf prune -m staterror_channel1 --measurement GammaExample {temp.strpath:s}" ) - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success def test_prune_outfile(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) tempout = tmpdir.join("prune_output.json") command = f'pyhf prune -m staterror_channel1 --measurement GammaExample {temp.strpath:s} --output-file {tempout.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success spec = json.loads(temp.read()) @@ -431,21 +431,21 @@ def test_prune_outfile(tmpdir, script_runner): def test_rename(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) command = f'pyhf rename -m staterror_channel1 staterror_channelone --measurement GammaExample GamEx {temp.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success def 
test_rename_outfile(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) tempout = tmpdir.join("rename_output.json") command = f'pyhf rename -m staterror_channel1 staterror_channelone --measurement GammaExample GamEx {temp.strpath:s} --output-file {tempout.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success spec = json.loads(temp.read()) @@ -466,7 +466,7 @@ def test_combine(tmpdir, script_runner): temp_1 = tmpdir.join("parsed_output.json") temp_2 = tmpdir.join("renamed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1.strpath:s} --hide-progress' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) rename_channels = {'channel1': 'channel2'} rename_measurements = { @@ -483,10 +483,10 @@ def test_combine(tmpdir, script_runner): ' --measurement ' + ' '.join(item) for item in rename_measurements.items() ) command = f"pyhf rename {temp_1.strpath:s} {_opts_channels:s} {_opts_measurements:s} --output-file {temp_2.strpath:s}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) command = f'pyhf combine {temp_1.strpath:s} {temp_2.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success @@ -494,7 +494,7 @@ def test_combine_outfile(tmpdir, script_runner): temp_1 = tmpdir.join("parsed_output.json") temp_2 = tmpdir.join("renamed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1.strpath:s} --hide-progress' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) rename_channels = {'channel1': 'channel2'} rename_measurements = { @@ -511,11 +511,11 @@ def test_combine_outfile(tmpdir, script_runner): ' --measurement ' + ' '.join(item) for item in rename_measurements.items() ) command = f"pyhf rename {temp_1.strpath:s} {_opts_channels:s} {_opts_measurements:s} --output-file {temp_2.strpath:s}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) tempout = tmpdir.join("combined_output.json") command = f'pyhf combine {temp_1.strpath:s} {temp_2.strpath:s} --output-file {tempout.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success combined_spec = json.loads(tempout.read()) @@ -528,18 +528,18 @@ def test_combine_merge_channels(tmpdir, script_runner): temp_1 = tmpdir.join("parsed_output.json") temp_2 = tmpdir.join("renamed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1.strpath} --hide-progress' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success command = ( f'pyhf prune {temp_1.strpath} --sample signal --output-file {temp_2.strpath}' ) - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success command = f'pyhf combine --merge-channels --join "left outer" 
{temp_1.strpath} {temp_2.strpath}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success @@ -555,10 +555,10 @@ def test_workspace_digest(tmpdir, script_runner, algorithms, do_json): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath} --hide-progress' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) command = f"pyhf digest {temp.strpath} -a {' -a '.join(algorithms)}{' -j' if do_json else ''}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success assert all(algorithm in ret.stdout for algorithm in algorithms) if do_json: @@ -591,19 +591,19 @@ def test_workspace_digest(tmpdir, script_runner, algorithms, do_json): def test_patchset_download(tmpdir, script_runner, requests_mock, tarfile_path, archive): requests_mock.get(archive, content=open(tarfile_path, "rb").read()) command = f'pyhf contrib download {archive} {tmpdir.join("likelihoods").strpath}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success # Run with all optional flags command = f'pyhf contrib download --verbose --force {archive} {tmpdir.join("likelihoods").strpath}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success requests_mock.get( "https://www.pyhfthisdoesnotexist.org/record/resource/1234567", status_code=200 ) command = f'pyhf contrib download --verbose https://www.pyhfthisdoesnotexist.org/record/resource/1234567 {tmpdir.join("likelihoods").strpath}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert not ret.success assert ( "pyhf.exceptions.InvalidArchiveHost: www.pyhfthisdoesnotexist.org is not an approved archive host" @@ -615,7 +615,7 @@ def test_patchset_download(tmpdir, script_runner, requests_mock, tarfile_path, a "https://httpstat.us/404/record/resource/1234567", status_code=404 ) command = f'pyhf contrib download --verbose --force https://httpstat.us/404/record/resource/1234567 {tmpdir.join("likelihoods").strpath}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert not ret.success assert "gives a response code of 404" in ret.stderr @@ -680,7 +680,7 @@ def test_missing_contrib_download(caplog): def test_patchset_inspect(datadir, script_runner): command = f'pyhf patchset inspect {datadir.joinpath("example_patchset.json")}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert 'patch_channel1_signal_syst1' in ret.stdout @@ -694,7 +694,7 @@ def test_patchset_extract(datadir, tmpdir, script_runner, output_file, with_meta if with_metadata: command += " --with-metadata" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success if output_file: @@ -714,7 +714,7 @@ def test_patchset_extract(datadir, tmpdir, script_runner, output_file, with_meta def test_patchset_verify(datadir, script_runner): command = f'pyhf patchset verify {datadir.joinpath("example_bkgonly.json")} {datadir.joinpath("example_patchset.json")}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success assert 'All good' in ret.stdout @@ -727,7 +727,7 @@ def 
test_patchset_apply(datadir, tmpdir, script_runner, output_file): if output_file: command += f" --output-file {temp.strpath}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success if output_file: @@ -743,21 +743,21 @@ def test_patchset_apply(datadir, tmpdir, script_runner, output_file): def test_sort(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) command = f'pyhf sort {temp.strpath}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success def test_sort_outfile(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) tempout = tmpdir.join("sort_output.json") command = f'pyhf sort {temp.strpath} --output-file {tempout.strpath}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success From 980eb738b3fb15dbe3d160b9e9d07eb8d9f71474 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Mon, 22 May 2023 21:07:56 -0500 Subject: [PATCH 19/42] docs: Add sphinx_rtd_theme to extensions (#2220) * sphinx_rtd_theme must be added to the sphinx extensions in sphinx v6.0+ to properly load jQuery. - c.f. https://sphinx-rtd-theme.readthedocs.io/en/1.2.0/changelog.html#known-issues --- docs/conf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/conf.py b/docs/conf.py index 30b9f2c6aa..cda874ee89 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -55,6 +55,7 @@ def setup(app): 'sphinx.ext.viewcode', 'sphinx.ext.githubpages', 'sphinx.ext.intersphinx', + 'sphinx_rtd_theme', 'sphinxcontrib.bibtex', 'sphinx.ext.napoleon', 'sphinx_click.ext', From afba6da7e892d0296807d006e5b6abce305e59f7 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Wed, 31 May 2023 16:08:34 -0500 Subject: [PATCH 20/42] ci: Use scientific-python-nightly-wheels package index for dependency nightlies (#2225) * Nightly wheels for the Scientific Python community have migrated from the Anaconda Cloud https://anaconda.org/scipy-wheels-nightly/ package index to https://anaconda.org/scientific-python-nightly-wheels. - c.f. https://scientific-python.org/specs/spec-0004/ - c.f. https://github.com/scientific-python/specs/pull/182 - c.f. https://github.com/matplotlib/matplotlib/pull/25950 * For matplotlib use --index-url of scientific-python-nightly-wheels and use --extra-index-url of public PyPI (https://pypi.org/) as some of matplotlib's dependencies aren't on scientific-python-nightly-wheels. 
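As an illustrative aside (not part of the patch below), one way to confirm that a nightly rather than a release wheel was actually picked up after such an install is to check the installed version string, which for dev builds carries a `.dev` segment; the exact version shown in the comment is only an assumed example:

# Illustrative check only: nightly/dev wheels advertise developmental versions
# (e.g. something like "1.12.0.dev0+..."), while release wheels advertise plain
# versions like "1.11.1".
import scipy

print(scipy.__version__)
assert ".dev" in scipy.__version__, "expected a nightly (dev) build of SciPy"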
--- .github/workflows/dependencies-head.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/dependencies-head.yml b/.github/workflows/dependencies-head.yml index 1b69121271..5fe1e19eba 100644 --- a/.github/workflows/dependencies-head.yml +++ b/.github/workflows/dependencies-head.yml @@ -64,7 +64,7 @@ jobs: python -m pip install --upgrade pip setuptools wheel python -m pip --no-cache-dir --quiet install --upgrade .[test] python -m pip uninstall --yes scipy - python -m pip install --upgrade --index-url https://pypi.anaconda.org/scipy-wheels-nightly/simple scipy + python -m pip install --upgrade --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple scipy python -m pip list - name: Test with pytest @@ -143,12 +143,13 @@ jobs: python -m pip install --upgrade pip setuptools wheel python -m pip --no-cache-dir --quiet install --upgrade .[test] python -m pip uninstall --yes matplotlib - # Need to use --extra-index-url as dependencies aren't on scipy-wheels-nightly package index. + # Need to use --extra-index-url as dependencies aren't on scientific-python-nightly-wheels package index. # Need to use --pre as dev releases will need priority over stable releases. python -m pip install \ --upgrade \ --pre \ - --extra-index-url https://pypi.anaconda.org/scipy-wheels-nightly/simple \ + --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple \ + --extra-index-url https://pypi.org/simple/ \ matplotlib - name: List installed Python packages From f2fe377f224b624f1a2e9606ec114e9f78d65ce2 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Thu, 1 Jun 2023 23:46:18 -0500 Subject: [PATCH 21/42] docs: Add use citation from Jay Chan's Ph.D. thesis (#2226) * Add use citation from 'Investigation of Higgs Boson Decaying to Di-muon, Dark Matter Produced in Association with a Higgs Boson Decaying to b-quarks and Unbinned Profiled Unfolding'. - c.f. https://inspirehep.net/literature/2664141 - Ph.D. 
dissertation of Jay Chan --- docs/bib/use_citations.bib | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/docs/bib/use_citations.bib b/docs/bib/use_citations.bib index 8a47a1e04f..fa3fcd0852 100644 --- a/docs/bib/use_citations.bib +++ b/docs/bib/use_citations.bib @@ -1,3 +1,15 @@ +% 2023-05-30 +@phdthesis{Chan:2023kah, + author = "Chan, Jay", + title = "{Investigation of Higgs Boson Decaying to Di-muon, Dark Matter Produced in Association with a Higgs Boson Decaying to $b$-quarks and Unbinned Profiled Unfolding}", + eprint = "2305.19436", + archivePrefix = "arXiv", + primaryClass = "hep-ex", + school = "University of Wisconsin-Madison", + month = "5", + year = "2023" +} + % 2023-05-08 @article{Belle-II:2023bnh, author = "Belle II Collaboration", From 6bdc5010a45c5654f1ab38e308b5ddf65df969e0 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 6 Jun 2023 00:19:54 -0500 Subject: [PATCH 22/42] chore: [pre-commit.ci] pre-commit autoupdate (#2227) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update pre-commit hooks: - github.com/charliermarsh/ruff-pre-commit: v0.0.263 → v0.0.270 - github.com/pre-commit/mirrors-mypy: v1.2.0 → v1.3.0 --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c81ca55b45..db9eae418b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -27,7 +27,7 @@ repos: exclude: ^validation/|\.dtd$|\.xml$ - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: "v0.0.263" + rev: "v0.0.270" hooks: - id: ruff args: ["--fix", "--show-fixes"] @@ -44,7 +44,7 @@ repos: additional_dependencies: [black==23.3.0] - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.2.0 + rev: v1.3.0 # check the oldest and newest supported Pythons hooks: - &mypy @@ -62,7 +62,7 @@ repos: rev: 1.7.0 hooks: - id: nbqa-ruff - additional_dependencies: [ruff==0.0.263] + additional_dependencies: [ruff==0.0.270] args: ["--extend-ignore=F821,F401,F841,F811"] - repo: https://github.com/codespell-project/codespell From 617301d7653cf179a51a08b4c0053af6c6842d22 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Wed, 7 Jun 2023 11:40:40 -0500 Subject: [PATCH 23/42] fix: Add filterwarnings ignore for ml-dtypes DeprecationWarning (#2229) * Add an ignore to filterwarnings to avoid ml_dtypes DeprecationWarning from use of jaxlib > DeprecationWarning: ml_dtypes.float8_e4m3b11 is deprecated. Use ml_dtypes.float8_e4m3b11fnuz ml_dtypes deprecated this behavior in v0.2.0 (https://github.com/jax-ml/ml_dtypes/releases/tag/v0.2.0) and jaxlib will avoid the deprecation in jaxlib v0.4.12. - c.f. 
https://github.com/google/jax/pull/16277 --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index ffb42daaf1..8385fc49c5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -214,6 +214,7 @@ filterwarnings = [ 'ignore:Call to deprecated create function:DeprecationWarning', # protobuf via tensorflow 'ignore:`np.bool8` is a deprecated alias for `np.bool_`:DeprecationWarning', # numpy via tensorflow "ignore:module 'sre_constants' is deprecated:DeprecationWarning", # tensorflow v2.12.0+ for Python 3.11+ + "ignore:ml_dtypes.float8_e4m3b11 is deprecated.", #FIXME: Can remove when jaxlib>=0.4.12 ] [tool.coverage.run] From 233d1839b5b2de176b6dca79b8772010926a06c8 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Wed, 7 Jun 2023 12:22:22 -0500 Subject: [PATCH 24/42] test: Ignore DESY Indico presentation URL during Sphinx linkcheck (#2230) * Ignore URLs for https://indico.desy.de/event/22731 from Sphinx linkcheck tests as the URL https://indico.desy.de/event/22731/contributions/47953/ is frequently generating 403 Client Error. --- docs/conf.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/conf.py b/docs/conf.py index cda874ee89..3d70174c56 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -515,6 +515,8 @@ def setup(app): r'https://doi\.org/10\.31526/.*', # https://doi.org/10.1051/epjconf/x DOI URLs will periodically generate 500 Server Error r'https://doi\.org/10\.1051/epjconf/.*', + # https://indico.desy.de/event/22731/contributions/47953/ is frequently generating 403 Client Error + r'https://indico.desy.de/event/22731/.*', # tags for a release won't exist until it is made, but the release notes # and ReadTheDocs need to reference them r'https://github.com/scikit-hep/pyhf/releases/tag/.*', From 09371dac3615924ecd0af98a6890f1c3711ad4d1 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Wed, 7 Jun 2023 12:51:28 -0500 Subject: [PATCH 25/42] docs: Add general use citation from Julia in HEP paper (#2231) * Add general citation from 'Potential of the Julia programming language for high energy physics computing'. - c.f. https://inspirehep.net/literature/2666479 --- docs/bib/general_citations.bib | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/docs/bib/general_citations.bib b/docs/bib/general_citations.bib index 30b69cf29d..1657266291 100644 --- a/docs/bib/general_citations.bib +++ b/docs/bib/general_citations.bib @@ -1,3 +1,15 @@ +% 2023-06-06 +@article{Eschle:2023ikn, + author = "Eschle, J. and others", + title = "{Potential of the Julia programming language for high energy physics computing}", + eprint = "2306.03675", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "6", + year = "2023", + journal = "" +} + % 2023-02-02 @article{Bockelman:2023gbj, author = "Bockelman, Brian and Elmer, Peter and Watts, Gordon", From b94107e7830de79c55240d1d98ffe1467f03b9e8 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Fri, 16 Jun 2023 14:23:19 -0500 Subject: [PATCH 26/42] docs: Use SVG version of logo for docs (#2234) * Add SVG version of the pyhf logo to docs/_static/img/pyhf-logo.svg - Keep old PNG version for the time being. * Use the SVG version in the docs. --- README.rst | 2 +- docs/_static/img/pyhf-logo.svg | 165 +++++++++++++++++++++++++++++++++ 2 files changed, 166 insertions(+), 1 deletion(-) create mode 100644 docs/_static/img/pyhf-logo.svg diff --git a/README.rst b/README.rst index 14d38d62aa..e17abf6b0d 100644 --- a/README.rst +++ b/README.rst @@ -1,4 +1,4 @@ -.. 
image:: https://raw.githubusercontent.com/scikit-hep/pyhf/main/docs/_static/img/pyhf-logo-small.png +.. image:: https://raw.githubusercontent.com/scikit-hep/pyhf/main/docs/_static/img/pyhf-logo.svg :alt: pyhf logo :width: 320 :align: center diff --git a/docs/_static/img/pyhf-logo.svg b/docs/_static/img/pyhf-logo.svg new file mode 100644 index 0000000000..2769a0a809 --- /dev/null +++ b/docs/_static/img/pyhf-logo.svg @@ -0,0 +1,165 @@ + [165 lines of SVG markup defining the pyhf logo, elided from this excerpt] From 68b92c0c77410bd8b935e5e51f2794ae4f919fce Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Thu, 22 Jun 2023 11:51:18 +0200 Subject: [PATCH 27/42] docs: Add use citation from mapyde paper (#2236) * Add use citation from 'Reduce, Reuse, Reinterpret: an end-to-end pipeline for recycling particle physics results'. - c.f. https://inspirehep.net/literature/2669860 --- docs/bib/use_citations.bib | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/docs/bib/use_citations.bib b/docs/bib/use_citations.bib index fa3fcd0852..1545ce85ce 100644 --- a/docs/bib/use_citations.bib +++ b/docs/bib/use_citations.bib @@ -1,3 +1,15 @@ +% 2023-06-19 +@article{Stark:2023ont, + author = "Stark, Giordon and Ots, Camila Aristimuno and Hance, Mike", + title = "{Reduce, Reuse, Reinterpret: an end-to-end pipeline for recycling particle physics results}", + eprint = "2306.11055", + archivePrefix = "arXiv", + primaryClass = "hep-ex", + month = "6", + year = "2023", + journal = "" +} + % 2023-05-30 @phdthesis{Chan:2023kah, author = "Chan, Jay", From 9dbdb7eb9df6a1c7151524277a0023334c6959d4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Jun 2023 01:26:11 +0200 Subject: [PATCH 28/42] build(deps): bump pypa/gh-action-pypi-publish from 1.8.6 to 1.8.7 (#2237) Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.6 to 1.8.7. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.6...v1.8.7) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ...
Signed-off-by: dependabot[bot] --- .github/workflows/publish-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish-package.yml b/.github/workflows/publish-package.yml index d39034a63c..98f0798386 100644 --- a/.github/workflows/publish-package.yml +++ b/.github/workflows/publish-package.yml @@ -123,13 +123,13 @@ jobs: if: >- (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && github.repository == 'scikit-hep/pyhf') || (github.event_name == 'workflow_dispatch' && github.event.inputs.publish == 'true' && github.repository == 'scikit-hep/pyhf') - uses: pypa/gh-action-pypi-publish@v1.8.6 + uses: pypa/gh-action-pypi-publish@v1.8.7 with: repository-url: https://test.pypi.org/legacy/ print-hash: true - name: Publish distribution 📦 to PyPI if: github.event_name == 'release' && github.event.action == 'published' && github.repository == 'scikit-hep/pyhf' - uses: pypa/gh-action-pypi-publish@v1.8.6 + uses: pypa/gh-action-pypi-publish@v1.8.7 with: print-hash: true From 383a178c96ce595f0f531b8c793b410fd5f29963 Mon Sep 17 00:00:00 2001 From: Henry Schreiner Date: Mon, 3 Jul 2023 16:14:45 -0400 Subject: [PATCH 29/42] chore: target-version no longer needed by Black or Ruff (#2239) * Remove target-version from Black and Ruff metadata in pyproject.toml. - c.f. https://github.com/scientific-python/cookie/issues/201 --- pyproject.toml | 2 -- 1 file changed, 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 8385fc49c5..81ae1feb74 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -153,7 +153,6 @@ packages = ["src/pyhf"] [tool.black] line-length = 88 -target-version = ['py38', 'py39', 'py310', 'py311'] skip-string-normalization = true include = '\.pyi?$' exclude = ''' @@ -295,7 +294,6 @@ ignore = [ "RUF001", # String contains ambiguous unicode character "RUF005", # unpack-instead-of-concatenating-to-collection-literal ] -target-version = "py38" src = ["src"] typing-modules = ["pyhf.typing"] unfixable = [ From 951912f12a9624b5bfb7a3dd46c68fd78ef9b61b Mon Sep 17 00:00:00 2001 From: Henry Schreiner Date: Mon, 3 Jul 2023 16:35:45 -0400 Subject: [PATCH 30/42] chore: ruff moved to astral-sh (#2238) * Update Ruff pre-commit hook url to https://github.com/astral-sh/ruff-pre-commit. - c.f. https://github.com/scientific-python/cookie/pull/200 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index db9eae418b..8f263e21ac 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -26,7 +26,7 @@ repos: # exclude generated files exclude: ^validation/|\.dtd$|\.xml$ -- repo: https://github.com/charliermarsh/ruff-pre-commit +- repo: https://github.com/astral-sh/ruff-pre-commit rev: "v0.0.270" hooks: - id: ruff From a25f196fc1684f38048ef27bb920e1610d2508b6 Mon Sep 17 00:00:00 2001 From: Alexander Held <45009355+alexander-held@users.noreply.github.com> Date: Wed, 5 Jul 2023 06:38:19 +0200 Subject: [PATCH 31/42] fix: Replace deprecated np.product by np.prod (#2242) * Use np.prod API over np.product as np.product is deprecated as of NumPy v1.25.0. - c.f. 
https://numpy.org/devdocs/release/1.25.0-notes.html#deprecations --- docs/examples/notebooks/pytorch_tests_onoff.ipynb | 2 +- src/pyhf/tensor/numpy_backend.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/examples/notebooks/pytorch_tests_onoff.ipynb b/docs/examples/notebooks/pytorch_tests_onoff.ipynb index b4c0780a46..b1830d14ad 100644 --- a/docs/examples/notebooks/pytorch_tests_onoff.ipynb +++ b/docs/examples/notebooks/pytorch_tests_onoff.ipynb @@ -35,7 +35,7 @@ " self.auxdata.append(bkg_over_bsq)\n", "\n", " def alphas(self, pars):\n", - " return np.product([pars, self.bkg_over_db_squared], axis=0)\n", + " return np.prod([pars, self.bkg_over_db_squared], axis=0)\n", "\n", " def logpdf(self, a, alpha):\n", " return _log_poisson_impl(a, alpha)\n", diff --git a/src/pyhf/tensor/numpy_backend.py b/src/pyhf/tensor/numpy_backend.py index b1bdcf7880..bb96393937 100644 --- a/src/pyhf/tensor/numpy_backend.py +++ b/src/pyhf/tensor/numpy_backend.py @@ -254,7 +254,7 @@ def sum(self, tensor_in: Tensor[T], axis: int | None = None) -> ArrayLike: return np.sum(tensor_in, axis=axis) def product(self, tensor_in: Tensor[T], axis: Shape | None = None) -> ArrayLike: - return np.product(tensor_in, axis=axis) # type: ignore[arg-type] + return np.prod(tensor_in, axis=axis) # type: ignore[arg-type] def abs(self, tensor: Tensor[T]) -> ArrayLike: return np.abs(tensor) From b654be9d68935442e5b4e3b1acacaa49439d7d68 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 5 Jul 2023 10:24:20 -0500 Subject: [PATCH 32/42] chore: [pre-commit.ci] pre-commit autoupdate (#2240) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update pre-commit hooks: - github.com/astral-sh/ruff-pre-commit: v0.0.270 → v0.0.276 - github.com/asottile/blacken-docs: 1.13.0 → 1.14.0 - github.com/pre-commit/mirrors-mypy: v1.3.0 → v1.4.1 - github.com/codespell-project/codespell: v2.2.4 → v2.2.5 * Add ClassVar type hint and apply isort. - Avoid RUF012 Mutable class attributes should be annotated with `typing.ClassVar`. 
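As an illustrative aside (a minimal sketch of the pattern, not pyhf's actual module), the RUF012 guidance amounts to annotating shared, mutable class-level state with `typing.ClassVar`, which is what the diff below does for `Workspace.valid_joins`:

# Minimal sketch of the RUF012 fix; the class body here is simplified and
# only stands in for the real pyhf.workspace.Workspace.
from __future__ import annotations

from typing import ClassVar


class Workspace(dict):
    # ClassVar marks this mutable list as intentional class-level state,
    # rather than a bare mutable class attribute that Ruff's RUF012 flags.
    valid_joins: ClassVar[list[str]] = ["none", "outer", "left outer", "right outer"]


print(Workspace.valid_joins)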
--- .pre-commit-config.yaml | 10 +++++----- src/pyhf/workspace.py | 16 ++++++++++------ 2 files changed, 15 insertions(+), 11 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8f263e21ac..a207343b89 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -27,7 +27,7 @@ repos: exclude: ^validation/|\.dtd$|\.xml$ - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.0.270" + rev: "v0.0.276" hooks: - id: ruff args: ["--fix", "--show-fixes"] @@ -38,13 +38,13 @@ repos: - id: black-jupyter - repo: https://github.com/asottile/blacken-docs - rev: 1.13.0 + rev: 1.14.0 hooks: - id: blacken-docs additional_dependencies: [black==23.3.0] - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.3.0 + rev: v1.4.1 # check the oldest and newest supported Pythons hooks: - &mypy @@ -62,11 +62,11 @@ repos: rev: 1.7.0 hooks: - id: nbqa-ruff - additional_dependencies: [ruff==0.0.270] + additional_dependencies: [ruff==0.0.276] args: ["--extend-ignore=F821,F401,F841,F811"] - repo: https://github.com/codespell-project/codespell - rev: v2.2.4 + rev: v2.2.5 hooks: - id: codespell files: ^.*\.(py|md|rst)$ diff --git a/src/pyhf/workspace.py b/src/pyhf/workspace.py index 5d1bfcc169..00abcf77f4 100644 --- a/src/pyhf/workspace.py +++ b/src/pyhf/workspace.py @@ -5,14 +5,18 @@ * the observed data (optional) * fit configurations ("measurements") """ +from __future__ import annotations + +import collections +import copy import logging +from typing import ClassVar + import jsonpatch -import copy -import collections -from pyhf import exceptions -from pyhf import schema -from pyhf.pdf import Model + +from pyhf import exceptions, schema from pyhf.mixins import _ChannelSummaryMixin +from pyhf.pdf import Model log = logging.getLogger(__name__) @@ -284,7 +288,7 @@ class Workspace(_ChannelSummaryMixin, dict): A JSON-serializable object that is built from an object that follows the :obj:`workspace.json` `schema `__. """ - valid_joins = ['none', 'outer', 'left outer', 'right outer'] + valid_joins: ClassVar[list[str]] = ['none', 'outer', 'left outer', 'right outer'] def __init__(self, spec, validate: bool = True, **config_kwargs): """ From a28d1a3b293efb3d034b0d55a75b9735d0e5d46f Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Wed, 5 Jul 2023 12:00:12 -0500 Subject: [PATCH 33/42] feat: Use non-root default user for Docker image (#2243) * Add non-root default user 'moby' with uid 1000 that owns the Python virtual environment. - Set default working directory to /home/moby/work/. * Add .dockerignore for local builds. 
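As an illustrative aside (not part of the patch below), the defaults introduced here can be sanity-checked from inside a container, for example by running the interpreter directly against an image built from docker/Dockerfile (e.g. docker run --rm -i --entrypoint python <your-image-tag> - < check_defaults.py, where both the image tag and the file name are placeholders); a small script along these lines should report uid 1000 and the /home/moby/work working directory:

# Illustrative check only (POSIX container environment assumed).
import os
from pathlib import Path

# Expect uid 1000 for the "moby" user, /home/moby as home,
# and /home/moby/work as the default working directory.
print("uid:", os.getuid())
print("home:", Path.home())
print("cwd:", Path.cwd())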
--- .dockerignore | 2 ++ docker/Dockerfile | 35 +++++++++++++++++++++++++++++++++-- 2 files changed, 35 insertions(+), 2 deletions(-) create mode 100644 .dockerignore diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000..37269f7472 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,2 @@ +.nox +.*cache diff --git a/docker/Dockerfile b/docker/Dockerfile index 93b4751711..50c2f31e95 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -16,16 +16,47 @@ RUN apt-get -qq -y update && \ python -m venv /usr/local/venv && \ cd /code && \ python -m pip --no-cache-dir install --upgrade pip setuptools wheel && \ - python -m pip --no-cache-dir install .[xmlio,contrib] && \ + python -m pip --no-cache-dir install '.[xmlio,contrib]' && \ python -m pip list FROM base + +USER root + +SHELL [ "/bin/bash", "-c" ] ENV PATH=/usr/local/venv/bin:"${PATH}" + RUN apt-get -qq -y update && \ apt-get -qq -y install --no-install-recommends \ curl && \ apt-get -y autoclean && \ apt-get -y autoremove && \ rm -rf /var/lib/apt/lists/* -COPY --from=builder /usr/local/venv /usr/local/venv + +# Create non-root user "moby" with uid 1000 +RUN adduser \ + --shell /bin/bash \ + --gecos "default user" \ + --uid 1000 \ + --disabled-password \ + moby && \ + chown -R moby /home/moby && \ + mkdir /work && \ + chown -R moby /work && \ + echo -e "\nexport PATH=/usr/local/venv/bin:${PATH}\n" >> /home/moby/.bashrc + +COPY --from=builder --chown=moby /usr/local/venv /usr/local/venv/ + +USER moby + +ENV USER ${USER} +ENV HOME /home/moby +WORKDIR ${HOME}/work + +# Use C.UTF-8 locale to avoid issues with ASCII encoding +ENV LC_ALL=C.UTF-8 +ENV LANG=C.UTF-8 + +ENV PATH=${HOME}/.local/bin:${PATH} + ENTRYPOINT ["/usr/local/venv/bin/pyhf"] From 576a855a3d60cfaf59ad21b0eb357f4c7396ba9c Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Thu, 6 Jul 2023 17:28:34 -0500 Subject: [PATCH 34/42] fix: Add filterwarnings ignore for jsonschema.RefResolver DeprecationWarning (#2246) * Add an ignore to filterwarnings to avoid jsonschema.RefResolver DeprecationWarning > DeprecationWarning: jsonschema.RefResolver is deprecated as of v4.18.0, in favor > of the https://github.com/python-jsonschema/referencing library, which provides > more compliant referencing behavior as well as more flexible APIs for customization. > A future release will remove RefResolver. Please file a feature request > (on referencing) if you are missing an API for the kind of customization you need. This filterwarning should be removed once pyhf updates to referencing. - c.f. https://github.com/scikit-hep/pyhf/issues/2139 --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 81ae1feb74..e857c501e2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -214,6 +214,7 @@ filterwarnings = [ 'ignore:`np.bool8` is a deprecated alias for `np.bool_`:DeprecationWarning', # numpy via tensorflow "ignore:module 'sre_constants' is deprecated:DeprecationWarning", # tensorflow v2.12.0+ for Python 3.11+ "ignore:ml_dtypes.float8_e4m3b11 is deprecated.", #FIXME: Can remove when jaxlib>=0.4.12 + "ignore:jsonschema.RefResolver is deprecated as of v4.18.0, in favor of the:DeprecationWarning", # Issue #2139 ] [tool.coverage.run] From 64346a89de5be032dc2a0feb909b230bc3b259c8 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Thu, 6 Jul 2023 17:46:41 -0500 Subject: [PATCH 35/42] docs: Add use citation from SModelS v2.3 paper (#2245) * Add use citation from 'SModelS v2.3: enabling global likelihood analyses'. - c.f. 
https://inspirehep.net/literature/2673443 --- docs/bib/use_citations.bib | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/docs/bib/use_citations.bib b/docs/bib/use_citations.bib index 1545ce85ce..1b947d37bf 100644 --- a/docs/bib/use_citations.bib +++ b/docs/bib/use_citations.bib @@ -1,3 +1,15 @@ +% 2023-06-30 +@article{MahdiAltakach:2023bdn, + author = "Mahdi Altakach, Mohammad and Kraml, Sabine and Lessa, Andre and Narasimha, Sahana and Pascal, Timoth\'ee and Waltenberger, Wolfgang", + title = "{SModelS v2.3: enabling global likelihood analyses}", + eprint = "2306.17676", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "6", + year = "2023", + journal = "" +} + % 2023-06-19 @article{Stark:2023ont, author = "Stark, Giordon and Ots, Camila Aristimuno and Hance, Mike", From 4845a154c18edfd5e2b29ef94167b2818fa6e674 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Jul 2023 21:56:00 -0500 Subject: [PATCH 36/42] build(deps): bump actions/upload-pages-artifact from 1 to 2 (#2248) * Bumps [actions/upload-pages-artifact](https://github.com/actions/upload-pages-artifact) from 1 to 2. - [Release notes](https://github.com/actions/upload-pages-artifact/releases) - [Commits](actions/upload-pages-artifact@v1...v2) --- updated-dependencies: - dependency-name: actions/upload-pages-artifact dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/docs.yml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 4ca72e4ff4..73ff09d5c1 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -81,8 +81,14 @@ jobs: # is not empty [ "$(ls -A docs/_build/html/lite)" ] + - name: Fix permissions if needed + run: | + chmod -c -R +rX "docs/_build/html/" | while read line; do + echo "::warning title=Invalid file permissions automatically fixed::$line" + done + - name: Upload artifact - uses: actions/upload-pages-artifact@v1 + uses: actions/upload-pages-artifact@v2 with: path: 'docs/_build/html' From 9797a210e22d2a7b173aef9d438f476f95726ab2 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Sat, 15 Jul 2023 14:59:07 -0500 Subject: [PATCH 37/42] feat: Add devcontainer config (#2250) * Add devcontainer config using the pyhf Dockerfile. - c.f. https://containers.dev/ - Motivated by Sarah Kaiser's SciPy 2023 lightning talk. --- .devcontainer/devcontainer.json | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 .devcontainer/devcontainer.json diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 0000000000..1640f58a37 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,22 @@ +{ + "name": "pyhf-devcontainer", + "build": { + "context": "..", + "dockerfile": "../docker/Dockerfile" + }, + "features": { + "ghcr.io/devcontainers/features/git:1": {} + }, + + "customizations": { + "vscode": { + "settings": { + "python.defaultInterpreterPath": "/opt/venv/bin/python" + }, + "extensions": [ + "ms-python.python", + "ms-toolsai.jupyter" + ] + } + } +} From ff9cb94025e5485b23ea81a06ce8916055297c7f Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Sat, 15 Jul 2023 17:01:18 -0500 Subject: [PATCH 38/42] docs: Update Binder to launch into JupyterLab environment (#2252) * Install ipywidgets and ipympl to have interactive IPython widgets in modern JupyterLab. 
* Use from IPython.display import display to avoid using deprecated APIs. * Use %matplotlib widget to get interactive matplotlib in JupyterLab. * Use 'labpath' to open up file paths in JupyterLab. --- README.rst | 2 +- binder/postBuild | 5 ++++- docs/examples.rst | 2 +- .../notebooks/binderexample/StatisticalAnalysis.ipynb | 4 ++-- 4 files changed, 8 insertions(+), 5 deletions(-) diff --git a/README.rst b/README.rst index e17abf6b0d..b152c5e90e 100644 --- a/README.rst +++ b/README.rst @@ -372,7 +372,7 @@ and grant `OAC-1450377 Date: Tue, 18 Jul 2023 00:14:47 -0500 Subject: [PATCH 39/42] build(deps): bump pypa/gh-action-pypi-publish from 1.8.7 to 1.8.8 (#2254) Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.7 to 1.8.8. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.7...v1.8.8) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/publish-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish-package.yml b/.github/workflows/publish-package.yml index 98f0798386..91e8b73ad1 100644 --- a/.github/workflows/publish-package.yml +++ b/.github/workflows/publish-package.yml @@ -123,13 +123,13 @@ jobs: if: >- (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && github.repository == 'scikit-hep/pyhf') || (github.event_name == 'workflow_dispatch' && github.event.inputs.publish == 'true' && github.repository == 'scikit-hep/pyhf') - uses: pypa/gh-action-pypi-publish@v1.8.7 + uses: pypa/gh-action-pypi-publish@v1.8.8 with: repository-url: https://test.pypi.org/legacy/ print-hash: true - name: Publish distribution 📦 to PyPI if: github.event_name == 'release' && github.event.action == 'published' && github.repository == 'scikit-hep/pyhf' - uses: pypa/gh-action-pypi-publish@v1.8.7 + uses: pypa/gh-action-pypi-publish@v1.8.8 with: print-hash: true From fb604e89c274aabfc3435e2e0efd504b549b51f3 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Tue, 18 Jul 2023 01:14:24 -0500 Subject: [PATCH 40/42] fix: Add ipympl to 'test' extra (#2255) * Add ipympl>=0.3.0 to the 'test' extra environment as it is used in the notebooks as of PR #2252. The lower bound of v0.3.0 is chosen as this corresponds in time with the 'contrib' lower bound on matplotlib of matplotlib>=3.0.0. * Amends PR #2252. --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index e857c501e2..fa17cf50cf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -96,6 +96,7 @@ test = [ "pytest-benchmark[histogram]", "pytest-console-scripts>=1.4.0", "pytest-mpl", + "ipympl>=0.3.0", "pydocstyle", "papermill~=2.3.4", "scrapbook~=0.5.0", From a021fcd17feb660576525cda767c768b1fdceaa3 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Thu, 20 Jul 2023 16:39:31 -0500 Subject: [PATCH 41/42] docs: Add use citation from Spey paper (#2258) * Add use citation from 'Spey: smooth inference for reinterpretation studies'. - c.f. 
https://inspirehep.net/literature/2677291 - Use comes from spey-pyhf optional backend https://github.com/SpeysideHEP/spey-pyhf --- docs/bib/use_citations.bib | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/docs/bib/use_citations.bib b/docs/bib/use_citations.bib index 1b947d37bf..e7f0c9e921 100644 --- a/docs/bib/use_citations.bib +++ b/docs/bib/use_citations.bib @@ -1,3 +1,16 @@ +% 2023-07-13 +@article{Araz:2023bwx, + author = "Araz, Jack Y.", + title = "{Spey: smooth inference for reinterpretation studies}", + eprint = "2307.06996", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + reportNumber = "IPPP/23/34", + month = "7", + year = "2023", + journal = "" +} + % 2023-06-30 @article{MahdiAltakach:2023bdn, author = "Mahdi Altakach, Mohammad and Kraml, Sabine and Lessa, Andre and Narasimha, Sahana and Pascal, Timoth\'ee and Waltenberger, Wolfgang", From bd8c21a33b2f3ce8b41ea78cb351eef91c21b92b Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Thu, 20 Jul 2023 17:29:41 -0500 Subject: [PATCH 42/42] docs: Add use citation from Gauge SU(2)f flavour paper (#2259) * Add use citation from 'Gauge SU(2)f flavour transfers'. - c.f. https://inspirehep.net/literature/2678488 --- docs/bib/use_citations.bib | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/docs/bib/use_citations.bib b/docs/bib/use_citations.bib index e7f0c9e921..994df7ba1e 100644 --- a/docs/bib/use_citations.bib +++ b/docs/bib/use_citations.bib @@ -1,3 +1,16 @@ +% 2023-07-18 +@article{Darme:2023nsy, + author = "Darm\'e, Luc and Deandrea, Aldo and Mahmoudi, Farvah", + title = "{Gauge $SU(2)_f$ flavour transfers}", + eprint = "2307.09595", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + reportNumber = "CERN-TH-2023-139", + month = "7", + year = "2023", + journal = "" +} + % 2023-07-13 @article{Araz:2023bwx, author = "Araz, Jack Y.",