diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 0000000000..1640f58a37 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,22 @@ +{ + "name": "pyhf-devcontainer", + "build": { + "context": "..", + "dockerfile": "../docker/Dockerfile" + }, + "features": { + "ghcr.io/devcontainers/features/git:1": {} + }, + + "customizations": { + "vscode": { + "settings": { + "python.defaultInterpreterPath": "/usr/local/venv/bin/python" + }, + "extensions": [ + "ms-python.python", + "ms-toolsai.jupyter" + ] + } + } +} diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000..37269f7472 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,2 @@ +.nox +.*cache diff --git a/.github/ISSUE_TEMPLATE/~release-checklist.md b/.github/ISSUE_TEMPLATE/~release-checklist.md index 12de456ab8..36a7426c85 100644 --- a/.github/ISSUE_TEMPLATE/~release-checklist.md +++ b/.github/ISSUE_TEMPLATE/~release-checklist.md @@ -10,23 +10,40 @@ about: Checklist for core developers to complete as part of making a release * [ ] Migrate any unresolved Issues or PRs from the [release GitHub project board](https://github.com/scikit-hep/pyhf/projects/) to a new project board. * [ ] Verify that there is a release notes file for the release under [``docs/release-notes``](https://github.com/scikit-hep/pyhf/tree/main/docs/release-notes). * [ ] Verify that the release notes files correctly summarize all development changes since the last release. -* [ ] Draft email to [``pyhf-announcements`` mailing list](https://groups.google.com/group/pyhf-announcements/subscribe) that summarizes the main points of the release notes and circulate it for development team approval. * [ ] Update the checklist Issue template in the [``.github/ISSUE_TEMPLATE``](https://github.com/scikit-hep/pyhf/tree/main/.github/ISSUE_TEMPLATE) directory if there are revisions. -* [ ] Make a release to [TestPyPI][TestPyPI_pyhf] using the [workflow dispatch event trigger](https://github.com/scikit-hep/pyhf/actions/workflows/publish-package.yml). -* [ ] Verify that the project README is displaying correctly on [TestPyPI][TestPyPI_pyhf]. * [ ] Add any new use citations or published statistical models to the [Use and Citations page][citations_page]. -* [ ] Verify that the citations on the [Use and Citations page][citations_page] are up to date with their current [INSPIRE](https://inspirehep.net/) record. -* [ ] Update the [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) GitHub Action used for deployment to TestPyPI and PyPI to the latest stable release. +* [ ] Verify that the citations on the [Use and Citations page][citations_page] are up to date with their current [INSPIRE](https://inspirehep.net/) record. Checking the [Dimensions listing of publication citations](https://app.dimensions.ai/discover/publication?or_subset_publication_citations=pub.1135154020) can be helpful to catch citations that are now journal publications. * [ ] Update the ``codemeta.json`` file in the release PR if its requirements have updated. +* [ ] Update the [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) GitHub Action used for deployment to TestPyPI and PyPI to the latest stable release. +* [ ] Make a release to [TestPyPI][TestPyPI_pyhf] using the [workflow dispatch event trigger](https://github.com/scikit-hep/pyhf/actions/workflows/publish-package.yml). +* [ ] Verify that the project README is displaying correctly on [TestPyPI][TestPyPI_pyhf].
+* [ ] Draft email to [``pyhf-announcements`` mailing list](https://groups.google.com/group/pyhf-announcements/subscribe) that summarizes the main points of the release notes and circulate it for development team approval. [TestPyPI_pyhf]: https://test.pypi.org/project/pyhf/ [citations_page]: https://scikit-hep.org/pyhf/citations.html -## Once Release PR is Merged +## Create Release Tag -* [ ] Watch the CI to ensure that the deployment to [PyPI](https://pypi.org/project/pyhf/) is successful. -* [ ] Create a [GitHub release](https://github.com/scikit-hep/pyhf/releases) from the generated PR tag and copy the release notes published to the GitHub release page. The creation of the GitHub release triggers all other release related activities. +For a video walkthrough, consult the [``pyhf`` ``v0.7.1`` release recording](https://youtu.be/ZV20tr3EpTw) on YouTube. + +* [ ] Use the [bump version](https://github.com/scikit-hep/pyhf/actions/workflows/bump-version.yml) GitHub Actions workflow to perform a [dry run](https://scikit-hep.org/pyhf/development.html#release-tags) of the bump version to the new release tag. +* [ ] Check the annotated tag in the dry run workflow logs to make sure it looks correct. +* [ ] If the dry run passes as expected, run the same workflow with the dry run option set to ``false`` to bump the release tag version and push the new tag back to GitHub. +* [ ] Verify the release tag was pushed to the correct branch. +* [ ] Verify the release tag commit has bumped the correct versions. +* [ ] Watch the CI to verify all tag-based jobs finish correctly. +* [ ] Verify the release for the tag on [TestPyPI][TestPyPI_pyhf] looks correct. + +## After Release Tag Pushed To GitHub + +* [ ] Create a [GitHub release](https://github.com/scikit-hep/pyhf/releases) from the new release tag and copy the release notes published to the GitHub release page. The creation of the GitHub release triggers all other release-related activities. - [ ] Before pasting in the release notes, copy the changes that the GitHub bot has already queued up and pasted into the tag and place them in the "Changes" section of the release notes. If the release notes are published before these are copied then they will be overwritten and you'll have to add them back in by hand. + - [ ] Create a corresponding [announcement GitHub Discussion](https://github.com/scikit-hep/pyhf/discussions/categories/announcements) for the release. +* [ ] Watch the CI to ensure that the deployment to [PyPI](https://pypi.org/project/pyhf/) is successful. +* [ ] Verify Docker images with the correct tags have been deployed to all container image registries. - [ ] [Docker Hub](https://hub.docker.com/r/pyhf/pyhf/tags) - [ ] [OSG Harbor](https://hub.opensciencegrid.org/harbor/projects/866/repositories/pyhf/) - [ ] [CERN Harbor](https://registry.cern.ch/harbor/projects/3550/repositories/pyhf/artifacts-tab) * [ ] Verify there is a new [Zenodo DOI](https://doi.org/10.5281/zenodo.1169739) minted for the release. - [ ] Verify that the new release archive metadata on Zenodo is being picked up as expected from [`CITATION.cff`](https://github.com/scikit-hep/pyhf/blob/main/CITATION.cff). * [ ] Verify that a Binder has properly built for the new release. @@ -44,6 +61,5 @@ about: Checklist for core developers to complete as part of making a release * [ ] Update the [tutorial](https://github.com/pyhf/pyhf-tutorial) to use the new release number and API.
* [ ] Make a PR to use the new release in the [CUDA enabled Docker images](https://github.com/pyhf/cuda-images). * [ ] Open a ticket on the CERN [Software Process and Infrastructure JIRA](https://sft.its.cern.ch/jira/browse/SPI) to update the version of `pyhf` available in the next LCG release. - - c.f. the [`v0.6.3` request ticket](https://sft.its.cern.ch/jira/browse/SPI-2086) as an example. -* [ ] If the release is a **major** or **minor** release, open a [GitHub Release Radar](https://github.com/github/release-radar) Issue for the release to potentially get featured on GitHub's [Release Radar blog](https://github.blog/?s=release+radar). + - c.f. the [`v0.6.3` request ticket](https://sft.its.cern.ch/jira/browse/SPI-2086) and the [`v0.7.1` request ticket](https://sft.its.cern.ch/jira/browse/SPI-2333) as examples. * [ ] Close the [release GitHub Project board](https://github.com/scikit-hep/pyhf/projects/). diff --git a/.github/workflows/dependencies-head.yml b/.github/workflows/dependencies-head.yml index 1b69121271..5fe1e19eba 100644 --- a/.github/workflows/dependencies-head.yml +++ b/.github/workflows/dependencies-head.yml @@ -64,7 +64,7 @@ jobs: python -m pip install --upgrade pip setuptools wheel python -m pip --no-cache-dir --quiet install --upgrade .[test] python -m pip uninstall --yes scipy - python -m pip install --upgrade --index-url https://pypi.anaconda.org/scipy-wheels-nightly/simple scipy + python -m pip install --upgrade --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple scipy python -m pip list - name: Test with pytest @@ -143,12 +143,13 @@ jobs: python -m pip install --upgrade pip setuptools wheel python -m pip --no-cache-dir --quiet install --upgrade .[test] python -m pip uninstall --yes matplotlib - # Need to use --extra-index-url as dependencies aren't on scipy-wheels-nightly package index. + # Need to use --extra-index-url as dependencies aren't on scientific-python-nightly-wheels package index. # Need to use --pre as dev releases will need priority over stable releases. python -m pip install \ --upgrade \ --pre \ - --extra-index-url https://pypi.anaconda.org/scipy-wheels-nightly/simple \ + --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple \ + --extra-index-url https://pypi.org/simple/ \ matplotlib - name: List installed Python packages diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 65a49e5039..73ff09d5c1 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -78,9 +78,17 @@ jobs: [ ! 
-L "docs/_build/html/schemas" ] # is not empty [ "$(ls -A docs/_build/html/schemas)" ] + # is not empty + [ "$(ls -A docs/_build/html/lite)" ] + + - name: Fix permissions if needed + run: | + chmod -c -R +rX "docs/_build/html/" | while read line; do + echo "::warning title=Invalid file permissions automatically fixed::$line" + done - name: Upload artifact - uses: actions/upload-pages-artifact@v1 + uses: actions/upload-pages-artifact@v2 with: path: 'docs/_build/html' diff --git a/.github/workflows/merged.yml b/.github/workflows/merged.yml index 2f6342cf1e..3ea8a5cc4d 100644 --- a/.github/workflows/merged.yml +++ b/.github/workflows/merged.yml @@ -15,5 +15,4 @@ jobs: - name: Trigger Binder build run: | # Use Binder build API to trigger repo2docker to build image on Google Cloud and Turing Institute Binder Federation clusters - bash binder/trigger_binder.sh https://gke.mybinder.org/build/gh/scikit-hep/pyhf/main bash binder/trigger_binder.sh https://turing.mybinder.org/build/gh/scikit-hep/pyhf/main diff --git a/.github/workflows/publish-package.yml b/.github/workflows/publish-package.yml index 2b5aa67ffa..91e8b73ad1 100644 --- a/.github/workflows/publish-package.yml +++ b/.github/workflows/publish-package.yml @@ -25,8 +25,8 @@ concurrency: cancel-in-progress: true jobs: - build-and-publish: - name: Build and publish Python distro to (Test)PyPI + build: + name: Build Python distribution runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 @@ -88,21 +88,48 @@ jobs: - name: List contents of wheel run: python -m zipfile --list dist/pyhf-*.whl + - name: Upload distribution artifact + uses: actions/upload-artifact@v3 + with: + name: dist-artifact + path: dist + + publish: + name: Publish Python distribution to (Test)PyPI + if: github.event_name != 'pull_request' + needs: build + runs-on: ubuntu-latest + # Mandatory for publishing with a trusted publisher + # c.f. 
https://docs.pypi.org/trusted-publishers/using-a-publisher/ + permissions: + id-token: write + # Restrict to the environment set for the trusted publisher + environment: + name: publish-package + + steps: + - name: Download distribution artifact + uses: actions/download-artifact@v3 + with: + name: dist-artifact + path: dist + + - name: List all files + run: ls -lh dist + - name: Publish distribution 📦 to Test PyPI # Publish to TestPyPI on tag events or if manually triggered # Compare to 'true' string as booleans get turned into strings in the console if: >- (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && github.repository == 'scikit-hep/pyhf') || (github.event_name == 'workflow_dispatch' && github.event.inputs.publish == 'true' && github.repository == 'scikit-hep/pyhf') - uses: pypa/gh-action-pypi-publish@v1.8.5 + uses: pypa/gh-action-pypi-publish@v1.8.8 with: - password: ${{ secrets.test_pypi_password }} repository-url: https://test.pypi.org/legacy/ print-hash: true - name: Publish distribution 📦 to PyPI if: github.event_name == 'release' && github.event.action == 'published' && github.repository == 'scikit-hep/pyhf' - uses: pypa/gh-action-pypi-publish@v1.8.5 + uses: pypa/gh-action-pypi-publish@v1.8.8 with: - password: ${{ secrets.pypi_password }} print-hash: true diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b5f99da3cf..a207343b89 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -26,8 +26,8 @@ repos: # exclude generated files exclude: ^validation/|\.dtd$|\.xml$ -- repo: https://github.com/charliermarsh/ruff-pre-commit - rev: "v0.0.260" +- repo: https://github.com/astral-sh/ruff-pre-commit + rev: "v0.0.276" hooks: - id: ruff args: ["--fix", "--show-fixes"] @@ -38,13 +38,13 @@ repos: - id: black-jupyter - repo: https://github.com/asottile/blacken-docs - rev: 1.13.0 + rev: 1.14.0 hooks: - id: blacken-docs additional_dependencies: [black==23.3.0] - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.1.1 + rev: v1.4.1 # check the oldest and newest supported Pythons hooks: - &mypy @@ -62,11 +62,11 @@ repos: rev: 1.7.0 hooks: - id: nbqa-ruff - additional_dependencies: [ruff==0.0.260] + additional_dependencies: [ruff==0.0.276] args: ["--extend-ignore=F821,F401,F841,F811"] - repo: https://github.com/codespell-project/codespell - rev: v2.2.4 + rev: v2.2.5 hooks: - id: codespell files: ^.*\.(py|md|rst)$ diff --git a/.zenodo.json b/.zenodo.json index 12e93a79a9..642133318a 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -1,8 +1,8 @@ { "description": "pure-Python HistFactory implementation with tensors and autodiff", "license": "Apache-2.0", - "title": "scikit-hep/pyhf: v0.7.1", - "version": "v0.7.1", + "title": "scikit-hep/pyhf: v0.7.2", + "version": "v0.7.2", "upload_type": "software", "creators": [ { @@ -36,7 +36,7 @@ "related_identifiers": [ { "scheme": "url", - "identifier": "https://github.com/scikit-hep/pyhf/tree/v0.7.1", + "identifier": "https://github.com/scikit-hep/pyhf/tree/v0.7.2", "relation": "isSupplementTo" } ] diff --git a/CITATION.cff b/CITATION.cff index 9f10a88534..19396e412d 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -14,11 +14,11 @@ authors: given-names: "Giordon" orcid: "https://orcid.org/0000-0001-6616-3433" affiliation: "SCIPP, University of California, Santa Cruz" -title: "pyhf: v0.7.1" -version: 0.7.1 +title: "pyhf: v0.7.2" +version: 0.7.2 doi: 10.5281/zenodo.1169739 -repository-code: "https://github.com/scikit-hep/pyhf/releases/tag/v0.7.1" -url: "https://pyhf.readthedocs.io/en/v0.7.1/"
+repository-code: "https://github.com/scikit-hep/pyhf/releases/tag/v0.7.2" +url: "https://pyhf.readthedocs.io/en/v0.7.2/" keywords: - python - physics diff --git a/README.rst b/README.rst index c2dc4cdbc0..b152c5e90e 100644 --- a/README.rst +++ b/README.rst @@ -1,4 +1,4 @@ -.. image:: https://raw.githubusercontent.com/scikit-hep/pyhf/main/docs/_static/img/pyhf-logo-small.png +.. image:: https://raw.githubusercontent.com/scikit-hep/pyhf/main/docs/_static/img/pyhf-logo.svg :alt: pyhf logo :width: 320 :align: center @@ -309,11 +309,11 @@ the preferred BibTeX entry for citation of ``pyhf`` includes both the @software{pyhf, author = {Lukas Heinrich and Matthew Feickert and Giordon Stark}, - title = "{pyhf: v0.7.1}", - version = {0.7.1}, + title = "{pyhf: v0.7.2}", + version = {0.7.2}, doi = {10.5281/zenodo.1169739}, url = {https://doi.org/10.5281/zenodo.1169739}, - note = {https://github.com/scikit-hep/pyhf/releases/tag/v0.7.1} + note = {https://github.com/scikit-hep/pyhf/releases/tag/v0.7.2} } @article{pyhf_joss, @@ -365,14 +365,14 @@ and grant `OAC-1450377 > /home/moby/.bashrc + +COPY --from=builder --chown=moby /usr/local/venv /usr/local/venv/ + +USER moby + +ENV USER ${USER} +ENV HOME /home/moby +WORKDIR ${HOME}/work + +# Use C.UTF-8 locale to avoid issues with ASCII encoding +ENV LC_ALL=C.UTF-8 +ENV LANG=C.UTF-8 + +ENV PATH=${HOME}/.local/bin:${PATH} + ENTRYPOINT ["/usr/local/venv/bin/pyhf"] diff --git a/docs/_static/img/pyhf-logo.svg b/docs/_static/img/pyhf-logo.svg new file mode 100644 index 0000000000..2769a0a809 --- /dev/null +++ b/docs/_static/img/pyhf-logo.svg @@ -0,0 +1,165 @@ + [165 lines of SVG markup for the pyhf logo; vector image content not shown] diff --git a/docs/bib/general_citations.bib b/docs/bib/general_citations.bib index 30b69cf29d..1657266291 100644 --- a/docs/bib/general_citations.bib +++ b/docs/bib/general_citations.bib @@ -1,3 +1,15 @@ +% 2023-06-06 +@article{Eschle:2023ikn, + author = "Eschle, J.
and others", + title = "{Potential of the Julia programming language for high energy physics computing}", + eprint = "2306.03675", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "6", + year = "2023", + journal = "" +} + % 2023-02-02 @article{Bockelman:2023gbj, author = "Bockelman, Brian and Elmer, Peter and Watts, Gordon", diff --git a/docs/bib/talks.bib b/docs/bib/talks.bib index b6538e9b8e..efe3363da9 100644 --- a/docs/bib/talks.bib +++ b/docs/bib/talks.bib @@ -1,6 +1,98 @@ % NB: entries with same author-title-year are not picked up: % https://github.com/mcmtroffaes/sphinxcontrib-bibtex/issues/117 +@unpublished{Feickert_20230412, + title = {{pyhf: pure-Python implementation of HistFactory with tensors and automatic differentiation}}, + author = {Matthew Feickert}, + year = {2023}, + month = {Apr}, + day = {12}, + note = {(Internal) CMS Common Analysis Tools General Meeting (April 2023)}, + url = {https://indico.cern.ch/event/1264029/contributions/5308065/}, +} + +@unpublished{Feickert_20230303, + title = {{How to contribute to pyhf development}}, + author = {Matthew Feickert}, + year = {2023}, + month = {Mar}, + day = {3}, + note = {Belle II pyhf workshop 2023}, + url = {https://indico.belle2.org/event/8470/contributions/55871/}, +} + +@unpublished{Feickert_20220906, + title = {{pyhf and analysis optimization with automatic differentiation}}, + author = {Matthew Feickert}, + year = {2022}, + month = {Sep}, + day = {6}, + note = {(Internal) ATLAS HDBS Workshop 2022}, + url = {https://indico.cern.ch/event/1132691/contributions/4994710/}, +} + +@unpublished{Feickert_20220708, + title = {{pyhf: pure-Python statistical fitting library with tensors and automatic differentiation}}, + author = {Matthew Feickert}, + year = {2022}, + month = {Jul}, + day = {8}, + note = {International Conference on High Energy Physics (ICHEP) 2022}, + url = {https://agenda.infn.it/event/28874/contributions/169217/}, +} + +@unpublished{Feickert_20220425, + title = {{Statistical inference: pyhf and cabinetry}}, + author = {Matthew Feickert}, + year = {2022}, + month = {Apr}, + day = {25}, + note = {IRIS-HEP Analysis Grand Challenge Tools 2022 Workshop}, + url = {https://indico.cern.ch/event/1126109/contributions/4780155/}, +} + +@unpublished{Feickert_20211201, + title = {{pyhf: pure-Python implementation of HistFactory with tensors and automatic differentiation}}, + author = {Matthew Feickert}, + year = {2021}, + month = {Dec}, + day = {1}, + note = {CMS Analysis Tools Task Force}, + url = {https://indico.cern.ch/event/1100873/contributions/4631656/}, +} + +@unpublished{Feickert_20210715, + title = {{Distributed statistical inference with pyhf powered by funcX}}, + author = {Matthew Feickert}, + year = {2021}, + month = {Jul}, + day = {15}, + note = {20th Python in Science Conference (SciPy 2021)}, + doi = {10.25080/majora-1b6fd038-023}, + url = {https://conference.scipy.org/proceedings/scipy2021/slides.html}, +} + +@unpublished{Feickert_20210706, + title = {{Distributed statistical inference with pyhf}}, + author = {Matthew Feickert}, + year = {2021}, + month = {Jul}, + day = {6}, + note = {PyHEP 2021 (virtual) Workshop}, + doi = {10.5281/zenodo.5136819}, + url = {https://indico.cern.ch/event/1019958/contributions/4418598/}, +} + +@unpublished{Feickert_20210520, + title = {{Distributed statistical inference with pyhf enabled through funcX}}, + author = {Matthew Feickert}, + year = {2021}, + month = {May}, + day = {20}, + note = {vCHEP 2021 Conference}, + url = 
{https://indico.cern.ch/event/948465/contributions/4324013/}, +} + @unpublished{Feickert_20201103, title = {{pyhf: pure-Python implementation of HistFactory with tensors and automatic differentiation}}, author = {Matthew Feickert}, @@ -53,7 +145,7 @@ @unpublished{Heinrich20191030 day = {30}, note = {1st Pan-European Advanced School on Statistics in High Energy Physics}, organization = {DESY}, - url = {https://indico.desy.de/indico/event/22731/session/4/contribution/19}, + url = {https://indico.desy.de/event/22731/contributions/47953/}, } @unpublished{Stark20191023, diff --git a/docs/bib/tutorials.bib b/docs/bib/tutorials.bib index 5960a29bb7..94329a2eb0 100644 --- a/docs/bib/tutorials.bib +++ b/docs/bib/tutorials.bib @@ -1,6 +1,17 @@ % NB: entries with same author-title-year are not picked up: % https://github.com/mcmtroffaes/sphinxcontrib-bibtex/issues/117 +@unpublished{Feickert_20210407, + title = {{Tutorial on pyhf}}, + author = {Matthew Feickert}, + year = {2021}, + month = {Apr}, + day = {7}, + note = {PyHEP Python Module of the Month (April 2021)}, + doi = {10.5281/zenodo.4670322}, + url = {https://indico.cern.ch/event/985425/}, +} + @unpublished{GStark20200925, title = {{ATLAS Exotics + SUSY Workshop 2020 pyhf Tutorial}}, author = {Giordon Stark}, diff --git a/docs/bib/use_citations.bib b/docs/bib/use_citations.bib index 2b7276593f..994df7ba1e 100644 --- a/docs/bib/use_citations.bib +++ b/docs/bib/use_citations.bib @@ -1,3 +1,78 @@ +% 2023-07-18 +@article{Darme:2023nsy, + author = "Darm\'e, Luc and Deandrea, Aldo and Mahmoudi, Farvah", + title = "{Gauge $SU(2)_f$ flavour transfers}", + eprint = "2307.09595", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + reportNumber = "CERN-TH-2023-139", + month = "7", + year = "2023", + journal = "" +} + +% 2023-07-13 +@article{Araz:2023bwx, + author = "Araz, Jack Y.", + title = "{Spey: smooth inference for reinterpretation studies}", + eprint = "2307.06996", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + reportNumber = "IPPP/23/34", + month = "7", + year = "2023", + journal = "" +} + +% 2023-06-30 +@article{MahdiAltakach:2023bdn, + author = "Mahdi Altakach, Mohammad and Kraml, Sabine and Lessa, Andre and Narasimha, Sahana and Pascal, Timoth\'ee and Waltenberger, Wolfgang", + title = "{SModelS v2.3: enabling global likelihood analyses}", + eprint = "2306.17676", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "6", + year = "2023", + journal = "" +} + +% 2023-06-19 +@article{Stark:2023ont, + author = "Stark, Giordon and Ots, Camila Aristimuno and Hance, Mike", + title = "{Reduce, Reuse, Reinterpret: an end-to-end pipeline for recycling particle physics results}", + eprint = "2306.11055", + archivePrefix = "arXiv", + primaryClass = "hep-ex", + month = "6", + year = "2023", + journal = "" +} + +% 2023-05-30 +@phdthesis{Chan:2023kah, + author = "Chan, Jay", + title = "{Investigation of Higgs Boson Decaying to Di-muon, Dark Matter Produced in Association with a Higgs Boson Decaying to $b$-quarks and Unbinned Profiled Unfolding}", + eprint = "2305.19436", + archivePrefix = "arXiv", + primaryClass = "hep-ex", + school = "University of Wisconsin-Madison", + month = "5", + year = "2023" +} + +% 2023-05-08 +@article{Belle-II:2023bnh, + author = "Belle II Collaboration", + title = "{Search for lepton-flavor-violating $\tau^- \to \ell^-\phi$ decays in 2019-2021 Belle II data}", + eprint = "2305.04759", + archivePrefix = "arXiv", + primaryClass = "hep-ex", + reportNumber = "BELLE2-CONF-2023-004", + month = "5", + year = 
"2023", + journal = "" +} + % 2023-04-11 @article{Shadura:2023zks, author = "Shadura, Oksana and Held, Alexander", @@ -66,9 +141,11 @@ @article{Berger:2023bat eprint = "2301.05676", archivePrefix = "arXiv", primaryClass = "hep-ex", - month = "1", - year = "2023", - journal = "" + doi = "10.1007/JHEP04(2023)084", + journal = "JHEP", + volume = "04", + pages = "084", + year = "2023" } % 2022-12-06 @@ -154,14 +231,16 @@ @article{Buanes:2022wgm % 2022-07-21 @article{Allwicher:2022mcg, author = "Allwicher, Lukas and Faroughy, Darius. A. and Jaffredo, Florentin and Sumensari, Olcyr and Wilsch, Felix", - title = "{HighPT: A Tool for high-$p_T$ Drell-Yan Tails Beyond the Standard Model}", + title = "{HighPT: A tool for high-pT Drell-Yan tails beyond the standard model}", eprint = "2207.10756", archivePrefix = "arXiv", primaryClass = "hep-ph", reportNumber = "ZU-TH-29/22", - month = "7", - year = "2022", - journal = "" + doi = "10.1016/j.cpc.2023.108749", + journal = "Comput. Phys. Commun.", + volume = "289", + pages = "108749", + year = "2023" } % 2022-07-15 diff --git a/docs/conf.py b/docs/conf.py index e48765b18b..3d70174c56 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -15,12 +15,21 @@ # add these directories to sys.path here. If the directory is relative to the # documentation root, use Path('../relative_path_to_dir').resolve() to make it absolute, like shown here. -from pathlib import Path import sys +from pathlib import Path + +import jupytext from pkg_resources import get_distribution sys.path.insert(0, str(Path('./exts').resolve())) +# Convert jupyterlite example to ipynb +docs_dir = Path(__file__).resolve().parent +py_percent_as_notebook = jupytext.read(docs_dir / "lite" / "jupyterlite.py") +jupytext.write( + py_percent_as_notebook, docs_dir / "lite" / "jupyterlite.ipynb", fmt="ipynb" +) + def setup(app): app.add_css_file( @@ -46,14 +55,15 @@ def setup(app): 'sphinx.ext.viewcode', 'sphinx.ext.githubpages', 'sphinx.ext.intersphinx', + 'sphinx_rtd_theme', 'sphinxcontrib.bibtex', 'sphinx.ext.napoleon', 'sphinx_click.ext', 'nbsphinx', 'sphinx_issues', 'sphinx_copybutton', - 'sphinx_togglebutton', 'xref', + 'jupyterlite_sphinx', ] bibtex_bibfiles = [ "bib/docs.bib", @@ -177,6 +187,7 @@ def setup(app): exclude_patterns = [ '_build', 'JOSS', + 'lite', '**.ipynb_checkpoints', 'examples/experiments/edwardpyhf.ipynb', 'examples/notebooks/ImpactPlot.ipynb', @@ -274,6 +285,10 @@ def setup(app): html_js_files = [ 'js/custom.js', + ( + 'https://views.scientific-python.org/js/plausible.js', + {"data-domain": "pyhf.readthedocs.io", "defer": "defer"}, + ), ] # Add any extra paths that contain custom files (such as robots.txt or @@ -500,9 +515,14 @@ def setup(app): r'https://doi\.org/10\.31526/.*', # https://doi.org/10.1051/epjconf/x DOI URLs will periodically generate 500 Server Error r'https://doi\.org/10\.1051/epjconf/.*', + # https://indico.desy.de/event/22731/contributions/47953/ is frequently generating 403 Client Error + r'https://indico.desy.de/event/22731/.*', # tags for a release won't exist until it is made, but the release notes # and ReadTheDocs need to reference them r'https://github.com/scikit-hep/pyhf/releases/tag/.*', r'https://pyhf.readthedocs.io/en/.*', ] linkcheck_retries = 50 + +# JupyterLite configuration +jupyterlite_dir = "lite" diff --git a/docs/development.rst b/docs/development.rst index a35b2cba96..324b8110bf 100644 --- a/docs/development.rst +++ b/docs/development.rst @@ -200,6 +200,7 @@ The maintainer needs to: * Select the semantic versioning (SemVer) type (major, minor, 
patch) of the release tag. * Select if the release tag is a release candidate or not. * Input the SemVer version number of the release tag. +* Select the branch to push the new release tag to. * Select if to override the SemVer compatibility of the previous options (default is to run checks). * Select if a dry run should be performed (default is to do a dry run to avoid accidental diff --git a/docs/examples.rst b/docs/examples.rst index 8a9cfa19e8..255cf18d41 100644 --- a/docs/examples.rst +++ b/docs/examples.rst @@ -4,7 +4,7 @@ Examples Try out in Binder! |Binder| .. |Binder| image:: https://mybinder.org/badge_logo.svg - :target: https://mybinder.org/v2/gh/scikit-hep/pyhf/main?filepath=docs%2Fexamples%2Fnotebooks%2Fbinderexample%2FStatisticalAnalysis.ipynb + :target: https://mybinder.org/v2/gh/scikit-hep/pyhf/main?labpath=docs%2Fexamples%2Fnotebooks%2Fbinderexample%2FStatisticalAnalysis.ipynb Notebooks: diff --git a/docs/examples/notebooks/binderexample/StatisticalAnalysis.ipynb b/docs/examples/notebooks/binderexample/StatisticalAnalysis.ipynb index 42c3b4656d..8385164ba9 100644 --- a/docs/examples/notebooks/binderexample/StatisticalAnalysis.ipynb +++ b/docs/examples/notebooks/binderexample/StatisticalAnalysis.ipynb @@ -16,7 +16,7 @@ "from pyhf.contrib.viz import brazil\n", "\n", "import base64\n", - "from IPython.core.display import display, HTML\n", + "from IPython.display import display, HTML\n", "from ipywidgets import interact, fixed" ] }, @@ -1248,7 +1248,7 @@ } ], "source": [ - "%matplotlib notebook\n", + "%matplotlib widget\n", "fig, ax = plt.subplots(1, 1)\n", "fig.set_size_inches(10, 5)\n", "ax.set_ylim(0, 1.5 * np.max(workspace.data(pdf, include_auxdata=False)))\n", diff --git a/docs/examples/notebooks/pytorch_tests_onoff.ipynb b/docs/examples/notebooks/pytorch_tests_onoff.ipynb index b4c0780a46..b1830d14ad 100644 --- a/docs/examples/notebooks/pytorch_tests_onoff.ipynb +++ b/docs/examples/notebooks/pytorch_tests_onoff.ipynb @@ -35,7 +35,7 @@ " self.auxdata.append(bkg_over_bsq)\n", "\n", " def alphas(self, pars):\n", - " return np.product([pars, self.bkg_over_db_squared], axis=0)\n", + " return np.prod([pars, self.bkg_over_db_squared], axis=0)\n", "\n", " def logpdf(self, a, alpha):\n", " return _log_poisson_impl(a, alpha)\n", diff --git a/docs/generate_jupyterlite_iframe.py b/docs/generate_jupyterlite_iframe.py deleted file mode 100644 index 31a49feac8..0000000000 --- a/docs/generate_jupyterlite_iframe.py +++ /dev/null @@ -1,30 +0,0 @@ -import urllib.parse - - -def main(): - code = """\ -import piplite -await piplite.install(["pyhf==0.7.1", "matplotlib>=3.0.0"]) -%matplotlib inline -import pyhf\ -""" - - parsed_url = urllib.parse.quote(code) - url_base = "https://jupyterlite.github.io/demo/repl/index.html" - jupyterlite_options = "?kernel=python&toolbar=1&code=" - jupyterlite_url = url_base + jupyterlite_options + parsed_url - - print(f"# jupyterlite URL:\n{jupyterlite_url}") - - jupyterlite_iframe_rst = f"""\ - \ -""" - print(f"\n# RST for iframe for jupyterlite.rst:\n{jupyterlite_iframe_rst}") - - -if __name__ == "__main__": - raise SystemExit(main()) diff --git a/docs/jupyterlite.rst b/docs/jupyterlite.rst index 88b942bb58..2fc7e4ba35 100644 --- a/docs/jupyterlite.rst +++ b/docs/jupyterlite.rst @@ -1,30 +1,14 @@ Try out now with JupyterLite_ ----------------------------- -.. admonition:: To try pyhf code in the browser with the Pyolite JupyterLite kernel: - :class: dropdown - - #. Type (or copy and paste) code in the input cell. - #. 
To execute the code, press ``Shift + Enter`` or click on the (Run) ▶ button in the toolbar. - - To get going try copying and pasting the "Hello World" example below! - - .. admonition:: Kernel not loading? - :class: dropdown - - If the kernel isn't loading and you just see a blank iframe, you will need your - browser to accept cookies from jupyterlite.github.io and then reload the page. - .. - Comment: iframe below generated by docs/generate_jupyterlite_iframe.py - -.. raw:: html + Comment: Use https://github.com/jupyterlite/jupyterlite-sphinx - +.. retrolite:: lite/jupyterlite.ipynb + :width: 100% + :height: 600px + :prompt: Try pyhf! + :prompt_color: #3a77b0 .. Comment: Add an extra blank line as a spacer diff --git a/docs/lite/jupyter-lite.json b/docs/lite/jupyter-lite.json new file mode 100644 index 0000000000..db3ecaa59d --- /dev/null +++ b/docs/lite/jupyter-lite.json @@ -0,0 +1,8 @@ +{ + "jupyter-lite-schema-version": 0, + "jupyter-config-data": { + "enableMemoryStorage": true, + "settingsStorageDrivers": ["memoryStorageDriver"], + "contentsStorageDrivers": ["memoryStorageDriver"] + } +} diff --git a/docs/lite/jupyterlite.py b/docs/lite/jupyterlite.py new file mode 100644 index 0000000000..e285cdd616 --- /dev/null +++ b/docs/lite/jupyterlite.py @@ -0,0 +1,27 @@ +# --- +# jupyter: +# kernelspec: +# display_name: Python (Pyodide) +# language: python +# name: python +# --- + +# %% [markdown] +# # `pyhf` in the browser + +# %% [markdown] +# * To run the code, click on the first cell (gray box) and press Shift+Enter or click on the (Run) ▶ button to run each cell. +# * Alternatively, from the `Run` menu select `Run All Cells`. +# * Feel free to experiment, and if you need to restore the original code reload this browser page. Any changes you make will be lost when you reload. +# +# To get going try copying and pasting the "Hello World" example below! + +# %% +import piplite + +# Install pyhf in the browser +await piplite.install(["pyhf==0.7.2", "matplotlib>=3.0.0"]) +# %matplotlib inline +import pyhf + +# You can now use pyhf! diff --git a/docs/lite/jupytext.toml b/docs/lite/jupytext.toml new file mode 100644 index 0000000000..2e5c093f23 --- /dev/null +++ b/docs/lite/jupytext.toml @@ -0,0 +1,3 @@ +# Always pair ipynb notebooks in the current directory to py:percent files +formats = ["ipynb", "py:percent"] +notebook_metadata_filter = "-all,kernelspec" diff --git a/docs/release-notes.rst b/docs/release-notes.rst index 8698351460..ec60c92997 100644 --- a/docs/release-notes.rst +++ b/docs/release-notes.rst @@ -2,6 +2,7 @@ Release Notes ============= +.. include:: release-notes/v0.7.2.rst .. include:: release-notes/v0.7.1.rst .. include:: release-notes/v0.7.0.rst .. include:: release-notes/v0.6.3.rst diff --git a/docs/release-notes/v0.7.1.rst b/docs/release-notes/v0.7.1.rst index 6682a642e0..bd8680952f 100644 --- a/docs/release-notes/v0.7.1.rst +++ b/docs/release-notes/v0.7.1.rst @@ -9,7 +9,7 @@ Important Notes * All backends are now fully compatible and tested with `Python 3.11 `_. (PR :pr:`2145`) -* The ``tensorflow`` extra (``'pyhf[tensorlfow]'``) now automatically installs +* The ``tensorflow`` extra (``'pyhf[tensorflow]'``) now automatically installs ``tensorflow-macos`` for Apple silicon machines. 
(PR :pr:`2119`) diff --git a/docs/release-notes/v0.7.2.rst b/docs/release-notes/v0.7.2.rst new file mode 100644 index 0000000000..35cf8e095c --- /dev/null +++ b/docs/release-notes/v0.7.2.rst @@ -0,0 +1,43 @@ +|release v0.7.2|_ +================= + +This is a patch release from ``v0.7.1`` → ``v0.7.2``. + +Important Notes +--------------- + +* ``pyhf`` became a `NumFOCUS Affiliated Project + <https://numfocus.org/sponsored-projects/affiliated-projects>`__ on + 2022-12-19. |NumFOCUS Affiliated Project| + ``v0.7.1`` is the first release to appear in a NumFOCUS Newsletter and + ``v0.7.2`` is the first release to appear as part of the Affiliated Projects + page. + (PR :pr:`2179`) + +Fixes +----- + +* If a multi-component parameter of interest is used, raise + :class:`~pyhf.exceptions.InvalidModel`. + This guards against modifiers like :class:`~pyhf.modifiers.shapefactor`, + :class:`~pyhf.modifiers.shapesys`, and :class:`~pyhf.modifiers.staterror` + being used as POIs. + (PR :pr:`2197`) +* Use a :data:`typing.TYPE_CHECKING` guard to avoid causing a + :class:`ModuleNotFoundError` when the version of NumPy installed is older + than ``v1.21.0``, which is the first NumPy release to include + :mod:`numpy.typing`. + (PR :pr:`2208`) + +Contributors +------------ + +``v0.7.2`` benefited from contributions from: + +* Alexander Held + +.. |release v0.7.2| replace:: ``v0.7.2`` .. _`release v0.7.2`: https://github.com/scikit-hep/pyhf/releases/tag/v0.7.2 + +.. |NumFOCUS Affiliated Project| image:: https://img.shields.io/badge/NumFOCUS-Affiliated%20Project-orange.svg?style=flat&colorA=E1523D&colorB=007D8A :target: https://numfocus.org/sponsored-projects/affiliated-projects diff --git a/noxfile.py b/noxfile.py index a01049cbfc..ba75fae440 100644 --- a/noxfile.py +++ b/noxfile.py @@ -130,7 +130,7 @@ def docs(session): Example: - $ nox --session docs -- serve + $ nox --session docs -- serve # Needed for local jupyterlite preview $ nox --session docs -- clean """ session.install("--upgrade", "--editable", ".[backends,contrib,docs]") diff --git a/pyproject.toml b/pyproject.toml index 07a651dc0f..32c0949671 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -95,8 +95,9 @@ test = [ "pytest-mock", "requests-mock>=1.9.0", "pytest-benchmark[histogram]", - "pytest-console-scripts", + "pytest-console-scripts>=1.4.0", "pytest-mpl", + "ipympl>=0.3.0", "pydocstyle", "papermill~=2.3.4", "scrapbook~=0.5.0", @@ -113,8 +114,10 @@ docs = [ "nbsphinx!=0.8.8", # c.f. https://github.com/spatialaudio/nbsphinx/issues/620 "ipywidgets", "sphinx-issues", - "sphinx-copybutton>=0.3.2", - "sphinx-togglebutton>=0.3.0", + "sphinx-copybutton>=0.3.2,!=0.5.1", + "jupyterlite-sphinx>=0.8.0", + "jupyterlite-pyodide-kernel>=0.0.7", + "jupytext>=1.14.0", "ipython!=8.7.0", # c.f.
https://github.com/scikit-hep/pyhf/pull/2068 ] develop = [ @@ -152,7 +155,6 @@ packages = ["src/pyhf"] [tool.black] line-length = 88 -target-version = ['py38', 'py39', 'py310', 'py311'] skip-string-normalization = true include = '\.pyi?$' exclude = ''' @@ -213,6 +215,8 @@ filterwarnings = [ 'ignore:Call to deprecated create function:DeprecationWarning', # protobuf via tensorflow 'ignore:`np.bool8` is a deprecated alias for `np.bool_`:DeprecationWarning', # numpy via tensorflow "ignore:module 'sre_constants' is deprecated:DeprecationWarning", # tensorflow v2.12.0+ for Python 3.11+ + "ignore:ml_dtypes.float8_e4m3b11 is deprecated.", #FIXME: Can remove when jaxlib>=0.4.12 + "ignore:jsonschema.RefResolver is deprecated as of v4.18.0, in favor of the:DeprecationWarning", # Issue #2139 ] [tool.coverage.run] @@ -224,6 +228,9 @@ omit = ["*/pyhf/typing.py"] precision = 1 sort = "cover" show_missing = true +exclude_also = [ + "if TYPE_CHECKING:" +] [tool.mypy] files = "src" @@ -292,10 +299,12 @@ ignore = [ "RUF001", # String contains ambiguous unicode character "RUF005", # unpack-instead-of-concatenating-to-collection-literal ] -target-version = "py38" src = ["src"] typing-modules = ["pyhf.typing"] unfixable = [ "F841", # Removes unused variables ] flake8-tidy-imports.ban-relative-imports = "all" + +[tool.ruff.per-file-ignores] +"docs/lite/jupyterlite.py" = ["F401", "F704"] diff --git a/src/pyhf/data/citation.bib b/src/pyhf/data/citation.bib index c7f94dc057..5cef8add01 100644 --- a/src/pyhf/data/citation.bib +++ b/src/pyhf/data/citation.bib @@ -1,10 +1,10 @@ @software{pyhf, author = {Lukas Heinrich and Matthew Feickert and Giordon Stark}, - title = "{pyhf: v0.7.1}", - version = {0.7.1}, + title = "{pyhf: v0.7.2}", + version = {0.7.2}, doi = {10.5281/zenodo.1169739}, url = {https://doi.org/10.5281/zenodo.1169739}, - note = {https://github.com/scikit-hep/pyhf/releases/tag/v0.7.1} + note = {https://github.com/scikit-hep/pyhf/releases/tag/v0.7.2} } @article{pyhf_joss, diff --git a/src/pyhf/pdf.py b/src/pyhf/pdf.py index f6cf54b362..3b55f2369a 100644 --- a/src/pyhf/pdf.py +++ b/src/pyhf/pdf.py @@ -464,10 +464,13 @@ def set_poi(self, name): raise exceptions.InvalidModel( f"The parameter of interest '{name:s}' cannot be fit as it is not declared in the model specification." ) - s = self.par_slice(name) - assert s.stop - s.start == 1 + if self.param_set(name).n_parameters > 1: + # multi-parameter modifiers are not supported as POIs + raise exceptions.InvalidModel( + f"The parameter '{name:s}' contains multiple components and is not currently supported as parameter of interest." 
+ ) self._poi_name = name - self._poi_index = s.start + self._poi_index = self.par_slice(name).start def _create_and_register_paramsets(self, required_paramsets): next_index = 0 diff --git a/src/pyhf/tensor/numpy_backend.py b/src/pyhf/tensor/numpy_backend.py index fe60abd4ee..bb96393937 100644 --- a/src/pyhf/tensor/numpy_backend.py +++ b/src/pyhf/tensor/numpy_backend.py @@ -2,10 +2,16 @@ from __future__ import annotations import logging -from typing import Callable, Generic, Mapping, Sequence, TypeVar, Union +from typing import TYPE_CHECKING, Callable, Generic, Mapping, Sequence, TypeVar, Union import numpy as np -from numpy.typing import ArrayLike, DTypeLike, NBitBase, NDArray + +# Needed while numpy lower bound is older than v1.21.0 +if TYPE_CHECKING: + from numpy.typing import ArrayLike, DTypeLike, NBitBase, NDArray +else: + NBitBase = "NBitBase" + from scipy import special from scipy.special import gammaln, xlogy from scipy.stats import norm, poisson @@ -248,7 +254,7 @@ def sum(self, tensor_in: Tensor[T], axis: int | None = None) -> ArrayLike: return np.sum(tensor_in, axis=axis) def product(self, tensor_in: Tensor[T], axis: Shape | None = None) -> ArrayLike: - return np.product(tensor_in, axis=axis) # type: ignore[arg-type] + return np.prod(tensor_in, axis=axis) # type: ignore[arg-type] def abs(self, tensor: Tensor[T]) -> ArrayLike: return np.abs(tensor) diff --git a/src/pyhf/utils.py b/src/pyhf/utils.py index 8e24ac191e..e6da9f3de2 100644 --- a/src/pyhf/utils.py +++ b/src/pyhf/utils.py @@ -111,7 +111,7 @@ def citation(oneline=False): >>> import pyhf >>> pyhf.utils.citation(oneline=True) - '@software{pyhf, author = {Lukas Heinrich and Matthew Feickert and Giordon Stark}, title = "{pyhf: v0.7.1}", version = {0.7.1}, doi = {10.5281/zenodo.1169739}, url = {https://doi.org/10.5281/zenodo.1169739}, note = {https://github.com/scikit-hep/pyhf/releases/tag/v0.7.1}}@article{pyhf_joss, doi = {10.21105/joss.02823}, url = {https://doi.org/10.21105/joss.02823}, year = {2021}, publisher = {The Open Journal}, volume = {6}, number = {58}, pages = {2823}, author = {Lukas Heinrich and Matthew Feickert and Giordon Stark and Kyle Cranmer}, title = {pyhf: pure-Python implementation of HistFactory statistical models}, journal = {Journal of Open Source Software}}' + '@software{pyhf, author = {Lukas Heinrich and Matthew Feickert and Giordon Stark}, title = "{pyhf: v0.7.2}", version = {0.7.2}, doi = {10.5281/zenodo.1169739}, url = {https://doi.org/10.5281/zenodo.1169739}, note = {https://github.com/scikit-hep/pyhf/releases/tag/v0.7.2}}@article{pyhf_joss, doi = {10.21105/joss.02823}, url = {https://doi.org/10.21105/joss.02823}, year = {2021}, publisher = {The Open Journal}, volume = {6}, number = {58}, pages = {2823}, author = {Lukas Heinrich and Matthew Feickert and Giordon Stark and Kyle Cranmer}, title = {pyhf: pure-Python implementation of HistFactory statistical models}, journal = {Journal of Open Source Software}}' Keyword Args: oneline (:obj:`bool`): Whether to provide citation with new lines (default) or as a one-liner. 
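The typing.TYPE_CHECKING guard added to src/pyhf/tensor/numpy_backend.py above follows a standard typing pattern: imports needed only by a static type checker are skipped at runtime, so NumPy releases older than v1.21.0 (which lack numpy.typing) never attempt the import. A minimal sketch of the pattern, with a hypothetical scale() function for illustration only:

from __future__ import annotations

from typing import TYPE_CHECKING

import numpy as np

# numpy.typing is imported only while a static type checker (e.g. mypy)
# analyzes the module; at runtime this block is skipped entirely, so an
# old NumPy without numpy.typing raises no ModuleNotFoundError.
if TYPE_CHECKING:
    from numpy.typing import ArrayLike


def scale(values: ArrayLike, factor: float) -> ArrayLike:
    # With 'from __future__ import annotations' every annotation is stored
    # as a string at runtime, so ArrayLike never needs to exist here.
    return np.asarray(values) * factor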
diff --git a/src/pyhf/workspace.py b/src/pyhf/workspace.py index 5d1bfcc169..00abcf77f4 100644 --- a/src/pyhf/workspace.py +++ b/src/pyhf/workspace.py @@ -5,14 +5,18 @@ * the observed data (optional) * fit configurations ("measurements") """ +from __future__ import annotations + +import collections +import copy import logging +from typing import ClassVar + import jsonpatch -import copy -import collections -from pyhf import exceptions -from pyhf import schema -from pyhf.pdf import Model + +from pyhf import exceptions, schema from pyhf.mixins import _ChannelSummaryMixin +from pyhf.pdf import Model log = logging.getLogger(__name__) @@ -284,7 +288,7 @@ class Workspace(_ChannelSummaryMixin, dict): A JSON-serializable object that is built from an object that follows the :obj:`workspace.json` `schema `__. """ - valid_joins = ['none', 'outer', 'left outer', 'right outer'] + valid_joins: ClassVar[list[str]] = ['none', 'outer', 'left outer', 'right outer'] def __init__(self, spec, validate: bool = True, **config_kwargs): """ diff --git a/tbump.toml b/tbump.toml index 01560be47c..d3f25bbd58 100644 --- a/tbump.toml +++ b/tbump.toml @@ -1,7 +1,7 @@ github_url = "https://github.com/scikit-hep/pyhf/" [version] -current = "0.7.1" +current = "0.7.2" # Example of a semver regexp. # Make sure this matches current_version before @@ -19,7 +19,7 @@ regex = ''' [git] # The current version will get updated when tbump is run -message_template = "Bump version: 0.7.1 → {new_version}" +message_template = "Bump version: 0.7.2 → {new_version}" tag_template = "v{new_version}" # For each file to patch, add a [[file]] config @@ -55,10 +55,7 @@ src = "codemeta.json" src = "CITATION.cff" [[file]] -src = "docs/generate_jupyterlite_iframe.py" - -[[file]] -src = "docs/jupyterlite.rst" +src = "docs/lite/jupyterlite.py" [[field]] # the name of the field diff --git a/tests/test_examples.py b/tests/test_examples.py index 8891384932..9d4c2a1e1c 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -3,5 +3,5 @@ def test_2bin_1channel(tmpdir, script_runner): command = f"pyhf inspect {'docs/examples/json/2-bin_1-channel.json':s}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success diff --git a/tests/test_pdf.py b/tests/test_pdf.py index 045575d725..e680dad3f6 100644 --- a/tests/test_pdf.py +++ b/tests/test_pdf.py @@ -1329,3 +1329,33 @@ def test_is_shared_paramset_shapesys_same_sample_same_channel(): with pytest.raises(pyhf.exceptions.InvalidModel): pyhf.Workspace(spec).model() + + +def test_multi_component_poi(): + spec = { + "channels": [ + { + "name": "SR", + "samples": [ + { + "data": [5.0, 10.0], + "modifiers": [ + {"data": None, "name": "mu", "type": "shapefactor"} + ], + "name": "Signal", + } + ], + } + ], + "measurements": [ + {"config": {"parameters": [], "poi": "mu"}, "name": "example"} + ], + "observations": [{"data": [5.0, 10.0], "name": "SR"}], + "version": "1.0.0", + } + + with pytest.raises( + pyhf.exceptions.InvalidModel, + match="The parameter 'mu' contains multiple components and is not currently supported as parameter of interest.", + ): + pyhf.Workspace(spec).model() diff --git a/tests/test_scripts.py b/tests/test_scripts.py index f51dbaee61..0dd88e9b8a 100644 --- a/tests/test_scripts.py +++ b/tests/test_scripts.py @@ -30,7 +30,7 @@ def tarfile_path(tmpdir): def test_version(script_runner): command = 'pyhf --version' start = time.time() - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) end = 
time.time() elapsed = end - start assert ret.success @@ -44,7 +44,7 @@ def test_version(script_runner): def test_citation(script_runner, flag): command = f'pyhf {flag}' start = time.time() - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) end = time.time() elapsed = end - start assert ret.success @@ -60,7 +60,7 @@ def test_citation(script_runner, flag): def test_import_prepHistFactory(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success assert ret.stdout == '' assert ret.stderr == '' @@ -73,7 +73,7 @@ def test_import_prepHistFactory(tmpdir, script_runner): def test_import_prepHistFactory_withProgress(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success assert ret.stdout == '' assert ret.stderr != '' @@ -81,7 +81,7 @@ def test_import_prepHistFactory_withProgress(tmpdir, script_runner): def test_import_prepHistFactory_stdout(tmpdir, script_runner): command = 'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success assert ret.stdout != '' assert ret.stderr != '' @@ -92,10 +92,10 @@ def test_import_prepHistFactory_stdout(tmpdir, script_runner): def test_import_prepHistFactory_and_fit(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) command = f"pyhf fit {temp.strpath:s}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success ret_json = json.loads(ret.stdout) @@ -110,7 +110,7 @@ def test_import_prepHistFactory_and_fit(tmpdir, script_runner): "ConstExample", ]: command = f"pyhf fit {temp.strpath:s} --value --measurement {measurement:s}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success ret_json = json.loads(ret.stdout) @@ -121,7 +121,7 @@ def test_import_prepHistFactory_and_fit(tmpdir, script_runner): tmp_out = tmpdir.join(f"{measurement:s}_output.json") # make sure output file works too command += f" --output-file {tmp_out.strpath:s}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success ret_json = json.load(tmp_out) assert "mle_parameters" in ret_json @@ -131,10 +131,10 @@ def test_import_prepHistFactory_and_fit(tmpdir, script_runner): def test_import_prepHistFactory_and_cls(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) command = f'pyhf cls {temp.strpath:s}' 
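# A minimal illustration of the pytest-console-scripts calling convention
# that the "pytest-console-scripts>=1.4.0" lower bound in pyproject.toml
# above assumes: from v1.4.0 the script_runner fixture's run() accepts the
# command as a single list of arguments, so throughout this file
#     ret = script_runner.run(*shlex.split(command))  # pre-1.4.0 vararg style
# becomes
#     ret = script_runner.run(shlex.split(command))   # list style (1.4.0+)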
- ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success d = json.loads(ret.stdout) @@ -149,7 +149,7 @@ def test_import_prepHistFactory_and_cls(tmpdir, script_runner): 'ConstExample', ]: command = f'pyhf cls {temp.strpath:s} --measurement {measurement:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success d = json.loads(ret.stdout) @@ -160,7 +160,7 @@ def test_import_prepHistFactory_and_cls(tmpdir, script_runner): tmp_out = tmpdir.join(f'{measurement:s}_output.json') # make sure output file works too command += f' --output-file {tmp_out.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success d = json.load(tmp_out) assert 'CLs_obs' in d @@ -173,7 +173,7 @@ def test_import_usingMounts(datadir, tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json --hide-progress -v {data}:/absolute/path/to -v {data}:/another/absolute/path/to --output-file {temp.strpath:s} {data.joinpath("config/example.xml")}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success assert ret.stdout == '' assert ret.stderr == '' @@ -189,7 +189,7 @@ def test_import_usingMounts_badDelimitedPaths(datadir, tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json --hide-progress -v {data}::/absolute/path/to -v {data}/another/absolute/path/to --output-file {temp.strpath:s} {data.joinpath("config/example.xml")}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert not ret.success assert ret.stdout == '' assert 'is not a valid colon-separated option' in ret.stderr @@ -199,10 +199,10 @@ def test_import_usingMounts_badDelimitedPaths(datadir, tmpdir, script_runner): def test_fit_backend_option(tmpdir, script_runner, backend): temp = tmpdir.join("parsed_output.json") command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) command = f"pyhf fit --backend {backend:s} {temp.strpath:s}" - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success ret_json = json.loads(ret.stdout) @@ -214,10 +214,10 @@ def test_fit_backend_option(tmpdir, script_runner, backend): def test_cls_backend_option(tmpdir, script_runner, backend): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) command = f'pyhf cls --backend {backend:s} {temp.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) assert ret.success d = json.loads(ret.stdout) @@ -229,10 +229,10 @@ def test_cls_backend_option(tmpdir, script_runner, backend): def test_import_and_export(tmpdir, script_runner): temp = tmpdir.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' - ret = script_runner.run(*shlex.split(command)) + ret = script_runner.run(shlex.split(command)) command = f"pyhf json2xml 
{temp.strpath:s} --output-dir {tmpdir.mkdir('output').strpath:s}"
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success
@@ -247,23 +247,23 @@ def test_patch(tmpdir, script_runner):
     temp = tmpdir.join("parsed_output.json")

     command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))

     command = f'pyhf cls {temp.strpath:s} --patch {patch.strpath:s}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success

     command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmpdir.mkdir('output_1').strpath:s} --patch {patch.strpath:s}"
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success

     command = f'pyhf cls {temp.strpath:s} --patch -'
-    ret = script_runner.run(*shlex.split(command), stdin=patch)
+    ret = script_runner.run(shlex.split(command), stdin=patch)
     assert ret.success

     command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmpdir.mkdir('output_2').strpath:s} --patch -"
-    ret = script_runner.run(*shlex.split(command), stdin=patch)
+    ret = script_runner.run(shlex.split(command), stdin=patch)
     assert ret.success
@@ -274,24 +274,24 @@ def test_patch_fail(tmpdir, script_runner):
     temp = tmpdir.join("parsed_output.json")

     command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))

     command = f'pyhf cls {temp.strpath:s} --patch {patch.strpath:s}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert not ret.success

     command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmpdir.mkdir('output').strpath:s} --patch {patch.strpath:s}"
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert not ret.success


 def test_bad_measurement_name(tmpdir, script_runner):
     temp = tmpdir.join("parsed_output.json")
     command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))

     command = f'pyhf cls {temp.strpath:s} --measurement "a-fake-measurement-name"'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert not ret.success
     # assert 'no measurement by name' in ret.stderr  # numpy swallows the log.error() here, dunno why
@@ -299,14 +299,14 @@ def test_bad_measurement_name(tmpdir, script_runner):
 def test_testpoi(tmpdir, script_runner):
     temp = tmpdir.join("parsed_output.json")
     command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))

     pois = [1.0, 0.5, 0.001]
     results_exp = []
     results_obs = []
     for test_poi in pois:
         command = f'pyhf cls {temp.strpath:s} --test-poi {test_poi:f}'
-        ret = script_runner.run(*shlex.split(command))
+        ret = script_runner.run(shlex.split(command))
         assert ret.success

         d = json.loads(ret.stdout)
@@ -334,11 +334,11 @@ def test_testpoi(tmpdir, script_runner):
 def test_fit_optimizer(tmpdir, script_runner, optimizer, opts, success):
     temp = tmpdir.join("parsed_output.json")
     command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}"
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))

     optconf = " ".join(f"--optconf {opt}" for opt in opts)
     command = f"pyhf fit --optimizer {optimizer} {optconf} {temp.strpath}"
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success == success
@@ -350,11 +350,11 @@ def test_fit_optimizer(tmpdir, script_runner, optimizer, opts, success):
 def test_cls_optimizer(tmpdir, script_runner, optimizer, opts, success):
     temp = tmpdir.join("parsed_output.json")
     command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))

     optconf = " ".join(f"--optconf {opt}" for opt in opts)
     command = f'pyhf cls {temp.strpath} --optimizer {optimizer} {optconf}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success == success
@@ -362,21 +362,21 @@ def test_cls_optimizer(tmpdir, script_runner, optimizer, opts, success):
 def test_inspect(tmpdir, script_runner):
     temp = tmpdir.join("parsed_output.json")
     command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))

     command = f'pyhf inspect {temp.strpath:s}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success


 def test_inspect_outfile(tmpdir, script_runner):
     temp = tmpdir.join("parsed_output.json")
     command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))

     tempout = tmpdir.join("inspect_output.json")
     command = f'pyhf inspect {temp.strpath:s} --output-file {tempout.strpath:s}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success

     summary = json.loads(tempout.read())
@@ -399,23 +399,23 @@ def test_inspect_outfile(tmpdir, script_runner):
 def test_prune(tmpdir, script_runner):
     temp = tmpdir.join("parsed_output.json")
     command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))

     command = (
         f"pyhf prune -m staterror_channel1 --measurement GammaExample {temp.strpath:s}"
     )
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success


 def test_prune_outfile(tmpdir, script_runner):
     temp = tmpdir.join("parsed_output.json")
     command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))

     tempout = tmpdir.join("prune_output.json")
     command = f'pyhf prune -m staterror_channel1 --measurement GammaExample {temp.strpath:s} --output-file {tempout.strpath:s}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success

     spec = json.loads(temp.read())
@@ -431,21 +431,21 @@ def test_prune_outfile(tmpdir, script_runner):
 def test_rename(tmpdir, script_runner):
     temp = tmpdir.join("parsed_output.json")
     command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))

     command = f'pyhf rename -m staterror_channel1 staterror_channelone --measurement GammaExample GamEx {temp.strpath:s}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success


 def test_rename_outfile(tmpdir, script_runner):
     temp = tmpdir.join("parsed_output.json")
     command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))

     tempout = tmpdir.join("rename_output.json")
     command = f'pyhf rename -m staterror_channel1 staterror_channelone --measurement GammaExample GamEx {temp.strpath:s} --output-file {tempout.strpath:s}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success

     spec = json.loads(temp.read())
@@ -466,7 +466,7 @@ def test_combine(tmpdir, script_runner):
     temp_1 = tmpdir.join("parsed_output.json")
     temp_2 = tmpdir.join("renamed_output.json")
     command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1.strpath:s} --hide-progress'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))

     rename_channels = {'channel1': 'channel2'}
     rename_measurements = {
@@ -483,10 +483,10 @@ def test_combine(tmpdir, script_runner):
         ' --measurement ' + ' '.join(item) for item in rename_measurements.items()
     )
     command = f"pyhf rename {temp_1.strpath:s} {_opts_channels:s} {_opts_measurements:s} --output-file {temp_2.strpath:s}"
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))

     command = f'pyhf combine {temp_1.strpath:s} {temp_2.strpath:s}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success
@@ -494,7 +494,7 @@ def test_combine_outfile(tmpdir, script_runner):
     temp_1 = tmpdir.join("parsed_output.json")
     temp_2 = tmpdir.join("renamed_output.json")
     command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1.strpath:s} --hide-progress'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))

     rename_channels = {'channel1': 'channel2'}
     rename_measurements = {
@@ -511,11 +511,11 @@ def test_combine_outfile(tmpdir, script_runner):
         ' --measurement ' + ' '.join(item) for item in rename_measurements.items()
     )
     command = f"pyhf rename {temp_1.strpath:s} {_opts_channels:s} {_opts_measurements:s} --output-file {temp_2.strpath:s}"
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))

     tempout = tmpdir.join("combined_output.json")
     command = f'pyhf combine {temp_1.strpath:s} {temp_2.strpath:s} --output-file {tempout.strpath:s}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success

     combined_spec = json.loads(tempout.read())
@@ -528,18 +528,18 @@ def test_combine_merge_channels(tmpdir, script_runner):
     temp_1 = tmpdir.join("parsed_output.json")
     temp_2 = tmpdir.join("renamed_output.json")
     command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1.strpath} --hide-progress'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success

     command = (
         f'pyhf prune {temp_1.strpath} --sample signal --output-file {temp_2.strpath}'
     )
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success

     command = f'pyhf combine --merge-channels --join "left outer" {temp_1.strpath} {temp_2.strpath}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success
@@ -555,10 +555,10 @@ def test_workspace_digest(tmpdir, script_runner, algorithms, do_json):
     temp = tmpdir.join("parsed_output.json")
     command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath} --hide-progress'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))

     command = f"pyhf digest {temp.strpath} -a {' -a '.join(algorithms)}{' -j' if do_json else ''}"
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success
     assert all(algorithm in ret.stdout for algorithm in algorithms)
     if do_json:
@@ -591,19 +591,19 @@ def test_workspace_digest(tmpdir, script_runner, algorithms, do_json):
 def test_patchset_download(tmpdir, script_runner, requests_mock, tarfile_path, archive):
     requests_mock.get(archive, content=open(tarfile_path, "rb").read())

     command = f'pyhf contrib download {archive} {tmpdir.join("likelihoods").strpath}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success

     # Run with all optional flags
     command = f'pyhf contrib download --verbose --force {archive} {tmpdir.join("likelihoods").strpath}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success

     requests_mock.get(
         "https://www.pyhfthisdoesnotexist.org/record/resource/1234567", status_code=200
     )
     command = f'pyhf contrib download --verbose https://www.pyhfthisdoesnotexist.org/record/resource/1234567 {tmpdir.join("likelihoods").strpath}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert not ret.success
     assert (
         "pyhf.exceptions.InvalidArchiveHost: www.pyhfthisdoesnotexist.org is not an approved archive host"
@@ -615,7 +615,7 @@ def test_patchset_download(tmpdir, script_runner, requests_mock, tarfile_path, a
         "https://httpstat.us/404/record/resource/1234567", status_code=404
     )
     command = f'pyhf contrib download --verbose --force https://httpstat.us/404/record/resource/1234567 {tmpdir.join("likelihoods").strpath}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert not ret.success
     assert "gives a response code of 404" in ret.stderr
@@ -680,7 +680,7 @@ def test_missing_contrib_download(caplog):
 def test_patchset_inspect(datadir, script_runner):
     command = f'pyhf patchset inspect {datadir.joinpath("example_patchset.json")}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert 'patch_channel1_signal_syst1' in ret.stdout
@@ -694,7 +694,7 @@ def test_patchset_extract(datadir, tmpdir, script_runner, output_file, with_meta
     if with_metadata:
         command += " --with-metadata"

-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success

     if output_file:
@@ -714,7 +714,7 @@ def test_patchset_extract(datadir, tmpdir, script_runner, output_file, with_meta
 def test_patchset_verify(datadir, script_runner):
     command = f'pyhf patchset verify {datadir.joinpath("example_bkgonly.json")} {datadir.joinpath("example_patchset.json")}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success
     assert 'All good' in ret.stdout
@@ -727,7 +727,7 @@ def test_patchset_apply(datadir, tmpdir, script_runner, output_file):
     if output_file:
         command += f" --output-file {temp.strpath}"

-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success

     if output_file:
@@ -743,21 +743,21 @@ def test_sort(tmpdir, script_runner):
     temp = tmpdir.join("parsed_output.json")
     command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))

     command = f'pyhf sort {temp.strpath}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success


 def test_sort_outfile(tmpdir, script_runner):
     temp = tmpdir.join("parsed_output.json")
     command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))

     tempout = tmpdir.join("sort_output.json")
     command = f'pyhf sort {temp.strpath} --output-file {tempout.strpath}'
-    ret = script_runner.run(*shlex.split(command))
+    ret = script_runner.run(shlex.split(command))
     assert ret.success
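Every hunk above applies the same mechanical change: the command is now handed to the pytest-console-scripts ScriptRunner as a single list of arguments (as produced by shlex.split) rather than as unpacked positional arguments. A minimal sketch of the pattern, assuming pytest-console-scripts >= 1.4 and its script_runner fixture; the test name and command string here are illustrative only and do not appear in the diff:

    import shlex

    def test_version_cli(script_runner):
        command = "pyhf --version"
        # Deprecated style: ret = script_runner.run(*shlex.split(command))
        # Current style: pass the whole argument list as one object
        ret = script_runner.run(shlex.split(command))
        assert ret.success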