diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index ced1d704d0..93c21d076c 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -5,6 +5,10 @@ updates:
     directory: "/"
     schedule:
       interval: "weekly"
+    groups:
+      actions:
+        patterns:
+          - "*"
     labels:
       - "github-actions"
       - "dependencies"
diff --git a/.github/workflows/bump-version.yml b/.github/workflows/bump-version.yml
index 607db63612..8083fa8a43 100644
--- a/.github/workflows/bump-version.yml
+++ b/.github/workflows/bump-version.yml
@@ -196,11 +196,11 @@ jobs:
           echo "steps.script.outputs.old_tag=v${current_tag}"
           echo "old_tag=v${current_tag}" >> $GITHUB_OUTPUT

-      - name: Set up Python 3.11
+      - name: Set up Python 3.12
        if: success()
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
        with:
-          python-version: '3.11'
+          python-version: '3.12'

       - name: Install Python dependencies
         run: |
diff --git a/.github/workflows/ci-windows.yml b/.github/workflows/ci-windows.yml
new file mode 100644
index 0000000000..c9f3e8e68b
--- /dev/null
+++ b/.github/workflows/ci-windows.yml
@@ -0,0 +1,46 @@
+name: CI on Windows
+
+on:
+  # Run daily at 1:23 UTC
+  schedule:
+    - cron: '23 1 * * *'
+  workflow_dispatch:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  test:
+
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [windows-latest]
+        python-version: ['3.9', '3.10', '3.11', '3.12']
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip setuptools wheel
+          python -m pip install --upgrade '.[all,test]'
+
+      - name: List installed Python packages
+        run: python -m pip list
+
+      - name: Test with pytest and coverage
+        # FIXME: ignore tests/test_scripts.py as runner breaks on Windows currently
+        run: |
+          coverage run --module pytest --ignore tests/test_scripts.py --ignore tests/contrib --ignore tests/benchmarks --ignore tests/test_notebooks.py
+
+      - name: Coverage report for core project
+        run: |
+          coverage report
+          coverage xml
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 81324f2884..65d7e656dd 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -27,23 +27,26 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest]
-        python-version: ['3.8', '3.9', '3.10', '3.11']
+        python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
         include:
           - os: macos-latest
-            python-version: '3.11'
+            python-version: '3.12'
+          # Apple silicon runner
+          - os: macos-14
+            python-version: '3.12'

     steps:
       - uses: actions/checkout@v4

       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}

       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip setuptools wheel
-          python -m pip install --upgrade .[test]
+          python -m pip install --upgrade ".[all,test]"

       - name: List installed Python packages
         run: python -m pip list
@@ -66,10 +69,12 @@ jobs:
         if: >-
           github.event_name != 'schedule' && matrix.os == 'ubuntu-latest'
-        uses: codecov/codecov-action@v3
+        uses: codecov/codecov-action@v4
         with:
+          fail_ci_if_error: true
           files: ./coverage.xml
           flags: unittests-${{ matrix.python-version }}
+          token: ${{ secrets.CODECOV_TOKEN }}

       - name: Test Contrib module with pytest
         run: |
@@ -81,30 +86,35 @@ jobs:
           coverage xml

       - name: Report contrib coverage with Codecov
-        if: >-
-          github.event_name != 'schedule' && matrix.python-version == '3.11' && matrix.os == 'ubuntu-latest'
-        uses: codecov/codecov-action@v3
+        if: github.event_name != 'schedule' && matrix.python-version == '3.12' && matrix.os == 'ubuntu-latest'
+        uses: codecov/codecov-action@v4
         with:
+          fail_ci_if_error: true
           files: ./coverage.xml
           flags: contrib
+          token: ${{ secrets.CODECOV_TOKEN }}

       - name: Test docstring examples with doctest
-        if: matrix.python-version == '3.11'
+        # TODO: Don't currently try to match amd64 and arm64 floating point for docs, but will in the future.
+        if: matrix.python-version == '3.12' && matrix.os != 'macos-14'
         run: coverage run --data-file=.coverage-doctest --module pytest src/ README.rst

       - name: Coverage report for doctest only
-        if: matrix.python-version == '3.11'
+        if: matrix.python-version == '3.12' && matrix.os != 'macos-14'
         run: |
           coverage report --data-file=.coverage-doctest
           coverage xml --data-file=.coverage-doctest -o doctest-coverage.xml

       - name: Report doctest coverage with Codecov
-        if: github.event_name != 'schedule' && matrix.python-version == '3.11' && matrix.os == 'ubuntu-latest'
-        uses: codecov/codecov-action@v3
+        if: github.event_name != 'schedule' && matrix.python-version == '3.12' && matrix.os == 'ubuntu-latest'
+        uses: codecov/codecov-action@v4
         with:
+          fail_ci_if_error: true
           files: doctest-coverage.xml
           flags: doctest
+          token: ${{ secrets.CODECOV_TOKEN }}

       - name: Run benchmarks
-        if: github.event_name == 'schedule' && matrix.python-version == '3.11'
+        if: github.event_name == 'schedule' && matrix.python-version == '3.12'
         run: |
           pytest --benchmark-sort=mean tests/benchmarks/test_benchmark.py
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 8319caaeb3..7d680f58d3 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -26,7 +26,7 @@ jobs:

     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
-      uses: github/codeql-action/init@v2
+      uses: github/codeql-action/init@v3
       with:
         languages: python
         # If you wish to specify custom queries, you can do so here or in a config file.
@@ -35,4 +35,4 @@ jobs:
         # queries: ./path/to/local/query, your-org/your-repo/queries@main

     - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@v2
+      uses: github/codeql-action/analyze@v3
diff --git a/.github/workflows/dependencies-head.yml b/.github/workflows/dependencies-head.yml
index ab2e8e8de2..1e4651251f 100644
--- a/.github/workflows/dependencies-head.yml
+++ b/.github/workflows/dependencies-head.yml
@@ -17,20 +17,20 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest, macos-latest]
-        python-version: ['3.11']
+        python-version: ['3.12']

     steps:
       - uses: actions/checkout@v4

       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}

       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip setuptools wheel
-          python -m pip --no-cache-dir --quiet install --upgrade --pre .[test]
+          python -m pip --no-cache-dir --quiet install --upgrade --pre ".[all,test]"
           python -m pip list

       - name: List release candidates, alpha, and beta releases
@@ -47,13 +47,13 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest]
-        python-version: ['3.11']
+        python-version: ['3.12']

     steps:
       - uses: actions/checkout@v4

       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}

@@ -62,7 +62,7 @@ jobs:
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip setuptools wheel
-          python -m pip --no-cache-dir --quiet install --upgrade .[test]
+          python -m pip --no-cache-dir --quiet install --upgrade ".[all,test]"
           python -m pip uninstall --yes scipy
           python -m pip install --upgrade --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple scipy
           python -m pip list
@@ -77,18 +77,18 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest]
-        python-version: ['3.11']
+        python-version: ['3.12']

     steps:
       - uses: actions/checkout@v4

       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}

       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip setuptools wheel
-          python -m pip --no-cache-dir --quiet install --upgrade .[test]
+          python -m pip --no-cache-dir --quiet install --upgrade ".[all,test]"
           python -m pip uninstall --yes iminuit
           python -m pip install --upgrade cython
           python -m pip install --upgrade git+https://github.com/scikit-hep/iminuit.git
@@ -103,18 +103,18 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest]
-        python-version: ['3.11']
+        python-version: ['3.12']

     steps:
       - uses: actions/checkout@v4

       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}

       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip setuptools wheel
-          python -m pip --no-cache-dir --quiet install --upgrade .[test]
+          python -m pip --no-cache-dir --quiet install --upgrade ".[all,test]"
           python -m pip uninstall --yes uproot
           python -m pip install --upgrade git+https://github.com/scikit-hep/uproot5.git
           python -m pip list
@@ -128,20 +128,20 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest]
-        python-version: ['3.11']
+        python-version: ['3.12']

     steps:
       - uses: actions/checkout@v4

       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}

       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip setuptools wheel
-          python -m pip --no-cache-dir --quiet install --upgrade .[test]
+          python -m pip --no-cache-dir --quiet install --upgrade ".[all,test]"
           python -m pip uninstall --yes matplotlib
           # Need to use --extra-index-url as dependencies aren't on scientific-python-nightly-wheels package index.
           # Need to use --pre as dev releases will need priority over stable releases.
@@ -165,18 +165,18 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest]
-        python-version: ['3.11']
+        python-version: ['3.12']

     steps:
       - uses: actions/checkout@v4

       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}

       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip setuptools wheel
-          python -m pip --no-cache-dir --quiet install --upgrade .[test]
+          python -m pip --no-cache-dir --quiet install --upgrade ".[all,test]"
           python -m pip uninstall --yes pytest
           python -m pip install --upgrade git+https://github.com/pytest-dev/pytest.git
           python -m pip list
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index aa744303f6..155100d0ed 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -22,9 +22,9 @@ jobs:
         fetch-depth: 0

     - name: Set up Python
-      uses: actions/setup-python@v4
+      uses: actions/setup-python@v5
       with:
-        python-version: '3.11'
+        python-version: '3.12'

     - name: Install Python dependencies
       run: |
@@ -91,7 +91,7 @@ jobs:
         done

     - name: Upload artifact
-      uses: actions/upload-pages-artifact@v2
+      uses: actions/upload-pages-artifact@v3
       with:
         path: 'docs/_build/html'

@@ -113,8 +113,8 @@ jobs:

     steps:
       - name: Setup Pages
-        uses: actions/configure-pages@v3
+        uses: actions/configure-pages@v5

       - name: Deploy to GitHub Pages
         id: deployment
-        uses: actions/deploy-pages@v2
+        uses: actions/deploy-pages@v4
diff --git a/.github/workflows/lower-bound-requirements.yml b/.github/workflows/lower-bound-requirements.yml
index 27e9a9cf4c..96b5c6a3d8 100644
--- a/.github/workflows/lower-bound-requirements.yml
+++ b/.github/workflows/lower-bound-requirements.yml
@@ -20,14 +20,14 @@ jobs:
       - uses: actions/checkout@v4

       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}

       - name: Install dependencies and force lowest bound
         run: |
           python -m pip install --upgrade pip setuptools wheel
-          python -m pip --no-cache-dir install --constraint tests/constraints.txt .[test]
+          python -m pip --no-cache-dir install --constraint tests/constraints.txt ".[all,test]"

       - name: List installed Python packages
         run: python -m pip list
diff --git a/.github/workflows/notebooks.yml b/.github/workflows/notebooks.yml
index 47c6acaff0..7ab2b048f8 100644
--- a/.github/workflows/notebooks.yml
+++ b/.github/workflows/notebooks.yml
@@ -16,13 +16,13 @@ jobs:
   runs-on: ubuntu-latest
   strategy:
     matrix:
-      python-version: ['3.11']
+      python-version: ['3.12']

   steps:
     - uses: actions/checkout@v4

     - name: Set up Python
-      uses: actions/setup-python@v4
+      uses: actions/setup-python@v5
       with:
         python-version: ${{ matrix.python-version }}

@@ -30,7 +30,7 @@ jobs:
       run: |
         python -m pip install --upgrade pip setuptools wheel
         # FIXME: c.f. https://github.com/scikit-hep/pyhf/issues/2104
-        python -m pip install --upgrade .[test] 'jupyter-client<8.0.0'
+        python -m pip install --upgrade ".[all,test]" 'jupyter-client<8.0.0'

     - name: List installed Python packages
       run: python -m pip list
diff --git a/.github/workflows/publish-package.yml b/.github/workflows/publish-package.yml
index 187391b1a3..cd5d4a9ba7 100644
--- a/.github/workflows/publish-package.yml
+++ b/.github/workflows/publish-package.yml
@@ -36,10 +36,10 @@ jobs:
         with:
           fetch-depth: 0

-      - name: Set up Python 3.11
-        uses: actions/setup-python@v4
+      - name: Set up Python 3.12
+        uses: actions/setup-python@v5
         with:
-          python-version: '3.11'
+          python-version: '3.12'

       - name: Install python-build and twine
         run: |
@@ -98,7 +98,7 @@ jobs:
         run: python -m zipfile --list dist/pyhf-*.whl

       - name: Upload distribution artifact
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: dist-artifact
           path: dist
@@ -118,7 +118,7 @@ jobs:

     steps:
       - name: Download distribution artifact
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           name: dist-artifact
           path: dist
@@ -132,13 +132,13 @@ jobs:
         if: >-
           (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && github.repository == 'scikit-hep/pyhf')
           || (github.event_name == 'workflow_dispatch' && github.event.inputs.publish == 'true' && github.repository == 'scikit-hep/pyhf')
-        uses: pypa/gh-action-pypi-publish@v1.8.10
+        uses: pypa/gh-action-pypi-publish@v1.8.14
         with:
           repository-url: https://test.pypi.org/legacy/
           print-hash: true

       - name: Publish distribution 📦 to PyPI
         if: github.event_name == 'release' && github.event.action == 'published' && github.repository == 'scikit-hep/pyhf'
-        uses: pypa/gh-action-pypi-publish@v1.8.10
+        uses: pypa/gh-action-pypi-publish@v1.8.14
         with:
           print-hash: true
diff --git a/.github/workflows/release_tests.yml b/.github/workflows/release_tests.yml
index 7fbbe46828..b149d22d11 100644
--- a/.github/workflows/release_tests.yml
+++ b/.github/workflows/release_tests.yml
@@ -18,16 +18,16 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest]
-        python-version: ['3.8', '3.9', '3.10', '3.11']
+        python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
         include:
           - os: macos-latest
-            python-version: '3.11'
+            python-version: '3.12'

     steps:
       - uses: actions/checkout@v4

       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 1087037b32..0a69bd27a6 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -4,7 +4,7 @@ ci:

 repos:
 - repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v4.4.0
+  rev: v4.5.0
   hooks:
   - id: check-added-large-files
   - id: check-case-conflict
@@ -22,29 +22,38 @@ repos:
   - id: name-tests-test
     args: ["--pytest-test-first"]
   - id: requirements-txt-fixer
+    exclude: tests/constraints.txt
   - id: trailing-whitespace
     # exclude generated files
     exclude: ^validation/|\.dtd$|\.xml$

+- repo: https://github.com/pre-commit/pygrep-hooks
+  rev: "v1.10.0"
+  hooks:
+  - id: rst-backticks
+  - id: rst-directive-colons
+  - id: rst-inline-touching-normal
+
 - repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: "v0.0.292"
+  rev: "v0.3.5"
   hooks:
   - id: ruff
     args: ["--fix", "--show-fixes"]

 - repo: https://github.com/psf/black-pre-commit-mirror
-  rev: 23.9.1
+  rev: 24.3.0
   hooks:
   - id: black-jupyter
+    types_or: [python, pyi, jupyter]

 - repo: https://github.com/adamchainz/blacken-docs
   rev: 1.16.0
   hooks:
   - id: blacken-docs
-    additional_dependencies: [black==23.7.0]
+    additional_dependencies: [black==24.3.0]

 - repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v1.5.1
+  rev: v1.9.0
   # check the oldest and newest supported Pythons
   hooks:
   - &mypy
@@ -55,15 +64,8 @@ repos:
       ['numpy', 'types-tqdm', 'click', 'types-jsonpatch', 'types-pyyaml', 'types-jsonschema', 'importlib_metadata', 'packaging']
     args: ["--python-version=3.8"]
   - <<: *mypy
-    name: mypy with Python 3.11
-    args: ["--python-version=3.11"]
-
-- repo: https://github.com/nbQA-dev/nbQA
-  rev: 1.7.0
-  hooks:
-  - id: nbqa-ruff
-    additional_dependencies: [ruff==0.0.292]
-    args: ["--extend-ignore=F821,F401,F841,F811"]
+    name: mypy with Python 3.12
+    args: ["--python-version=3.12"]

 - repo: https://github.com/codespell-project/codespell
   rev: v2.2.6
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
index bdab4e8207..bd5f4ea9cd 100644
--- a/.readthedocs.yaml
+++ b/.readthedocs.yaml
@@ -9,7 +9,7 @@ version: 2
 build:
   os: ubuntu-22.04
   tools:
-    python: "3.11"
+    python: "3.12"
   apt_packages:
     - curl
     - jq
diff --git a/.zenodo.json b/.zenodo.json
index e1ede5548b..4d85eb20fb 100644
--- a/.zenodo.json
+++ b/.zenodo.json
@@ -1,8 +1,8 @@
 {
     "description": "pure-Python HistFactory implementation with tensors and autodiff",
     "license": "Apache-2.0",
-    "title": "scikit-hep/pyhf: v0.7.4",
-    "version": "v0.7.4",
+    "title": "scikit-hep/pyhf: v0.7.6",
+    "version": "v0.7.6",
     "upload_type": "software",
     "creators": [
         {
@@ -36,7 +36,7 @@
     "related_identifiers": [
         {
             "scheme": "url",
-            "identifier": "https://github.com/scikit-hep/pyhf/tree/v0.7.4",
+            "identifier": "https://github.com/scikit-hep/pyhf/tree/v0.7.6",
             "relation": "isSupplementTo"
         }
     ]
diff --git a/CITATION.cff b/CITATION.cff
index 32128c124b..14b1444bb7 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -14,11 +14,11 @@ authors:
   given-names: "Giordon"
   orcid: "https://orcid.org/0000-0001-6616-3433"
   affiliation: "SCIPP, University of California, Santa Cruz"
-title: "pyhf: v0.7.4"
-version: 0.7.4
+title: "pyhf: v0.7.6"
+version: 0.7.6
 doi: 10.5281/zenodo.1169739
-repository-code: "https://github.com/scikit-hep/pyhf/releases/tag/v0.7.4"
-url: "https://pyhf.readthedocs.io/en/v0.7.4/"
+repository-code: "https://github.com/scikit-hep/pyhf/releases/tag/v0.7.6"
+url: "https://pyhf.readthedocs.io/en/v0.7.6/"
 keywords:
   - python
   - physics
diff --git a/README.rst b/README.rst
index b536b9a08c..eda8612ae8 100644
--- a/README.rst
+++ b/README.rst
@@ -6,7 +6,7 @@
 pure-python fitting/limit-setting/interval estimation HistFactory-style
 =======================================================================

-|GitHub Project| |DOI| |JOSS DOI| |Scikit-HEP| |NSF Award Number| |NumFOCUS Affiliated Project|
+|GitHub Project| |DOI| |JOSS DOI| |Scikit-HEP| |NSF Award Number IRIS-HEP v1| |NSF Award Number IRIS-HEP v2| |NumFOCUS Affiliated Project|
 |Docs from latest| |Docs from main| |Jupyter Book tutorial| |Binder|
@@ -309,11 +309,11 @@ the preferred BibTeX entry for citation of ``pyhf`` includes both the

     @software{pyhf,
       author = {Lukas Heinrich and Matthew Feickert and Giordon Stark},
-      title = "{pyhf: v0.7.4}",
-      version = {0.7.4},
+      title = "{pyhf: v0.7.6}",
+      version = {0.7.6},
       doi = {10.5281/zenodo.1169739},
       url = {https://doi.org/10.5281/zenodo.1169739},
-      note = {https://github.com/scikit-hep/pyhf/releases/tag/v0.7.4}
+      note = {https://github.com/scikit-hep/pyhf/releases/tag/v0.7.6}
     }

     @article{pyhf_joss,
@@ -348,7 +348,8 @@ Acknowledgements
 ----------------

 Matthew Feickert has received support to work on ``pyhf`` provided by NSF
-cooperative agreement `OAC-1836650 <https://www.nsf.gov/awardsearch/showAward?AWD_ID=1836650>`__ (IRIS-HEP)
+cooperative agreements `OAC-1836650 <https://www.nsf.gov/awardsearch/showAward?AWD_ID=1836650>`__
+and `PHY-2323298 <https://www.nsf.gov/awardsearch/showAward?AWD_ID=2323298>`__ (IRIS-HEP)
 and grant `OAC-1450377 <https://www.nsf.gov/awardsearch/showAward?AWD_ID=1450377>`__ (DIANA/HEP).

 ``pyhf`` is a `NumFOCUS Affiliated Project <https://numfocus.org/sponsored-projects/affiliated-projects>`__.
@@ -361,11 +362,13 @@ and grant `OAC-1450377
diff --git a/docs/lite/jupyterlite.py b/docs/lite/jupyterlite.py
-await piplite.install(["pyhf==0.7.4", "matplotlib>=3.0.0"])
+await piplite.install(["pyhf==0.7.6", "matplotlib>=3.0.0"])
 # %matplotlib inline
 import pyhf
diff --git a/docs/release-notes.rst b/docs/release-notes.rst
index c11d6c2019..720321de7e 100644
--- a/docs/release-notes.rst
+++ b/docs/release-notes.rst
@@ -2,6 +2,8 @@
 Release Notes
 =============

+.. include:: release-notes/v0.7.6.rst
+.. include:: release-notes/v0.7.5.rst
 .. include:: release-notes/v0.7.4.rst
 .. include:: release-notes/v0.7.3.rst
 .. include:: release-notes/v0.7.2.rst
diff --git a/docs/release-notes/v0.7.5.rst b/docs/release-notes/v0.7.5.rst
new file mode 100644
index 0000000000..6687a55370
--- /dev/null
+++ b/docs/release-notes/v0.7.5.rst
@@ -0,0 +1,14 @@
+|release v0.7.5|_
+=================
+
+This is a patch release from ``v0.7.4`` → ``v0.7.5``.
+
+Fixes
+-----
+
+* Remove operating system dependent components of schema validation to allow for
+  validation on Windows.
+  (PR :pr:`2357`)
+
+.. |release v0.7.5| replace:: ``v0.7.5``
+.. _`release v0.7.5`: https://github.com/scikit-hep/pyhf/releases/tag/v0.7.5
diff --git a/docs/release-notes/v0.7.6.rst b/docs/release-notes/v0.7.6.rst
new file mode 100644
index 0000000000..35ced6792e
--- /dev/null
+++ b/docs/release-notes/v0.7.6.rst
@@ -0,0 +1,32 @@
+|release v0.7.6|_
+=================
+
+This is a patch release from ``v0.7.5`` → ``v0.7.6``.
+
+Fixes
+-----
+
+* For the JAX backend access ``jax.config`` from the ``jax`` top level API to
+  avoid support issues with ``jax`` and ``jaxlib`` ``v0.4.20+``.
+  (PR :pr:`2376`)
+* Add information in the warnings for :func:`pyhf.infer.test_statistics.qmu` and
+  :func:`pyhf.infer.test_statistics.qmu_tilde` that provides users with the
+  higher level ``pyhf.infer`` APIs ``kwarg`` to set the correct test statistic.
+  (PR :pr:`2390`)
+* Correct the variable assignment for the one-sigma and two-sigma limit band
+  artists in :func:`pyhf.contrib.viz.brazil.plot_brazil_band` to match the
+  stated return structure.
+  (PR :pr:`2411`)
+* In the ``pyhf.infer`` module, correct the ``fixed_params`` type in the docs
+  to be :obj:`tuple` or :obj:`list`.
+  (PR :pr:`2420`)
+
+Contributors
+------------
+
+``v0.7.6`` benefited from contributions from:
+
+* Lorenz Gaertner
+
+.. |release v0.7.6| replace:: ``v0.7.6``
+.. _`release v0.7.6`: https://github.com/scikit-hep/pyhf/releases/tag/v0.7.6
diff --git a/noxfile.py b/noxfile.py
index ba75fae440..1828383486 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -4,10 +4,10 @@
 import nox

-ALL_PYTHONS = ["3.8", "3.9", "3.10", "3.11"]
+ALL_PYTHONS = ["3.8", "3.9", "3.10", "3.11", "3.12"]

 # Default sessions to run if no session handles are passed
-nox.options.sessions = ["lint", "tests-3.11"]
+nox.options.sessions = ["lint", "tests-3.12"]

 DIR = Path(__file__).parent.resolve()

@@ -30,12 +30,12 @@ def tests(session):

     Examples:

-        $ nox --session tests --python 3.11
-        $ nox --session tests --python 3.11 -- contrib  # run the contrib module tests
-        $ nox --session tests --python 3.11 -- tests/test_tensor.py  # run specific tests
-        $ nox --session tests --python 3.11 -- coverage  # run with coverage but slower
+        $ nox --session tests --python 3.12
+        $ nox --session tests --python 3.12 -- contrib  # run the contrib module tests
+        $ nox --session tests --python 3.12 -- tests/test_tensor.py  # run specific tests
+        $ nox --session tests --python 3.12 -- coverage  # run with coverage but slower
     """
-    session.install("--upgrade", "--editable", ".[test]")
+    session.install("--upgrade", "--editable", ".[all,test]")
     session.install("--upgrade", "pytest")

     # Allow tests to be run with coverage
@@ -107,7 +107,7 @@ def regenerate(session):
     """
     Regenerate Matplotlib images.
     """
-    session.install("--upgrade", "--editable", ".[test]")
+    session.install("--upgrade", "--editable", ".[all,test]")
     session.install("--upgrade", "pytest", "matplotlib")
     if not sys.platform.startswith("linux"):
         session.error(
@@ -182,7 +182,7 @@ def notebooks(session: nox.Session):
     """
     Run the notebook tests.
     """
-    session.install("--upgrade", "--editable", ".[test]")
+    session.install("--upgrade", "--editable", ".[all,test]")
     session.run(
         "pytest",
         "--override-ini",
diff --git a/pyproject.toml b/pyproject.toml
index 1e50c697ce..034be5f52f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -32,12 +32,14 @@ classifiers = [
     "Environment :: WebAssembly :: Emscripten",
     "Intended Audience :: Science/Research",
     "License :: OSI Approved :: Apache Software License",
+    "Operating System :: OS Independent",
     "Programming Language :: Python :: 3",
     "Programming Language :: Python :: 3 :: Only",
     "Programming Language :: Python :: 3.8",
     "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
     "Programming Language :: Python :: Implementation :: CPython",
     "Topic :: Scientific/Engineering",
     "Topic :: Scientific/Engineering :: Physics",
@@ -66,10 +68,18 @@ Homepage = "https://github.com/scikit-hep/pyhf"

 [project.optional-dependencies]
 shellcomplete = ["click_completion"]
+# TODO: 'tensorflow' supports all platform_machine for tensorflow v2.16.1+
+# but TensorFlow only supports python_version 3.8 up through tensorflow v2.13.1.
+# So until Python 3.8 support is dropped, split requirements on python_version
+# before and after 3.9.
+# NOTE: macos x86 support is deprecated from tensorflow v2.17.0 onwards.
 tensorflow = [
-    "tensorflow>=2.7.0; platform_machine != 'arm64'",  # c.f. PR #1962
-    "tensorflow-macos>=2.7.0; platform_machine == 'arm64' and platform_system == 'Darwin'",  # c.f. PR #2119
-    "tensorflow-probability>=0.11.0",  # c.f. PR #1657
+    # python == 3.8
+    "tensorflow>=2.7.0; python_version < '3.9' and platform_machine != 'arm64'",  # c.f. PR #1962, #2452
+    "tensorflow-macos>=2.7.0; python_version < '3.9' and platform_machine == 'arm64' and platform_system == 'Darwin'",  # c.f. PR #2119, #2452
+    "tensorflow-probability>=0.11.0; python_version < '3.9'",  # c.f. PR #1657, #2452
+    # python >= 3.9
+    "tensorflow-probability[tf]>=0.24.0; python_version >= '3.9'"  # c.f. PR #2452
 ]
 torch = ["torch>=1.10.0"]  # c.f. PR #1657
 jax = [
@@ -87,7 +97,6 @@ all = ["pyhf[backends,xmlio,contrib,shellcomplete]"]

 # Developer extras
 test = [
-    "pyhf[all]",
     "scikit-hep-testdata>=0.4.11",
     "pytest>=6.0",
     "coverage[toml]>=6.0.0",
@@ -98,7 +107,7 @@ test = [
     "pytest-mpl",
     "ipympl>=0.3.0",
     "pydocstyle",
-    "papermill~=2.3.4",
+    "papermill~=2.5.0",
     "scrapbook~=0.5.0",
     "jupyter",
     "graphviz",
@@ -114,13 +123,13 @@ docs = [
     "ipywidgets",
     "sphinx-issues",
     "sphinx-copybutton>=0.3.2,!=0.5.1",
-    "jupyterlite-sphinx>=0.8.0",
+    "jupyterlite-sphinx>=0.13.1",  # c.f. https://github.com/scikit-hep/pyhf/pull/2458
     "jupyterlite-pyodide-kernel>=0.0.7",
     "jupytext>=1.14.0",
     "ipython!=8.7.0",  # c.f. https://github.com/scikit-hep/pyhf/pull/2068
 ]
 develop = [
-    "pyhf[test,docs]",
+    "pyhf[all,test,docs]",
     "tbump>=6.7.0",
     "pre-commit",
     "nox",
@@ -216,6 +225,8 @@ filterwarnings = [
     "ignore:module 'sre_constants' is deprecated:DeprecationWarning",  # tensorflow v2.12.0+ for Python 3.11+
     "ignore:ml_dtypes.float8_e4m3b11 is deprecated.",  #FIXME: Can remove when jaxlib>=0.4.12
     "ignore:jsonschema.RefResolver is deprecated as of v4.18.0, in favor of the:DeprecationWarning",  # Issue #2139
+    "ignore:Skipping device Apple Paravirtual device that does not support Metal 2.0:UserWarning",  # Can't fix given hardware/virtualized device
+    'ignore:Type google._upb._message.[A-Z]+ uses PyType_Spec with a metaclass that has custom:DeprecationWarning',  # protobuf via tensorflow
 ]

 [tool.coverage.run]
@@ -233,10 +244,9 @@ exclude_also = [

 [tool.mypy]
 files = "src"
-python_version = "3.11"
+python_version = "3.12"
 warn_unused_configs = true
 strict = true
-show_error_codes = true
 enable_error_code = ["ignore-without-code", "redundant-expr", "truthy-bool"]
 warn_unreachable = true
@@ -283,25 +293,28 @@ module = [
 ]
 ignore_errors = true

 [tool.ruff]
+src = ["src"]
+line-length = 88
+
+[tool.ruff.lint]
 select = [
     "E", "F", "W", # flake8
     "UP", # pyupgrade
     "RUF", # Ruff-specific
     "TID", # flake8-tidy-imports
 ]
-line-length = 88
 ignore = [
     "E402",
     "E501",
     "RUF001", # String contains ambiguous unicode character
     "RUF005", # unpack-instead-of-concatenating-to-collection-literal
 ]
-src = ["src"]
 typing-modules = ["pyhf.typing"]
 unfixable = [
     "F841", # Removes unused variables
 ]
 flake8-tidy-imports.ban-relative-imports = "all"

-[tool.ruff.per-file-ignores]
+[tool.ruff.lint.per-file-ignores]
 "docs/lite/jupyterlite.py" = ["F401", "F704"]
+"**.ipynb" = ["F821", "F401", "F841", "F811", "E703"]
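The split ``tensorflow`` extra above leans entirely on PEP 508 environment markers to pick the right wheel per interpreter version and architecture. A minimal sketch of how such markers evaluate, using the ``packaging`` library (the reference implementation that pip vendors); the example environments are hypothetical:

from packaging.markers import Marker

legacy = Marker("python_version < '3.9' and platform_machine != 'arm64'")
modern = Marker("python_version >= '3.9'")

py38_x86 = {"python_version": "3.8", "platform_machine": "x86_64"}
py312_arm = {"python_version": "3.12", "platform_machine": "arm64"}

print(legacy.evaluate(environment=py38_x86))   # True -> pin tensorflow>=2.7.0
print(legacy.evaluate(environment=py312_arm))  # False
print(modern.evaluate(environment=py312_arm))  # True -> tensorflow-probability[tf]>=0.24.0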
diff --git a/src/pyhf/cli/__init__.py b/src/pyhf/cli/__init__.py
index 0d65039661..ed53c296af 100644
--- a/src/pyhf/cli/__init__.py
+++ b/src/pyhf/cli/__init__.py
@@ -1,4 +1,5 @@
 """The pyhf command line interface."""
+
 from pyhf.cli.cli import pyhf as cli
 from pyhf.cli.rootio import cli as rootio
 from pyhf.cli.spec import cli as spec
diff --git a/src/pyhf/cli/cli.py b/src/pyhf/cli/cli.py
index a1a486fe54..cd458a6329 100644
--- a/src/pyhf/cli/cli.py
+++ b/src/pyhf/cli/cli.py
@@ -1,4 +1,5 @@
 """The pyhf Command Line Interface."""
+
 import logging

 import click
diff --git a/src/pyhf/cli/complete.py b/src/pyhf/cli/complete.py
index 58fda5604f..c39e6e9784 100644
--- a/src/pyhf/cli/complete.py
+++ b/src/pyhf/cli/complete.py
@@ -1,4 +1,5 @@
 '''Shell completions for pyhf.'''
+
 import click

 try:
diff --git a/src/pyhf/cli/infer.py b/src/pyhf/cli/infer.py
index a877469efc..f2b0dce107 100644
--- a/src/pyhf/cli/infer.py
+++ b/src/pyhf/cli/infer.py
@@ -1,4 +1,5 @@
 """The inference CLI group."""
+
 import logging

 import click
diff --git a/src/pyhf/cli/patchset.py b/src/pyhf/cli/patchset.py
index 75bdb56887..4d31d4963d 100644
--- a/src/pyhf/cli/patchset.py
+++ b/src/pyhf/cli/patchset.py
@@ -1,4 +1,5 @@
 """The pyhf spec CLI subcommand."""
+
 import logging

 import click
diff --git a/src/pyhf/cli/rootio.py b/src/pyhf/cli/rootio.py
index 95d8872a84..c5d5840762 100644
--- a/src/pyhf/cli/rootio.py
+++ b/src/pyhf/cli/rootio.py
@@ -1,4 +1,5 @@
 """CLI subapps to handle conversion from ROOT."""
+
 import logging

 import click
diff --git a/src/pyhf/cli/spec.py b/src/pyhf/cli/spec.py
index 9772558169..08fdd73d94 100644
--- a/src/pyhf/cli/spec.py
+++ b/src/pyhf/cli/spec.py
@@ -1,4 +1,5 @@
 """The pyhf spec CLI subcommand."""
+
 import logging

 import click
@@ -149,9 +150,11 @@ def inspect(workspace, output_file, measurement):
                 ('(*) ' if measurement_name == default_measurement['name'] else '')
                 + measurement_name,
                 measurement_poi,
-                ','.join(measurement_parameters)
-                if measurement_parameters
-                else '(none)',
+                (
+                    ','.join(measurement_parameters)
+                    if measurement_parameters
+                    else '(none)'
+                ),
             )
         )
diff --git a/src/pyhf/contrib/cli.py b/src/pyhf/contrib/cli.py
index eaf2bb7e23..5bba47e4d4 100644
--- a/src/pyhf/contrib/cli.py
+++ b/src/pyhf/contrib/cli.py
@@ -1,4 +1,5 @@
 """CLI for functionality that will get migrated out eventually."""
+
 import logging
 import click
 from pathlib import Path
diff --git a/src/pyhf/contrib/utils.py b/src/pyhf/contrib/utils.py
index 02c9ffb5f2..7fa9550242 100644
--- a/src/pyhf/contrib/utils.py
+++ b/src/pyhf/contrib/utils.py
@@ -91,7 +91,12 @@ def download(archive_url, output_directory, force=False, compress=False):
                 with tarfile.open(
                     mode="r:*", fileobj=BytesIO(response.content)
                 ) as archive:
-                    archive.extractall(output_directory)
+                    # TODO: Simplify after pyhf is Python 3.12+ only
+                    # c.f. https://docs.python.org/3.12/library/tarfile.html#extraction-filters
+                    if hasattr(tarfile, "data_filter"):
+                        archive.extractall(output_directory, filter="data")
+                    else:
+                        archive.extractall(output_directory)
             except tarfile.ReadError:
                 if not zipfile.is_zipfile(BytesIO(response.content)):
                     raise exceptions.InvalidArchive(
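The guarded ``extractall`` above is the standard migration path to PEP 706 extraction filters: ``filter="data"`` refuses absolute paths, links that escape the destination, and setuid/setgid bits, while the ``hasattr`` check keeps interpreters without the backport working. The same pattern as a self-contained sketch (the archive path is hypothetical):

import tarfile


def safe_extract(archive_path, output_directory):
    with tarfile.open(archive_path, mode="r:*") as archive:
        if hasattr(tarfile, "data_filter"):
            # Python 3.12+, and older releases that backported PEP 706
            archive.extractall(output_directory, filter="data")
        else:
            # pre-filter behavior: only use with trusted archives
            archive.extractall(output_directory)


safe_extract("likelihoods.tar.gz", "likelihoods")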
diff --git a/src/pyhf/contrib/viz/brazil.py b/src/pyhf/contrib/viz/brazil.py
index a8955f20eb..18b25d87bd 100644
--- a/src/pyhf/contrib/viz/brazil.py
+++ b/src/pyhf/contrib/viz/brazil.py
@@ -1,4 +1,5 @@
 """Brazil Band Plots."""
+
 from collections import namedtuple

 import matplotlib.pyplot as plt
@@ -129,14 +130,14 @@ def plot_brazil_band(test_pois, cls_obs, cls_exp, test_size, ax, **kwargs):
             label=None if idx != 2 else r"$\mathrm{CL}_{s,\mathrm{exp}}$",
         )
         cls_exp_lines.append(_cls_exp_line)
-    one_sigma_band = ax.fill_between(
+    two_sigma_band = ax.fill_between(
         test_pois,
         cls_exp[0],
         cls_exp[-1],
         facecolor="yellow",
         label=r"$\pm2\sigma$ $\mathrm{CL}_{s,\mathrm{exp}}$",
     )
-    two_sigma_band = ax.fill_between(
+    one_sigma_band = ax.fill_between(
        test_pois,
        cls_exp[1],
        cls_exp[-2],
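The fix above only swaps which name is bound to which artist: the first ``fill_between`` spans the outermost expected CLs curves and is therefore the ±2σ (yellow) band, while the second spans the inner curves (±1σ). A toy sketch of the corrected binding, with synthetic numbers standing in for real expected CLs values:

import matplotlib.pyplot as plt

test_pois = [0.0, 0.5, 1.0]
# rows of cls_exp: [-2 sigma, -1 sigma, median, +1 sigma, +2 sigma]
cls_exp = [
    [0.05, 0.04, 0.03],
    [0.10, 0.08, 0.05],
    [0.20, 0.15, 0.10],
    [0.35, 0.25, 0.18],
    [0.50, 0.40, 0.30],
]

fig, ax = plt.subplots()
# outermost curves -> widest band -> two sigma
two_sigma_band = ax.fill_between(test_pois, cls_exp[0], cls_exp[-1], facecolor="yellow")
# inner curves -> one sigma
one_sigma_band = ax.fill_between(test_pois, cls_exp[1], cls_exp[-2], facecolor="green")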
diff --git a/src/pyhf/data/citation.bib b/src/pyhf/data/citation.bib
index ea49d8f090..48348f61b0 100644
--- a/src/pyhf/data/citation.bib
+++ b/src/pyhf/data/citation.bib
@@ -1,10 +1,10 @@
 @software{pyhf,
   author = {Lukas Heinrich and Matthew Feickert and Giordon Stark},
-  title = "{pyhf: v0.7.4}",
-  version = {0.7.4},
+  title = "{pyhf: v0.7.6}",
+  version = {0.7.6},
   doi = {10.5281/zenodo.1169739},
   url = {https://doi.org/10.5281/zenodo.1169739},
-  note = {https://github.com/scikit-hep/pyhf/releases/tag/v0.7.4}
+  note = {https://github.com/scikit-hep/pyhf/releases/tag/v0.7.6}
 }

 @article{pyhf_joss,
diff --git a/src/pyhf/infer/__init__.py b/src/pyhf/infer/__init__.py
index 5aaaf2c1c6..dc5531cd93 100644
--- a/src/pyhf/infer/__init__.py
+++ b/src/pyhf/infer/__init__.py
@@ -61,8 +61,8 @@ def hypotest(
         par_bounds (:obj:`tensor`): The extrema of values the model parameters
             are allowed to reach in the fit.
             The shape should be ``(n, 2)`` for ``n`` model parameters.
-        fixed_params (:obj:`tensor` of :obj:`bool`): The flag to set a parameter constant to its starting
-            value during minimization.
+        fixed_params (:obj:`tuple` or :obj:`list` of :obj:`bool`): The flag to set a parameter
+            constant to its starting value during minimization.
         calctype (:obj:`str`): The calculator to create. Choose either 'asymptotics' (default) or 'toybased'.
         return_tail_probs (:obj:`bool`): Bool for returning :math:`\mathrm{CL}_{s+b}` and :math:`\mathrm{CL}_{b}`
         return_expected (:obj:`bool`): Bool for returning :math:`\mathrm{CL}_{\mathrm{exp}}`
diff --git a/src/pyhf/infer/calculators.py b/src/pyhf/infer/calculators.py
index ed9642e085..f9922c6d89 100644
--- a/src/pyhf/infer/calculators.py
+++ b/src/pyhf/infer/calculators.py
@@ -7,6 +7,7 @@
 Using the calculators hypothesis tests can then be performed.
 """
+
 from pyhf.infer.mle import fixed_poi_fit
 from pyhf import get_backend
 from pyhf.infer import utils
@@ -61,8 +62,8 @@ def generate_asimov_data(
         par_bounds (:obj:`tensor`): The extrema of values the model parameters
             are allowed to reach in the fit.
             The shape should be ``(n, 2)`` for ``n`` model parameters.
-        fixed_params (:obj:`tensor` of :obj:`bool`): The flag to set a parameter constant to its starting
-            value during minimization.
+        fixed_params (:obj:`tuple` or :obj:`list` of :obj:`bool`): The flag to set a parameter
+            constant to its starting value during minimization.
         return_fitted_pars (:obj:`bool`): Return the best-fit parameter values for the given ``asimov_mu``.
@@ -241,8 +242,8 @@ def __init__(
         par_bounds (:obj:`tensor`): The extrema of values the model parameters
             are allowed to reach in the fit.
             The shape should be ``(n, 2)`` for ``n`` model parameters.
-        fixed_params (:obj:`tensor` of :obj:`bool`): The flag to set a parameter constant to its starting
-            value during minimization.
+        fixed_params (:obj:`tuple` or :obj:`list` of :obj:`bool`): The flag to set a parameter
+            constant to its starting value during minimization.
         test_stat (:obj:`str`): The test statistic to use as a numerical summary of the
             data: ``'qtilde'``, ``'q'``, or ``'q0'``.
@@ -685,8 +686,8 @@ def __init__(
         par_bounds (:obj:`tensor`): The extrema of values the model parameters
             are allowed to reach in the fit.
             The shape should be ``(n, 2)`` for ``n`` model parameters.
-        fixed_params (:obj:`tensor` of :obj:`bool`): The flag to set a parameter constant to its starting
-            value during minimization.
+        fixed_params (:obj:`tuple` or :obj:`list` of :obj:`bool`): The flag to set a parameter
+            constant to its starting value during minimization.
         test_stat (:obj:`str`): The test statistic to use as a numerical summary of the
             data: ``'qtilde'``, ``'q'``, or ``'q0'``.
diff --git a/src/pyhf/infer/intervals/__init__.py b/src/pyhf/infer/intervals/__init__.py
index 0f2f928cdd..2acdfd3165 100644
--- a/src/pyhf/infer/intervals/__init__.py
+++ b/src/pyhf/infer/intervals/__init__.py
@@ -1,4 +1,5 @@
 """Interval estimation"""
+
 import pyhf.infer.intervals.upper_limits

 __all__ = ["upper_limits.upper_limit"]
diff --git a/src/pyhf/infer/intervals/upper_limits.py b/src/pyhf/infer/intervals/upper_limits.py
index 6fbc27e6b3..601189cdbb 100644
--- a/src/pyhf/infer/intervals/upper_limits.py
+++ b/src/pyhf/infer/intervals/upper_limits.py
@@ -1,4 +1,5 @@
 """Interval estimation"""
+
 import numpy as np
 from scipy.optimize import toms748
diff --git a/src/pyhf/infer/mle.py b/src/pyhf/infer/mle.py
index fffff84131..c269eb47c8 100644
--- a/src/pyhf/infer/mle.py
+++ b/src/pyhf/infer/mle.py
@@ -1,4 +1,5 @@
 """Module for Maximum Likelihood Estimation."""
+
 from pyhf import get_backend
 from pyhf.exceptions import UnspecifiedPOI
@@ -106,8 +107,8 @@ def fit(data, pdf, init_pars=None, par_bounds=None, fixed_params=None, **kwargs)
         par_bounds (:obj:`list` of :obj:`list`/:obj:`tuple`): The extrema of values the model parameters
             are allowed to reach in the fit.
             The shape should be ``(n, 2)`` for ``n`` model parameters.
-        fixed_params (:obj:`list` of :obj:`bool`): The flag to set a parameter constant to its starting
-            value during minimization.
+        fixed_params (:obj:`tuple` or :obj:`list` of :obj:`bool`): The flag to set a parameter
+            constant to its starting value during minimization.
         kwargs: Keyword arguments passed through to the optimizer API

     Returns:
@@ -180,8 +181,8 @@ def fixed_poi_fit(
         par_bounds (:obj:`list` of :obj:`list`/:obj:`tuple`): The extrema of values the model parameters
             are allowed to reach in the fit.
             The shape should be ``(n, 2)`` for ``n`` model parameters.
-        fixed_params (:obj:`list` of :obj:`bool`): The flag to set a parameter constant to its starting
-            value during minimization.
+        fixed_params (:obj:`tuple` or :obj:`list` of :obj:`bool`): The flag to set a parameter
+            constant to its starting value during minimization.
         kwargs: Keyword arguments passed through to the optimizer API

     Returns:
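The ``fixed_params`` docstring change repeated across these files documents what the APIs already accept: any sequence of per-parameter booleans. A usage sketch against the public ``pyhf.infer.mle`` API (the model and data are toy values):

import pyhf

model = pyhf.simplemodels.uncorrelated_background(
    signal=[12.0, 11.0], bkg=[50.0, 52.0], bkg_uncertainty=[3.0, 7.0]
)
data = [51, 48] + model.config.auxdata

# a tuple of bools, one per model parameter: fix everything except the POI
fixed_params = tuple(
    idx != model.config.poi_index for idx in range(model.config.npars)
)
bestfit = pyhf.infer.mle.fit(data, model, fixed_params=fixed_params)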
diff --git a/src/pyhf/infer/test_statistics.py b/src/pyhf/infer/test_statistics.py
index 3af773d42c..97b6babe79 100644
--- a/src/pyhf/infer/test_statistics.py
+++ b/src/pyhf/infer/test_statistics.py
@@ -112,8 +112,8 @@ def qmu(mu, data, pdf, init_pars, par_bounds, fixed_params, return_fitted_pars=F
         par_bounds (:obj:`list` of :obj:`list`/:obj:`tuple`): The extrema of values the model parameters
             are allowed to reach in the fit.
             The shape should be ``(n, 2)`` for ``n`` model parameters.
-        fixed_params (:obj:`list` of :obj:`bool`): The flag to set a parameter constant to its starting
-            value during minimization.
+        fixed_params (:obj:`tuple` or :obj:`list` of :obj:`bool`): The flag to set a parameter
+            constant to its starting value during minimization.
         return_fitted_pars (:obj:`bool`): Return the best-fit parameter tensors
             the fixed-POI and unconstrained fits have converged on
             (i.e. :math:`\mu, \hat{\hat{\theta}}` and :math:`\hat{\mu}, \hat{\theta}`)
@@ -134,7 +134,8 @@ def qmu(mu, data, pdf, init_pars, par_bounds, fixed_params, return_fitted_pars=F
     if par_bounds[pdf.config.poi_index][0] == 0:
         log.warning(
             'qmu test statistic used for fit configuration with POI bounded at zero.\n'
-            + 'Use the qmu_tilde test statistic (pyhf.infer.test_statistics.qmu_tilde) instead.'
+            + 'Use the qmu_tilde test statistic (pyhf.infer.test_statistics.qmu_tilde) instead.\n'
+            + 'If you called this from pyhf.infer.mle or pyhf.infer.hypotest, set test_stat="qtilde".'
         )
     return _qmu_like(
         mu,
@@ -206,8 +207,8 @@ def qmu_tilde(
         par_bounds (:obj:`list` of :obj:`list`/:obj:`tuple`): The extrema of values the model parameters
             are allowed to reach in the fit.
             The shape should be ``(n, 2)`` for ``n`` model parameters.
-        fixed_params (:obj:`list` of :obj:`bool`): The flag to set a parameter constant to its starting
-            value during minimization.
+        fixed_params (:obj:`tuple` or :obj:`list` of :obj:`bool`): The flag to set a parameter
+            constant to its starting value during minimization.
         return_fitted_pars (:obj:`bool`): Return the best-fit parameter tensors
             the fixed-POI and unconstrained fits have converged on
             (i.e. :math:`\mu, \hat{\hat{\theta}}` and :math:`\hat{\mu}, \hat{\theta}`)
@@ -229,7 +230,8 @@ def qmu_tilde(
     if par_bounds[pdf.config.poi_index][0] != 0:
         log.warning(
             'qmu_tilde test statistic used for fit configuration with POI not bounded at zero.\n'
-            + 'Use the qmu test statistic (pyhf.infer.test_statistics.qmu) instead.'
+            + 'Use the qmu test statistic (pyhf.infer.test_statistics.qmu) instead.\n'
+            + 'If you called this from pyhf.infer.mle or pyhf.infer.hypotest, set test_stat="q".'
         )
     return _qmu_like(
         mu,
@@ -288,8 +290,8 @@ def tmu(mu, data, pdf, init_pars, par_bounds, fixed_params, return_fitted_pars=F
         par_bounds (:obj:`list` of :obj:`list`/:obj:`tuple`): The extrema of values the model parameters
             are allowed to reach in the fit.
             The shape should be ``(n, 2)`` for ``n`` model parameters.
-        fixed_params (:obj:`list` of :obj:`bool`): The flag to set a parameter constant to its starting
-            value during minimization.
+        fixed_params (:obj:`tuple` or :obj:`list` of :obj:`bool`): The flag to set a parameter
+            constant to its starting value during minimization.
         return_fitted_pars (:obj:`bool`): Return the best-fit parameter tensors
             the fixed-POI and unconstrained fits have converged on
             (i.e. :math:`\mu, \hat{\hat{\theta}}` and :math:`\hat{\mu}, \hat{\theta}`)
@@ -378,8 +380,8 @@ def tmu_tilde(
         par_bounds (:obj:`list` of :obj:`list`/:obj:`tuple`): The extrema of values the model parameters
             are allowed to reach in the fit.
             The shape should be ``(n, 2)`` for ``n`` model parameters.
-        fixed_params (:obj:`list` of :obj:`bool`): The flag to set a parameter constant to its starting
-            value during minimization.
+        fixed_params (:obj:`tuple` or :obj:`list` of :obj:`bool`): The flag to set a parameter
+            constant to its starting value during minimization.
         return_fitted_pars (:obj:`bool`): Return the best-fit parameter tensors
             the fixed-POI and unconstrained fits have converged on
             (i.e. :math:`\mu, \hat{\hat{\theta}}` and :math:`\hat{\mu}, \hat{\theta}`)
@@ -456,8 +458,8 @@ def q0(mu, data, pdf, init_pars, par_bounds, fixed_params, return_fitted_pars=Fa
         par_bounds (:obj:`list` of :obj:`list`/:obj:`tuple`): The extrema of values the model parameters
             are allowed to reach in the fit.
             The shape should be ``(n, 2)`` for ``n`` model parameters.
-        fixed_params (:obj:`list` of :obj:`bool`): The flag to set a parameter constant to its starting
-            value during minimization.
+        fixed_params (:obj:`tuple` or :obj:`list` of :obj:`bool`): The flag to set a parameter
+            constant to its starting value during minimization.
         return_fitted_pars (:obj:`bool`): Return the best-fit parameter tensors
             the fixed-POI and unconstrained fits have converged on
             (i.e. :math:`\mu, \hat{\hat{\theta}}` and :math:`\hat{\mu}, \hat{\theta}`)
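The extended warnings steer users away from calling the test-statistic functions directly with an inconsistent fit configuration and toward the higher-level API, where the choice is just a keyword argument. A sketch of that recommended path (toy model and data):

import pyhf

model = pyhf.simplemodels.uncorrelated_background(
    signal=[12.0, 11.0], bkg=[50.0, 52.0], bkg_uncertainty=[3.0, 7.0]
)
data = [51, 48] + model.config.auxdata

# POI bounded at zero, so qtilde is the consistent test statistic
cls_obs = pyhf.infer.hypotest(1.0, data, model, test_stat="qtilde")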
diff --git a/src/pyhf/infer/utils.py b/src/pyhf/infer/utils.py
index 1131b01de9..a9eeefac49 100644
--- a/src/pyhf/infer/utils.py
+++ b/src/pyhf/infer/utils.py
@@ -22,7 +22,7 @@ def all_pois_floating(pdf, fixed_params):

     Args:
         pdf (~pyhf.pdf.Model): The statistical model adhering to the schema
             ``model.json``.
-        fixed_params (:obj:`list` or `tensor` of :obj:`bool`): Array of
+        fixed_params (:obj:`tuple` or :obj:`list` of :obj:`bool`): Array of
             :obj:`bool` indicating if model parameters are fixed.

     Returns:
diff --git a/src/pyhf/interpolators/code0.py b/src/pyhf/interpolators/code0.py
index ff972d506b..52d87fefba 100644
--- a/src/pyhf/interpolators/code0.py
+++ b/src/pyhf/interpolators/code0.py
@@ -1,4 +1,5 @@
 """Piecewise-linear Interpolation. (Code 0)."""
+
 import logging
 import pyhf
 from pyhf.tensor.manager import get_backend
diff --git a/src/pyhf/interpolators/code1.py b/src/pyhf/interpolators/code1.py
index f5bbe71ff0..a9fa41c011 100644
--- a/src/pyhf/interpolators/code1.py
+++ b/src/pyhf/interpolators/code1.py
@@ -1,4 +1,5 @@
 """Piecewise-Exponential Interpolation (Code 1)."""
+
 import logging
 import math
 import pyhf
diff --git a/src/pyhf/interpolators/code2.py b/src/pyhf/interpolators/code2.py
index ed305c44da..8dff3278e7 100644
--- a/src/pyhf/interpolators/code2.py
+++ b/src/pyhf/interpolators/code2.py
@@ -1,4 +1,5 @@
 """Quadratic Interpolation (Code 2)."""
+
 import logging
 import pyhf
 from pyhf.tensor.manager import get_backend
diff --git a/src/pyhf/interpolators/code4.py b/src/pyhf/interpolators/code4.py
index a120bdf295..0290d0c52d 100644
--- a/src/pyhf/interpolators/code4.py
+++ b/src/pyhf/interpolators/code4.py
@@ -1,4 +1,5 @@
 """Polynomial Interpolation (Code 4)."""
+
 import logging
 import math
 import pyhf
diff --git a/src/pyhf/interpolators/code4p.py b/src/pyhf/interpolators/code4p.py
index ec8b63afa7..8841eb0dd0 100644
--- a/src/pyhf/interpolators/code4p.py
+++ b/src/pyhf/interpolators/code4p.py
@@ -1,4 +1,5 @@
 """Piecewise-Linear + Polynomial Interpolation (Code 4p)."""
+
 import logging
 import pyhf
 from pyhf.tensor.manager import get_backend
diff --git a/src/pyhf/modifiers/shapesys.py b/src/pyhf/modifiers/shapesys.py
index 39740e4cf8..c18ac90149 100644
--- a/src/pyhf/modifiers/shapesys.py
+++ b/src/pyhf/modifiers/shapesys.py
@@ -165,9 +165,9 @@
             sample_mask = self._shapesys_mask[syst_index][singular_sample_index][0]
             access_field_for_syst_and_batch[sample_mask] = selection

-        self._access_field[
-            syst_index, batch_index
-        ] = access_field_for_syst_and_batch
+        self._access_field[syst_index, batch_index] = (
+            access_field_for_syst_and_batch
+        )

     def _precompute(self):
         tensorlib, _ = get_backend()
diff --git a/src/pyhf/modifiers/staterror.py b/src/pyhf/modifiers/staterror.py
index 919811c9fe..a6d6d499c5 100644
--- a/src/pyhf/modifiers/staterror.py
+++ b/src/pyhf/modifiers/staterror.py
@@ -92,9 +92,12 @@ def finalize(self):
         relerrs = default_backend.sum(
             [
                 [
-                    (modifier_data['data']['uncrt'][binnr] / nomsall[binnr]) ** 2
-                    if nomsall[binnr] > 0
-                    else 0.0
+                    (
+                        (modifier_data['data']['uncrt'][binnr] / nomsall[binnr])
+                        ** 2
+                        if nomsall[binnr] > 0
+                        else 0.0
+                    )
                     for binnr in range(len(modifier_data['data']['nom_data']))
                 ]
                 for modifier_data in self.builder_data[modname].values()
@@ -184,9 +187,9 @@
             sample_mask = self._staterror_mask[syst_index][singular_sample_index][0]
             access_field_for_syst_and_batch[sample_mask] = selection

-        self._access_field[
-            syst_index, batch_index
-        ] = access_field_for_syst_and_batch
+        self._access_field[syst_index, batch_index] = (
+            access_field_for_syst_and_batch
+        )

     def _precompute(self):
         if not self.param_viewer.index_selection:
diff --git a/src/pyhf/optimize/common.py b/src/pyhf/optimize/common.py
index 61eeafd889..2049939159 100644
--- a/src/pyhf/optimize/common.py
+++ b/src/pyhf/optimize/common.py
@@ -1,4 +1,5 @@
 """Common Backend Shim to prepare minimization for optimizer."""
+
 from pyhf.tensor.manager import get_backend
 from pyhf.tensor.common import _TensorViewer
diff --git a/src/pyhf/optimize/mixins.py b/src/pyhf/optimize/mixins.py
index ca51fec978..f2bd7c4f82 100644
--- a/src/pyhf/optimize/mixins.py
+++ b/src/pyhf/optimize/mixins.py
@@ -1,4 +1,5 @@
 """Helper Classes for use of automatic differentiation."""
+
 import logging

 import numpy as np
diff --git a/src/pyhf/optimize/opt_minuit.py b/src/pyhf/optimize/opt_minuit.py
index e78b81a9f2..17d11a16dc 100644
--- a/src/pyhf/optimize/opt_minuit.py
+++ b/src/pyhf/optimize/opt_minuit.py
@@ -1,4 +1,5 @@
 """Minuit Optimizer Class."""
+
 from pyhf import exceptions
 from pyhf.optimize.mixins import OptimizerMixin
 import scipy
diff --git a/src/pyhf/optimize/opt_scipy.py b/src/pyhf/optimize/opt_scipy.py
index f334c36ad7..68c64bafe3 100644
--- a/src/pyhf/optimize/opt_scipy.py
+++ b/src/pyhf/optimize/opt_scipy.py
@@ -1,4 +1,5 @@
 """SciPy Optimizer Class."""
+
 from pyhf import exceptions
 from pyhf.optimize.mixins import OptimizerMixin
 import scipy
diff --git a/src/pyhf/optimize/opt_tflow.py b/src/pyhf/optimize/opt_tflow.py
index fd2965fb3e..178bc332ac 100644
--- a/src/pyhf/optimize/opt_tflow.py
+++ b/src/pyhf/optimize/opt_tflow.py
@@ -1,4 +1,5 @@
 """Tensorflow Backend Function Shim."""
+
 from pyhf import get_backend
 import tensorflow as tf
diff --git a/src/pyhf/parameters/paramview.py b/src/pyhf/parameters/paramview.py
index cac057f8b5..0238832747 100644
--- a/src/pyhf/parameters/paramview.py
+++ b/src/pyhf/parameters/paramview.py
@@ -44,9 +44,11 @@ def extract_index_access(baseviewer, subviewer, indices):

     # the transpose is here so that modifier code doesn't have to do it
     indices_concatenated = tensorlib.astensor(
-        tensorlib.einsum('ij->ji', stitched)
-        if len(tensorlib.shape(stitched)) > 1
-        else stitched,
+        (
+            tensorlib.einsum('ij->ji', stitched)
+            if len(tensorlib.shape(stitched)) > 1
+            else stitched
+        ),
         dtype='int',
     )
     return index_selection, stitched, indices_concatenated
diff --git a/src/pyhf/patchset.py b/src/pyhf/patchset.py
index b72118835b..86acaef693 100644
--- a/src/pyhf/patchset.py
+++ b/src/pyhf/patchset.py
@@ -1,6 +1,7 @@
 """
 pyhf patchset provides a user-friendly interface for interacting with patchsets.
 """
+
 import logging
 import jsonpatch
 from pyhf import exceptions
diff --git a/src/pyhf/probability.py b/src/pyhf/probability.py
index 7851127ae3..0cc0330272 100644
--- a/src/pyhf/probability.py
+++ b/src/pyhf/probability.py
@@ -1,4 +1,5 @@
 """The probability density function module."""
+
 from pyhf import get_backend

 __all__ = ["Independent", "Normal", "Poisson", "Simultaneous"]
diff --git a/src/pyhf/schema/__init__.py b/src/pyhf/schema/__init__.py
index 61bb01c78e..0c08f018b2 100644
--- a/src/pyhf/schema/__init__.py
+++ b/src/pyhf/schema/__init__.py
@@ -1,6 +1,7 @@
 """
 See :class:`~pyhf.schema.Schema` for documentation.
 """
+
 import pathlib
 import sys
 from pyhf.schema.loader import load_schema
diff --git a/src/pyhf/schema/validator.py b/src/pyhf/schema/validator.py
index 1fbc36c686..2540a3d002 100644
--- a/src/pyhf/schema/validator.py
+++ b/src/pyhf/schema/validator.py
@@ -1,4 +1,5 @@
 import numbers
+from pathlib import Path
 from typing import Mapping, Union

 import jsonschema
@@ -70,12 +71,15 @@ def validate(

     version = version or variables.SCHEMA_VERSION

-    schema = load_schema(f'{version}/{schema_name}')
+    schema = load_schema(str(Path(version).joinpath(schema_name)))

-    # note: trailing slash needed for RefResolver to resolve correctly
+    # note: trailing slash needed for RefResolver to resolve correctly and by
+    # design, pathlib strips trailing slashes. See ref below:
+    # * https://bugs.python.org/issue21039
+    # * https://github.com/python/cpython/issues/65238
     resolver = jsonschema.RefResolver(
-        base_uri=f"file://{variables.schemas}/{version}/",
-        referrer=f"{schema_name}",
+        base_uri=f"{Path(variables.schemas).joinpath(version).as_uri()}/",
+        referrer=schema_name,
         store=variables.SCHEMA_CACHE,
     )
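``Path.as_uri()`` is what makes the resolver work on Windows: it emits a well-formed ``file://`` URI for POSIX and drive-letter paths alike, but by design never a trailing slash, so one is appended manually for ``jsonschema.RefResolver``. A small sketch (the schema location is an example value):

from pathlib import Path

schemas = Path("/usr/share/pyhf/schemas")  # example location
version = "1.0.0"

# as_uri() handles percent-encoding and Windows drive letters, but strips
# the trailing slash that RefResolver needs, hence the manual append
base_uri = f"{schemas.joinpath(version).as_uri()}/"
print(base_uri)  # file:///usr/share/pyhf/schemas/1.0.0/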
diff --git a/src/pyhf/tensor/jax_backend.py b/src/pyhf/tensor/jax_backend.py
index cb1c5291f9..5fabe4c2fc 100644
--- a/src/pyhf/tensor/jax_backend.py
+++ b/src/pyhf/tensor/jax_backend.py
@@ -1,4 +1,4 @@
-from jax.config import config
+from jax import config

 config.update('jax_enable_x64', True)
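Sketch of the migration above in isolation: the ``jax.config`` module import path was removed in newer ``jax``/``jaxlib`` releases, but the same object has long been reachable as a top-level attribute, so the update call itself is unchanged:

from jax import config

config.update("jax_enable_x64", True)

import jax.numpy as jnp

# with x64 mode enabled, default floats are double precision
print(jnp.array([1.0]).dtype)  # float64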
diff --git a/src/pyhf/tensor/numpy_backend.py b/src/pyhf/tensor/numpy_backend.py
index 8c540b47f3..ec33392e3b 100644
--- a/src/pyhf/tensor/numpy_backend.py
+++ b/src/pyhf/tensor/numpy_backend.py
@@ -1,4 +1,5 @@
 """NumPy Tensor Library Module."""
+
 from __future__ import annotations

 import logging
diff --git a/src/pyhf/tensor/pytorch_backend.py b/src/pyhf/tensor/pytorch_backend.py
index f597237093..c4cc80032a 100644
--- a/src/pyhf/tensor/pytorch_backend.py
+++ b/src/pyhf/tensor/pytorch_backend.py
@@ -1,4 +1,5 @@
 """PyTorch Tensor Library Module."""
+
 import torch
 import torch.autograd
 from torch.func import hessian
diff --git a/src/pyhf/tensor/tensorflow_backend.py b/src/pyhf/tensor/tensorflow_backend.py
index ce3030b7f9..72b7dc224e 100644
--- a/src/pyhf/tensor/tensorflow_backend.py
+++ b/src/pyhf/tensor/tensorflow_backend.py
@@ -1,4 +1,5 @@
 """Tensorflow Tensor Library Module."""
+
 import logging
 import tensorflow as tf
 import tensorflow_probability as tfp
diff --git a/src/pyhf/typing.py b/src/pyhf/typing.py
index f012e3af22..19802e3da7 100644
--- a/src/pyhf/typing.py
+++ b/src/pyhf/typing.py
@@ -150,8 +150,7 @@ class TensorBackend(Protocol):
     precision: str
     default_do_grad: bool

-    def _setup(self) -> None:
-        ...
+    def _setup(self) -> None: ...


 class Optimizer(Protocol):
@@ -159,8 +158,6 @@ class PDF(Protocol):
-    def sample(self, sample_shape: Shape) -> Any:
-        ...
+    def sample(self, sample_shape: Shape) -> Any: ...

-    def log_prob(self, value: Any) -> Any:
-        ...
+    def log_prob(self, value: Any) -> Any: ...
diff --git a/src/pyhf/utils.py b/src/pyhf/utils.py
index 45f3d89f3e..c9ad5d0185 100644
--- a/src/pyhf/utils.py
+++ b/src/pyhf/utils.py
@@ -111,7 +111,7 @@ def citation(oneline=False):

     >>> import pyhf
     >>> pyhf.utils.citation(oneline=True)
-    '@software{pyhf, author = {Lukas Heinrich and Matthew Feickert and Giordon Stark}, title = "{pyhf: v0.7.4}", version = {0.7.4}, doi = {10.5281/zenodo.1169739}, url = {https://doi.org/10.5281/zenodo.1169739}, note = {https://github.com/scikit-hep/pyhf/releases/tag/v0.7.4}}@article{pyhf_joss, doi = {10.21105/joss.02823}, url = {https://doi.org/10.21105/joss.02823}, year = {2021}, publisher = {The Open Journal}, volume = {6}, number = {58}, pages = {2823}, author = {Lukas Heinrich and Matthew Feickert and Giordon Stark and Kyle Cranmer}, title = {pyhf: pure-Python implementation of HistFactory statistical models}, journal = {Journal of Open Source Software}}'
+    '@software{pyhf, author = {Lukas Heinrich and Matthew Feickert and Giordon Stark}, title = "{pyhf: v0.7.6}", version = {0.7.6}, doi = {10.5281/zenodo.1169739}, url = {https://doi.org/10.5281/zenodo.1169739}, note = {https://github.com/scikit-hep/pyhf/releases/tag/v0.7.6}}@article{pyhf_joss, doi = {10.21105/joss.02823}, url = {https://doi.org/10.21105/joss.02823}, year = {2021}, publisher = {The Open Journal}, volume = {6}, number = {58}, pages = {2823}, author = {Lukas Heinrich and Matthew Feickert and Giordon Stark and Kyle Cranmer}, title = {pyhf: pure-Python implementation of HistFactory statistical models}, journal = {Journal of Open Source Software}}'

     Keyword Args:
         oneline (:obj:`bool`): Whether to provide citation with new lines (default) or as a one-liner.
diff --git a/src/pyhf/workspace.py b/src/pyhf/workspace.py
index 00abcf77f4..08b285d25e 100644
--- a/src/pyhf/workspace.py
+++ b/src/pyhf/workspace.py
@@ -5,6 +5,7 @@
 * the observed data (optional)
 * fit configurations ("measurements")
 """
+
 from __future__ import annotations

 import collections
@@ -17,6 +18,8 @@
 from pyhf import exceptions, schema
 from pyhf.mixins import _ChannelSummaryMixin
 from pyhf.pdf import Model
+import functools
+import operator

 log = logging.getLogger(__name__)
@@ -464,8 +467,8 @@ def data(self, model, include_auxdata=True):

         """
         try:
-            observed_data = sum(
-                (self.observations[c] for c in model.config.channels), []
+            observed_data = functools.reduce(
+                operator.iadd, (self.observations[c] for c in model.config.channels), []
             )
         except KeyError:
             log.error(
@@ -706,7 +709,9 @@ def rename(self, modifiers=None, samples=None, channels=None, measurements=None)
         )

     @classmethod
-    def combine(cls, left, right, join='none', merge_channels=False):
+    def combine(
+        cls, left, right, join='none', merge_channels=False, validate: bool = True
+    ):
         """
         Return a new workspace specification that is the combination of the two workspaces.
@@ -733,6 +738,7 @@
             right (~pyhf.workspace.Workspace): Another workspace
             join (:obj:`str`): How to join the two workspaces. Pick from "none", "outer", "left outer", or "right outer".
             merge_channels (:obj:`bool`): Whether or not to merge channels when performing the combine. This is only done with "outer", "left outer", and "right outer" options.
+            validate (:obj:`bool`): Whether to validate against a JSON schema.

         Returns:
             ~pyhf.workspace.Workspace: A new combined workspace object
@@ -770,7 +776,7 @@ def combine(cls, left, right, join='none', merge_channels=False):
             'observations': new_observations,
             'version': new_version,
         }
-        return cls(newspec)
+        return cls(newspec, validate=validate)

     @classmethod
     def sorted(cls, workspace):
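Why the ``data()`` change helps: ``sum()`` with a list start value builds a new list on every addition, so concatenating the per-channel observations is quadratic in the total number of bins, while ``functools.reduce`` with ``operator.iadd`` extends one accumulator in place. A sketch with synthetic channel data:

import functools
import operator

observations = {"ch1": [1.0, 2.0], "ch2": [3.0], "ch3": [4.0, 5.0]}
channels = ["ch1", "ch2", "ch3"]

# the fresh [] seed is mutated in place; the per-channel lists are untouched
observed_data = functools.reduce(
    operator.iadd, (observations[c] for c in channels), []
)
print(observed_data)  # [1.0, 2.0, 3.0, 4.0, 5.0]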
diff --git a/src/pyhf/writexml.py b/src/pyhf/writexml.py
index 8d3ecd3ca3..ff7aac800b 100644
--- a/src/pyhf/writexml.py
+++ b/src/pyhf/writexml.py
@@ -56,7 +56,7 @@ def _export_root_histogram(hist_name, data):
 # https://stackoverflow.com/a/4590052
 def indent(elem, level=0):
     i = "\n" + level * "  "
-    if elem:
+    if elem is not None:
         if not elem.text or not elem.text.strip():
             elem.text = i + "  "
         if not elem.tail or not elem.tail.strip():
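The ``if elem:`` fix matters because ``xml.etree.ElementTree`` elements define their truth value as "has children", and truth-testing them is deprecated: a childless element is falsy even though it exists, and the explicit ``is not None`` check is the documented replacement (the same reasoning applies to the ``assert channel is not None`` change in tests/test_export.py below):

import xml.etree.ElementTree as ET

parent = ET.Element("channel")
child = ET.SubElement(parent, "sample")

print(len(parent))        # 1 -> truthy under the old implicit check
print(len(child))         # 0 -> falsy, so `if child:` skipped real elements
print(child is not None)  # True -> the explicit check used instead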
zipfile_path(tmpdir): - with open( - tmpdir.join("test_file.txt").strpath, "w", encoding="utf-8" - ) as write_file: +def zipfile_path(tmp_path): + with open(tmp_path.joinpath("test_file.txt"), "w", encoding="utf-8") as write_file: write_file.write("test file") - with zipfile.ZipFile(tmpdir.join("test_zip.zip").strpath, "w") as archive: - archive.write(tmpdir.join("test_file.txt").strpath) - return Path(tmpdir.join("test_zip.zip").strpath) + with zipfile.ZipFile(tmp_path.joinpath("test_zip.zip"), "w") as archive: + archive.write(tmp_path.joinpath("test_file.txt")) + return tmp_path.joinpath("test_zip.zip") -def test_download_untrusted_archive_host(tmpdir, requests_mock): +def test_download_untrusted_archive_host(tmp_path, requests_mock): archive_url = "https://www.pyhfthisdoesnotexist.org" requests_mock.get(archive_url) with pytest.raises(InvalidArchiveHost): - download(archive_url, tmpdir.join("likelihoods").strpath) + download(archive_url, tmp_path.joinpath("likelihoods")) -def test_download_invalid_archive(tmpdir, requests_mock): +def test_download_invalid_archive(tmp_path, requests_mock): archive_url = "https://www.hepdata.net/record/resource/1408476?view=true" requests_mock.get(archive_url, status_code=404) with pytest.raises(InvalidArchive): - download(archive_url, tmpdir.join("likelihoods").strpath) + download(archive_url, tmp_path.joinpath("likelihoods")) -def test_download_compress(tmpdir, requests_mock): +def test_download_compress(tmp_path, requests_mock): archive_url = "https://www.hepdata.net/record/resource/1408476?view=true" requests_mock.get(archive_url) - download(archive_url, tmpdir.join("likelihoods").strpath, compress=True) + download(archive_url, tmp_path.joinpath("likelihoods"), compress=True) def test_download_archive_type( - tmpdir, mocker, requests_mock, tarfile_path, tarfile_uncompressed_path, zipfile_path + tmp_path, + mocker, + requests_mock, + tarfile_path, + tarfile_uncompressed_path, + zipfile_path, ): archive_url = "https://www.hepdata.net/record/resource/1408476?view=true" - output_directory = tmpdir.join("likelihoods").strpath + output_directory = tmp_path.joinpath("likelihoods") # Give BytesIO a tarfile requests_mock.get(archive_url, content=open(tarfile_path, "rb").read()) download(archive_url, output_directory) @@ -86,7 +84,7 @@ def test_download_archive_type( requests_mock.get(archive_url, content=open(zipfile_path, "rb").read()) # Run without and with existing output_directory to cover both # cases of the shutil.rmtree logic - rmtree(Path(output_directory)) + rmtree(output_directory) download(archive_url, output_directory) # without download(archive_url, output_directory) # with @@ -97,13 +95,13 @@ def test_download_archive_type( download(archive_url, output_directory) -def test_download_archive_force(tmpdir, requests_mock, tarfile_path): +def test_download_archive_force(tmp_path, requests_mock, tarfile_path): archive_url = "https://www.cern.ch/record/resource/123456789" requests_mock.get( archive_url, content=open(tarfile_path, "rb").read(), status_code=200 ) with pytest.raises(InvalidArchiveHost): - download(archive_url, tmpdir.join("likelihoods").strpath, force=False) + download(archive_url, tmp_path.joinpath("likelihoods"), force=False) - download(archive_url, tmpdir.join("likelihoods").strpath, force=True) + download(archive_url, tmp_path.joinpath("likelihoods"), force=True) diff --git a/tests/test_calculator.py b/tests/test_calculator.py index 97b5fa4e3d..973d1e27bc 100644 --- a/tests/test_calculator.py +++ b/tests/test_calculator.py @@ -84,6 
+84,7 @@ def test_asymptotic_calculator_has_fitted_pars(test_stat): assert pytest.approx([1.0, 1.0], rel=rtol) == pyhf.tensorlib.tolist( fitted_pars.free_fit_to_data ) + # lower tolerance for amd64 and arm64 to agree assert pytest.approx( - [7.6470499e-05, 1.4997178], rel=rtol + [7.6470499e-05, 1.4997178], rel=1e-3 ) == pyhf.tensorlib.tolist(fitted_pars.free_fit_to_asimov) diff --git a/tests/test_examples.py b/tests/test_examples.py index 9d4c2a1e1c..fa545726ec 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -1,7 +1,7 @@ import shlex -def test_2bin_1channel(tmpdir, script_runner): +def test_2bin_1channel(tmp_path, script_runner): command = f"pyhf inspect {'docs/examples/json/2-bin_1-channel.json':s}" ret = script_runner.run(shlex.split(command)) assert ret.success diff --git a/tests/test_export.py b/tests/test_export.py index bba0aa224e..5c1ebed6e2 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -447,7 +447,7 @@ def test_integer_data(datadir, mocker): mocker.patch("pyhf.writexml._ROOT_DATA_FILE") channel = pyhf.writexml.build_channel(spec, channel_spec, {}) - assert channel + assert channel is not None @pytest.mark.parametrize( diff --git a/tests/test_infer.py b/tests/test_infer.py index f6a5bc6e92..0ccd072b94 100644 --- a/tests/test_infer.py +++ b/tests/test_infer.py @@ -23,7 +23,7 @@ def check_uniform_type(in_list): ) -def test_toms748_scan(tmpdir, hypotest_args): +def test_toms748_scan(tmp_path, hypotest_args): """ Test the upper limit toms748 scan returns the correct structure and values """ @@ -166,7 +166,7 @@ def test_upper_limit_with_kwargs(hypotest_args): ) -def test_mle_fit_default(tmpdir, hypotest_args): +def test_mle_fit_default(tmp_path, hypotest_args): """ Check that the default return structure of pyhf.infer.mle.fit is as expected """ @@ -180,7 +180,7 @@ def test_mle_fit_default(tmpdir, hypotest_args): assert pyhf.tensorlib.shape(result) == (model.config.npars,) -def test_mle_fit_return_fitted_val(tmpdir, hypotest_args): +def test_mle_fit_return_fitted_val(tmp_path, hypotest_args): """ Check that the return structure of pyhf.infer.mle.fit with the return_fitted_val keyword arg is as expected @@ -196,7 +196,7 @@ def test_mle_fit_return_fitted_val(tmpdir, hypotest_args): assert pyhf.tensorlib.shape(result[1]) == () -def test_hypotest_default(tmpdir, hypotest_args): +def test_hypotest_default(tmp_path, hypotest_args): """ Check that the default return structure of pyhf.infer.hypotest is as expected """ @@ -209,7 +209,7 @@ def test_hypotest_default(tmpdir, hypotest_args): assert isinstance(result, type(tb.astensor(result))) -def test_hypotest_poi_outofbounds(tmpdir, hypotest_args): +def test_hypotest_poi_outofbounds(tmp_path, hypotest_args): """ Check that the fit errors for POI outside of parameter bounds """ @@ -226,7 +226,7 @@ def test_hypotest_poi_outofbounds(tmpdir, hypotest_args): @pytest.mark.parametrize('test_stat', ['q0', 'q', 'qtilde']) -def test_hypotest_return_tail_probs(tmpdir, hypotest_args, test_stat): +def test_hypotest_return_tail_probs(tmp_path, hypotest_args, test_stat): """ Check that the return structure of pyhf.infer.hypotest with the return_tail_probs keyword arg is as expected @@ -243,7 +243,7 @@ def test_hypotest_return_tail_probs(tmpdir, hypotest_args, test_stat): @pytest.mark.parametrize('test_stat', ['q0', 'q', 'qtilde']) -def test_hypotest_return_expected(tmpdir, hypotest_args, test_stat): +def test_hypotest_return_expected(tmp_path, hypotest_args, test_stat): """ Check that the return structure of 
pyhf.infer.hypotest with the addition of the return_expected keyword arg is as expected @@ -265,7 +265,7 @@ def test_hypotest_return_expected(tmpdir, hypotest_args, test_stat): @pytest.mark.parametrize('test_stat', ['q0', 'q', 'qtilde']) -def test_hypotest_return_expected_set(tmpdir, hypotest_args, test_stat): +def test_hypotest_return_expected_set(tmp_path, hypotest_args, test_stat): """ Check that the return structure of pyhf.infer.hypotest with the addition of the return_expected_set keyword arg is as expected @@ -300,7 +300,7 @@ def test_hypotest_return_expected_set(tmpdir, hypotest_args, test_stat): @pytest.mark.parametrize('return_expected', [True, False]) @pytest.mark.parametrize('return_expected_set', [True, False]) def test_hypotest_return_calculator( - tmpdir, + tmp_path, hypotest_args, calctype, kwargs, @@ -491,7 +491,7 @@ def test_significance_to_pvalue_roundtrip(backend): assert np.allclose(sigma, back_to_sigma, atol=0, rtol=rtol) -def test_emperical_distribution(tmpdir, hypotest_args): +def test_emperical_distribution(tmp_path, hypotest_args): """ Check that the empirical distribution of the test statistic gives expected results @@ -537,7 +537,7 @@ def test_emperical_distribution(tmpdir, hypotest_args): ) -def test_toy_calculator(tmpdir, hypotest_args): +def test_toy_calculator(tmp_path, hypotest_args): """ Check that the toy calculator is performing as expected """ diff --git a/tests/test_interpolate.py b/tests/test_interpolate.py index ef8e382b66..52b5830f12 100644 --- a/tests/test_interpolate.py +++ b/tests/test_interpolate.py @@ -178,9 +178,7 @@ def test_code0_validation(backend, do_tensorized_calc): def test_code1_validation(backend, do_tensorized_calc): histogramssets = [[[[0.9], [1.0], [1.1]]]] alphasets = pyhf.tensorlib.astensor([[-2, -1, 0, 1, 2]]) - expected = pyhf.tensorlib.astensor( - [[[[0.9**2], [0.9], [1.0], [1.1], [1.1**2]]]] - ) + expected = pyhf.tensorlib.astensor([[[[0.9**2], [0.9], [1.0], [1.1], [1.1**2]]]]) interpolator = pyhf.interpolators.get(1, do_tensorized_calc=do_tensorized_calc)( histogramssets, subscribe=False diff --git a/tests/test_notebooks.py b/tests/test_notebooks.py index 07b978c2ff..bc005f2201 100644 --- a/tests/test_notebooks.py +++ b/tests/test_notebooks.py @@ -11,8 +11,8 @@ @pytest.fixture() -def common_kwargs(tmpdir): - outputnb = tmpdir.join('output.ipynb') +def common_kwargs(tmp_path): + outputnb = tmp_path.joinpath('output.ipynb') return { 'output_path': str(outputnb), 'kernel_name': f'python{sys.version_info.major}', diff --git a/tests/test_scripts.py b/tests/test_scripts.py index 0dd88e9b8a..9d00814c8e 100644 --- a/tests/test_scripts.py +++ b/tests/test_scripts.py @@ -5,7 +5,6 @@ import tarfile import time from importlib import import_module, reload -from pathlib import Path from unittest import mock import pytest @@ -15,16 +14,14 @@ @pytest.fixture(scope="function") -def tarfile_path(tmpdir): - with open( - tmpdir.join("test_file.txt").strpath, "w", encoding="utf-8" - ) as write_file: +def tarfile_path(tmp_path): + with open(tmp_path.joinpath("test_file.txt"), "w", encoding="utf-8") as write_file: write_file.write("test file") with tarfile.open( - tmpdir.join("test_tar.tar.gz").strpath, mode="w:gz", encoding="utf-8" + tmp_path.joinpath("test_tar.tar.gz"), mode="w:gz", encoding="utf-8" ) as archive: - archive.add(tmpdir.join("test_file.txt").strpath) - return Path(tmpdir.join("test_tar.tar.gz").strpath) + archive.add(tmp_path.joinpath("test_file.txt")) + return tmp_path.joinpath("test_tar.tar.gz") def test_version(script_runner): 
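The churn in these test-suite hunks, here and in the modules above, is a mechanical migration from pytest's legacy py.path-based "tmpdir" fixture to the pathlib-based "tmp_path" fixture. Both name the same per-test temporary directory; only the object type changes, so the py.path idioms map one-to-one onto pathlib ones. A minimal sketch of the correspondence, using a hypothetical test rather than one from this diff:

# Rough py.path -> pathlib mapping applied throughout these hunks:
#   tmpdir.join("f.json").strpath  ->  tmp_path / "f.json" (or tmp_path.joinpath("f.json"))
#   temp.write(text) / temp.read() ->  temp.write_text(text) / temp.read_text()
#   tmpdir.mkdir("out").strpath    ->  out = tmp_path / "out"; out.mkdir()
# Dropping the ":s" f-string spec is necessary, not just cosmetic:
# format(Path("x"), "s") raises TypeError, while f"{temp}" falls back to
# Path.__str__ and renders the path as expected.
def test_roundtrip(tmp_path):  # hypothetical test, not part of this PR
    out_file = tmp_path / "parsed_output.json"
    out_file.write_text('{"channels": []}')
    assert '"channels"' in out_file.read_text()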
@@ -57,29 +54,29 @@ def test_citation(script_runner, flag): # see test_import.py for the same (detailed) test -def test_import_prepHistFactory(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' +def test_import_prepHistFactory(tmp_path, script_runner): + temp = tmp_path.joinpath("parsed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress' ret = script_runner.run(shlex.split(command)) assert ret.success assert ret.stdout == '' assert ret.stderr == '' - parsed_xml = json.loads(temp.read()) + parsed_xml = json.loads(temp.read_text()) spec = {'channels': parsed_xml['channels']} pyhf.schema.validate(spec, 'model.json') -def test_import_prepHistFactory_withProgress(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' +def test_import_prepHistFactory_withProgress(tmp_path, script_runner): + temp = tmp_path.joinpath("parsed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = script_runner.run(shlex.split(command)) assert ret.success assert ret.stdout == '' assert ret.stderr != '' -def test_import_prepHistFactory_stdout(tmpdir, script_runner): +def test_import_prepHistFactory_stdout(tmp_path, script_runner): command = 'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -89,12 +86,12 @@ def test_import_prepHistFactory_stdout(tmpdir, script_runner): assert d -def test_import_prepHistFactory_and_fit(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") - command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}" +def test_import_prepHistFactory_and_fit(tmp_path, script_runner): + temp = tmp_path.joinpath("parsed_output.json") + command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}" ret = script_runner.run(shlex.split(command)) - command = f"pyhf fit {temp.strpath:s}" + command = f"pyhf fit {temp}" ret = script_runner.run(shlex.split(command)) assert ret.success @@ -109,7 +106,7 @@ def test_import_prepHistFactory_and_fit(tmpdir, script_runner): "LogNormExample", "ConstExample", ]: - command = f"pyhf fit {temp.strpath:s} --value --measurement {measurement:s}" + command = f"pyhf fit {temp} --value --measurement {measurement:s}" ret = script_runner.run(shlex.split(command)) assert ret.success @@ -118,22 +115,22 @@ def test_import_prepHistFactory_and_fit(tmpdir, script_runner): assert "mle_parameters" in ret_json assert "twice_nll" in ret_json - tmp_out = tmpdir.join(f"{measurement:s}_output.json") + tmp_out = tmp_path.joinpath(f"{measurement:s}_output.json") # make sure output file works too - command += f" --output-file {tmp_out.strpath:s}" + command += f" --output-file {tmp_out}" ret = script_runner.run(shlex.split(command)) assert ret.success - ret_json = json.load(tmp_out) + ret_json = json.load(tmp_out.open()) assert "mle_parameters" in ret_json assert "twice_nll" in ret_json -def 
test_import_prepHistFactory_and_cls(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' +def test_import_prepHistFactory_and_cls(tmp_path, script_runner): + temp = tmp_path.joinpath("parsed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = script_runner.run(shlex.split(command)) - command = f'pyhf cls {temp.strpath:s}' + command = f'pyhf cls {temp}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -148,7 +145,7 @@ def test_import_prepHistFactory_and_cls(tmpdir, script_runner): 'LogNormExample', 'ConstExample', ]: - command = f'pyhf cls {temp.strpath:s} --measurement {measurement:s}' + command = f'pyhf cls {temp} --measurement {measurement:s}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -157,37 +154,37 @@ def test_import_prepHistFactory_and_cls(tmpdir, script_runner): assert 'CLs_obs' in d assert 'CLs_exp' in d - tmp_out = tmpdir.join(f'{measurement:s}_output.json') + tmp_out = tmp_path.joinpath(f'{measurement:s}_output.json') # make sure output file works too - command += f' --output-file {tmp_out.strpath:s}' + command += f' --output-file {tmp_out}' ret = script_runner.run(shlex.split(command)) assert ret.success - d = json.load(tmp_out) + d = json.load(tmp_out.open()) assert 'CLs_obs' in d assert 'CLs_exp' in d -def test_import_usingMounts(datadir, tmpdir, script_runner): +def test_import_usingMounts(datadir, tmp_path, script_runner): data = datadir.joinpath("xmlimport_absolutePaths") - temp = tmpdir.join("parsed_output.json") - command = f'pyhf xml2json --hide-progress -v {data}:/absolute/path/to -v {data}:/another/absolute/path/to --output-file {temp.strpath:s} {data.joinpath("config/example.xml")}' + temp = tmp_path.joinpath("parsed_output.json") + command = f'pyhf xml2json --hide-progress -v {data}:/absolute/path/to -v {data}:/another/absolute/path/to --output-file {temp} {data.joinpath("config/example.xml")}' ret = script_runner.run(shlex.split(command)) assert ret.success assert ret.stdout == '' assert ret.stderr == '' - parsed_xml = json.loads(temp.read()) + parsed_xml = json.loads(temp.read_text()) spec = {'channels': parsed_xml['channels']} pyhf.schema.validate(spec, 'model.json') -def test_import_usingMounts_badDelimitedPaths(datadir, tmpdir, script_runner): +def test_import_usingMounts_badDelimitedPaths(datadir, tmp_path, script_runner): data = datadir.joinpath("xmlimport_absolutePaths") - temp = tmpdir.join("parsed_output.json") - command = f'pyhf xml2json --hide-progress -v {data}::/absolute/path/to -v {data}/another/absolute/path/to --output-file {temp.strpath:s} {data.joinpath("config/example.xml")}' + temp = tmp_path.joinpath("parsed_output.json") + command = f'pyhf xml2json --hide-progress -v {data}::/absolute/path/to -v {data}/another/absolute/path/to --output-file {temp} {data.joinpath("config/example.xml")}' ret = script_runner.run(shlex.split(command)) assert not ret.success @@ -196,12 +193,12 @@ def test_import_usingMounts_badDelimitedPaths(datadir, tmpdir, script_runner): @pytest.mark.parametrize("backend", ["numpy", "tensorflow", "pytorch", "jax"]) -def test_fit_backend_option(tmpdir, script_runner, backend): - temp = tmpdir.join("parsed_output.json") - command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ 
--output-file {temp.strpath:s}" +def test_fit_backend_option(tmp_path, script_runner, backend): + temp = tmp_path.joinpath("parsed_output.json") + command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}" ret = script_runner.run(shlex.split(command)) - command = f"pyhf fit --backend {backend:s} {temp.strpath:s}" + command = f"pyhf fit --backend {backend:s} {temp}" ret = script_runner.run(shlex.split(command)) assert ret.success @@ -211,12 +208,12 @@ def test_fit_backend_option(tmpdir, script_runner, backend): @pytest.mark.parametrize("backend", ["numpy", "tensorflow", "pytorch", "jax"]) -def test_cls_backend_option(tmpdir, script_runner, backend): - temp = tmpdir.join("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' +def test_cls_backend_option(tmp_path, script_runner, backend): + temp = tmp_path.joinpath("parsed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = script_runner.run(shlex.split(command)) - command = f'pyhf cls --backend {backend:s} {temp.strpath:s}' + command = f'pyhf cls --backend {backend:s} {temp}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -226,86 +223,98 @@ def test_cls_backend_option(tmpdir, script_runner, backend): assert 'CLs_exp' in d -def test_import_and_export(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' +def test_import_and_export(tmp_path, script_runner): + temp = tmp_path.joinpath("parsed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = script_runner.run(shlex.split(command)) - command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmpdir.mkdir('output').strpath:s}" + output_dir_path = tmp_path / "output" + output_dir_path.mkdir() + + command = f"pyhf json2xml {temp} --output-dir {output_dir_path}" ret = script_runner.run(shlex.split(command)) assert ret.success -def test_patch(tmpdir, script_runner): - patch = tmpdir.join('patch.json') +def test_patch(tmp_path, script_runner): + patch = tmp_path.joinpath('patch.json') - patch.write( + patch.write_text( ''' [{"op": "replace", "path": "/channels/0/samples/0/data", "value": [5,6]}] ''' ) - temp = tmpdir.join("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' + temp = tmp_path.joinpath("parsed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = script_runner.run(shlex.split(command)) - command = f'pyhf cls {temp.strpath:s} --patch {patch.strpath:s}' + command = f'pyhf cls {temp} --patch {patch}' ret = script_runner.run(shlex.split(command)) assert ret.success - command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmpdir.mkdir('output_1').strpath:s} --patch {patch.strpath:s}" + output_dir_path = tmp_path / "output_1" + output_dir_path.mkdir(exist_ok=True) + + command = f"pyhf json2xml {temp} --output-dir {output_dir_path} --patch {patch}" ret = script_runner.run(shlex.split(command)) assert ret.success - command = 
f'pyhf cls {temp.strpath:s} --patch -' + command = f'pyhf cls {temp} --patch -' - ret = script_runner.run(shlex.split(command), stdin=patch) + ret = script_runner.run(shlex.split(command), stdin=patch.open()) assert ret.success - command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmpdir.mkdir('output_2').strpath:s} --patch -" - ret = script_runner.run(shlex.split(command), stdin=patch) + output_dir_path = tmp_path / "output_2" + output_dir_path.mkdir(exist_ok=True) + + command = f"pyhf json2xml {temp} --output-dir {output_dir_path} --patch -" + ret = script_runner.run(shlex.split(command), stdin=patch.open()) assert ret.success -def test_patch_fail(tmpdir, script_runner): - patch = tmpdir.join('patch.json') +def test_patch_fail(tmp_path, script_runner): + patch = tmp_path.joinpath('patch.json') - patch.write('''not,json''') + patch.write_text('''not,json''') - temp = tmpdir.join("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' + temp = tmp_path.joinpath("parsed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = script_runner.run(shlex.split(command)) - command = f'pyhf cls {temp.strpath:s} --patch {patch.strpath:s}' + command = f'pyhf cls {temp} --patch {patch}' ret = script_runner.run(shlex.split(command)) assert not ret.success - command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmpdir.mkdir('output').strpath:s} --patch {patch.strpath:s}" + output_dir_path = tmp_path / "output" + output_dir_path.mkdir() + + command = f"pyhf json2xml {temp} --output-dir {output_dir_path} --patch {patch}" ret = script_runner.run(shlex.split(command)) assert not ret.success -def test_bad_measurement_name(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' +def test_bad_measurement_name(tmp_path, script_runner): + temp = tmp_path.joinpath("parsed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = script_runner.run(shlex.split(command)) - command = f'pyhf cls {temp.strpath:s} --measurement "a-fake-measurement-name"' + command = f'pyhf cls {temp} --measurement "a-fake-measurement-name"' ret = script_runner.run(shlex.split(command)) assert not ret.success # assert 'no measurement by name' in ret.stderr # numpy swallows the log.error() here, dunno why -def test_testpoi(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' +def test_testpoi(tmp_path, script_runner): + temp = tmp_path.joinpath("parsed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = script_runner.run(shlex.split(command)) pois = [1.0, 0.5, 0.001] results_exp = [] results_obs = [] for test_poi in pois: - command = f'pyhf cls {temp.strpath:s} --test-poi {test_poi:f}' + command = f'pyhf cls {temp} --test-poi {test_poi:f}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -331,13 +340,13 @@ def test_testpoi(tmpdir, script_runner): @pytest.mark.parametrize( "opts,success", [(["maxiter=1000"], 
True), (["maxiter=1"], False)] ) -def test_fit_optimizer(tmpdir, script_runner, optimizer, opts, success): - temp = tmpdir.join("parsed_output.json") - command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}" +def test_fit_optimizer(tmp_path, script_runner, optimizer, opts, success): + temp = tmp_path.joinpath("parsed_output.json") + command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}" ret = script_runner.run(shlex.split(command)) optconf = " ".join(f"--optconf {opt}" for opt in opts) - command = f"pyhf fit --optimizer {optimizer} {optconf} {temp.strpath}" + command = f"pyhf fit --optimizer {optimizer} {optconf} {temp}" ret = script_runner.run(shlex.split(command)) assert ret.success == success @@ -347,39 +356,39 @@ def test_fit_optimizer(tmpdir, script_runner, optimizer, opts, success): @pytest.mark.parametrize( 'opts,success', [(['maxiter=1000'], True), (['maxiter=1'], False)] ) -def test_cls_optimizer(tmpdir, script_runner, optimizer, opts, success): - temp = tmpdir.join("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' +def test_cls_optimizer(tmp_path, script_runner, optimizer, opts, success): + temp = tmp_path.joinpath("parsed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = script_runner.run(shlex.split(command)) optconf = " ".join(f"--optconf {opt}" for opt in opts) - command = f'pyhf cls {temp.strpath} --optimizer {optimizer} {optconf}' + command = f'pyhf cls {temp} --optimizer {optimizer} {optconf}' ret = script_runner.run(shlex.split(command)) assert ret.success == success -def test_inspect(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' +def test_inspect(tmp_path, script_runner): + temp = tmp_path.joinpath("parsed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress' ret = script_runner.run(shlex.split(command)) - command = f'pyhf inspect {temp.strpath:s}' + command = f'pyhf inspect {temp}' ret = script_runner.run(shlex.split(command)) assert ret.success -def test_inspect_outfile(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' +def test_inspect_outfile(tmp_path, script_runner): + temp = tmp_path.joinpath("parsed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress' ret = script_runner.run(shlex.split(command)) - tempout = tmpdir.join("inspect_output.json") - command = f'pyhf inspect {temp.strpath:s} --output-file {tempout.strpath:s}' + tempout = tmp_path.joinpath("inspect_output.json") + command = f'pyhf inspect {temp} --output-file {tempout}' ret = script_runner.run(shlex.split(command)) assert ret.success - summary = json.loads(tempout.read()) + summary = json.loads(tempout.read_text()) assert [ 'channels', 'measurements', @@ 
-396,65 +405,63 @@ def test_inspect_outfile(tmpdir, script_runner): assert len(summary['systematics']) == 6 -def test_prune(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' +def test_prune(tmp_path, script_runner): + temp = tmp_path.joinpath("parsed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress' ret = script_runner.run(shlex.split(command)) - command = ( - f"pyhf prune -m staterror_channel1 --measurement GammaExample {temp.strpath:s}" - ) + command = f"pyhf prune -m staterror_channel1 --measurement GammaExample {temp}" ret = script_runner.run(shlex.split(command)) assert ret.success -def test_prune_outfile(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' +def test_prune_outfile(tmp_path, script_runner): + temp = tmp_path.joinpath("parsed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress' ret = script_runner.run(shlex.split(command)) - tempout = tmpdir.join("prune_output.json") - command = f'pyhf prune -m staterror_channel1 --measurement GammaExample {temp.strpath:s} --output-file {tempout.strpath:s}' + tempout = tmp_path.joinpath("prune_output.json") + command = f'pyhf prune -m staterror_channel1 --measurement GammaExample {temp} --output-file {tempout}' ret = script_runner.run(shlex.split(command)) assert ret.success - spec = json.loads(temp.read()) + spec = json.loads(temp.read_text()) ws = pyhf.Workspace(spec) assert 'GammaExample' in ws.measurement_names assert 'staterror_channel1' in ws.model().config.parameters - pruned_spec = json.loads(tempout.read()) + pruned_spec = json.loads(tempout.read_text()) pruned_ws = pyhf.Workspace(pruned_spec) assert 'GammaExample' not in pruned_ws.measurement_names assert 'staterror_channel1' not in pruned_ws.model().config.parameters -def test_rename(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' +def test_rename(tmp_path, script_runner): + temp = tmp_path.joinpath("parsed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress' ret = script_runner.run(shlex.split(command)) - command = f'pyhf rename -m staterror_channel1 staterror_channelone --measurement GammaExample GamEx {temp.strpath:s}' + command = f'pyhf rename -m staterror_channel1 staterror_channelone --measurement GammaExample GamEx {temp}' ret = script_runner.run(shlex.split(command)) assert ret.success -def test_rename_outfile(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' +def test_rename_outfile(tmp_path, script_runner): + temp = tmp_path.joinpath("parsed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir 
validation/xmlimport_input/ --output-file {temp} --hide-progress' ret = script_runner.run(shlex.split(command)) - tempout = tmpdir.join("rename_output.json") - command = f'pyhf rename -m staterror_channel1 staterror_channelone --measurement GammaExample GamEx {temp.strpath:s} --output-file {tempout.strpath:s}' + tempout = tmp_path.joinpath("rename_output.json") + command = f'pyhf rename -m staterror_channel1 staterror_channelone --measurement GammaExample GamEx {temp} --output-file {tempout}' ret = script_runner.run(shlex.split(command)) assert ret.success - spec = json.loads(temp.read()) + spec = json.loads(temp.read_text()) ws = pyhf.Workspace(spec) assert 'GammaExample' in ws.measurement_names assert 'GamEx' not in ws.measurement_names assert 'staterror_channel1' in ws.model().config.parameters assert 'staterror_channelone' not in ws.model().config.parameters - renamed_spec = json.loads(tempout.read()) + renamed_spec = json.loads(tempout.read_text()) renamed_ws = pyhf.Workspace(renamed_spec) assert 'GammaExample' not in renamed_ws.measurement_names assert 'GamEx' in renamed_ws.measurement_names @@ -462,10 +469,10 @@ def test_rename_outfile(tmpdir, script_runner): assert 'staterror_channelone' in renamed_ws.model().config.parameters -def test_combine(tmpdir, script_runner): - temp_1 = tmpdir.join("parsed_output.json") - temp_2 = tmpdir.join("renamed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1.strpath:s} --hide-progress' +def test_combine(tmp_path, script_runner): + temp_1 = tmp_path.joinpath("parsed_output.json") + temp_2 = tmp_path.joinpath("renamed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1} --hide-progress' ret = script_runner.run(shlex.split(command)) rename_channels = {'channel1': 'channel2'} @@ -482,18 +489,18 @@ def test_combine(tmpdir, script_runner): _opts_measurements = ''.join( ' --measurement ' + ' '.join(item) for item in rename_measurements.items() ) - command = f"pyhf rename {temp_1.strpath:s} {_opts_channels:s} {_opts_measurements:s} --output-file {temp_2.strpath:s}" + command = f"pyhf rename {temp_1} {_opts_channels:s} {_opts_measurements:s} --output-file {temp_2}" ret = script_runner.run(shlex.split(command)) - command = f'pyhf combine {temp_1.strpath:s} {temp_2.strpath:s}' + command = f'pyhf combine {temp_1} {temp_2}' ret = script_runner.run(shlex.split(command)) assert ret.success -def test_combine_outfile(tmpdir, script_runner): - temp_1 = tmpdir.join("parsed_output.json") - temp_2 = tmpdir.join("renamed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1.strpath:s} --hide-progress' +def test_combine_outfile(tmp_path, script_runner): + temp_1 = tmp_path.joinpath("parsed_output.json") + temp_2 = tmp_path.joinpath("renamed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1} --hide-progress' ret = script_runner.run(shlex.split(command)) rename_channels = {'channel1': 'channel2'} @@ -510,35 +517,33 @@ def test_combine_outfile(tmpdir, script_runner): _opts_measurements = ''.join( ' --measurement ' + ' '.join(item) for item in rename_measurements.items() ) - command = f"pyhf rename {temp_1.strpath:s} {_opts_channels:s} {_opts_measurements:s} --output-file {temp_2.strpath:s}" + 
command = f"pyhf rename {temp_1} {_opts_channels:s} {_opts_measurements:s} --output-file {temp_2}" ret = script_runner.run(shlex.split(command)) - tempout = tmpdir.join("combined_output.json") - command = f'pyhf combine {temp_1.strpath:s} {temp_2.strpath:s} --output-file {tempout.strpath:s}' + tempout = tmp_path.joinpath("combined_output.json") + command = f'pyhf combine {temp_1} {temp_2} --output-file {tempout}' ret = script_runner.run(shlex.split(command)) assert ret.success - combined_spec = json.loads(tempout.read()) + combined_spec = json.loads(tempout.read_text()) combined_ws = pyhf.Workspace(combined_spec) assert combined_ws.channels == ['channel1', 'channel2'] assert len(combined_ws.measurement_names) == 8 -def test_combine_merge_channels(tmpdir, script_runner): - temp_1 = tmpdir.join("parsed_output.json") - temp_2 = tmpdir.join("renamed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1.strpath} --hide-progress' +def test_combine_merge_channels(tmp_path, script_runner): + temp_1 = tmp_path.joinpath("parsed_output.json") + temp_2 = tmp_path.joinpath("renamed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1} --hide-progress' ret = script_runner.run(shlex.split(command)) assert ret.success - command = ( - f'pyhf prune {temp_1.strpath} --sample signal --output-file {temp_2.strpath}' - ) + command = f'pyhf prune {temp_1} --sample signal --output-file {temp_2}' ret = script_runner.run(shlex.split(command)) assert ret.success - command = f'pyhf combine --merge-channels --join "left outer" {temp_1.strpath} {temp_2.strpath}' + command = f'pyhf combine --merge-channels --join "left outer" {temp_1} {temp_2}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -547,17 +552,19 @@ def test_combine_merge_channels(tmpdir, script_runner): @pytest.mark.parametrize( 'algorithms', [['md5'], ['sha256'], ['sha256', 'md5'], ['sha256', 'md5']] ) -def test_workspace_digest(tmpdir, script_runner, algorithms, do_json): +def test_workspace_digest(tmp_path, script_runner, algorithms, do_json): results = { 'md5': '7de8930ff37e5a4f6a31da11bda7813f', 'sha256': '6d416ee67a40460499ea2ef596fb1e682a563d7df06e690018a211d35238aecc', } - temp = tmpdir.join("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath} --hide-progress' + temp = tmp_path.joinpath("parsed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress' ret = script_runner.run(shlex.split(command)) - command = f"pyhf digest {temp.strpath} -a {' -a '.join(algorithms)}{' -j' if do_json else ''}" + command = ( + f"pyhf digest {temp} -a {' -a '.join(algorithms)}{' -j' if do_json else ''}" + ) ret = script_runner.run(shlex.split(command)) assert ret.success assert all(algorithm in ret.stdout for algorithm in algorithms) @@ -588,21 +595,23 @@ def test_workspace_digest(tmpdir, script_runner, algorithms, do_json): "https://doi.org/10.17182/hepdata.89408.v1/r2", ], ) -def test_patchset_download(tmpdir, script_runner, requests_mock, tarfile_path, archive): +def test_patchset_download( + tmp_path, script_runner, requests_mock, tarfile_path, archive +): requests_mock.get(archive, content=open(tarfile_path, "rb").read()) - command = f'pyhf contrib 
download {archive} {tmpdir.join("likelihoods").strpath}' + command = f'pyhf contrib download {archive} {tmp_path.joinpath("likelihoods")}' ret = script_runner.run(shlex.split(command)) assert ret.success # Run with all optional flags - command = f'pyhf contrib download --verbose --force {archive} {tmpdir.join("likelihoods").strpath}' + command = f'pyhf contrib download --verbose --force {archive} {tmp_path.joinpath("likelihoods")}' ret = script_runner.run(shlex.split(command)) assert ret.success requests_mock.get( "https://www.pyhfthisdoesnotexist.org/record/resource/1234567", status_code=200 ) - command = f'pyhf contrib download --verbose https://www.pyhfthisdoesnotexist.org/record/resource/1234567 {tmpdir.join("likelihoods").strpath}' + command = f'pyhf contrib download --verbose https://www.pyhfthisdoesnotexist.org/record/resource/1234567 {tmp_path.joinpath("likelihoods")}' ret = script_runner.run(shlex.split(command)) assert not ret.success assert ( @@ -614,7 +623,7 @@ def test_patchset_download(tmpdir, script_runner, requests_mock, tarfile_path, a requests_mock.get( "https://httpstat.us/404/record/resource/1234567", status_code=404 ) - command = f'pyhf contrib download --verbose --force https://httpstat.us/404/record/resource/1234567 {tmpdir.join("likelihoods").strpath}' + command = f'pyhf contrib download --verbose --force https://httpstat.us/404/record/resource/1234567 {tmp_path.joinpath("likelihoods")}' ret = script_runner.run(shlex.split(command)) assert not ret.success assert "gives a response code of 404" in ret.stderr @@ -686,11 +695,11 @@ def test_patchset_inspect(datadir, script_runner): @pytest.mark.parametrize('output_file', [False, True]) @pytest.mark.parametrize('with_metadata', [False, True]) -def test_patchset_extract(datadir, tmpdir, script_runner, output_file, with_metadata): - temp = tmpdir.join("extracted_output.json") +def test_patchset_extract(datadir, tmp_path, script_runner, output_file, with_metadata): + temp = tmp_path.joinpath("extracted_output.json") command = f'pyhf patchset extract {datadir.joinpath("example_patchset.json")} --name patch_channel1_signal_syst1' if output_file: - command += f" --output-file {temp.strpath}" + command += f" --output-file {temp}" if with_metadata: command += " --with-metadata" @@ -698,7 +707,7 @@ def test_patchset_extract(datadir, tmpdir, script_runner, output_file, with_meta assert ret.success if output_file: - extracted_output = json.loads(temp.read()) + extracted_output = json.loads(temp.read_text()) else: extracted_output = json.loads(ret.stdout) if with_metadata: @@ -721,17 +730,17 @@ def test_patchset_verify(datadir, script_runner): @pytest.mark.parametrize('output_file', [False, True]) -def test_patchset_apply(datadir, tmpdir, script_runner, output_file): - temp = tmpdir.join("patched_output.json") +def test_patchset_apply(datadir, tmp_path, script_runner, output_file): + temp = tmp_path.joinpath("patched_output.json") command = f'pyhf patchset apply {datadir.joinpath("example_bkgonly.json")} {datadir.joinpath("example_patchset.json")} --name patch_channel1_signal_syst1' if output_file: - command += f" --output-file {temp.strpath}" + command += f" --output-file {temp}" ret = script_runner.run(shlex.split(command)) assert ret.success if output_file: - extracted_output = json.loads(temp.read()) + extracted_output = json.loads(temp.read_text()) else: extracted_output = json.loads(ret.stdout) assert extracted_output['channels'][0]['samples'][0]['modifiers'][0]['data'] == { @@ -740,24 +749,24 @@ def 
test_patchset_apply(datadir, tmpdir, script_runner, output_file): } -def test_sort(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' +def test_sort(tmp_path, script_runner): + temp = tmp_path.joinpath("parsed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress' ret = script_runner.run(shlex.split(command)) - command = f'pyhf sort {temp.strpath}' + command = f'pyhf sort {temp}' ret = script_runner.run(shlex.split(command)) assert ret.success -def test_sort_outfile(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' +def test_sort_outfile(tmp_path, script_runner): + temp = tmp_path.joinpath("parsed_output.json") + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress' ret = script_runner.run(shlex.split(command)) - tempout = tmpdir.join("sort_output.json") - command = f'pyhf sort {temp.strpath} --output-file {tempout.strpath}' + tempout = tmp_path.joinpath("sort_output.json") + command = f'pyhf sort {temp} --output-file {tempout}' ret = script_runner.run(shlex.split(command)) assert ret.success diff --git a/tests/test_workspace.py b/tests/test_workspace.py index 55ee0df046..e966d6c3f1 100644 --- a/tests/test_workspace.py +++ b/tests/test_workspace.py @@ -791,6 +791,28 @@ def test_combine_workspace(workspace_factory, join): ) +@pytest.mark.parametrize("join", pyhf.Workspace.valid_joins) +def test_combine_workspace_without_validation(mocker, workspace_factory, join): + ws = workspace_factory() + new_ws = ws.rename( + channels={channel: f"renamed_{channel}" for channel in ws.channels}, + samples={sample: f"renamed_{sample}" for sample in ws.samples}, + modifiers={ + modifier: f"renamed_{modifier}" + for modifier, _ in ws.modifiers + if modifier != "lumi" + }, + measurements={ + measurement: f"renamed_{measurement}" + for measurement in ws.measurement_names + }, + ) + + mocker.patch("pyhf.schema.validate") + pyhf.Workspace.combine(ws, new_ws, join=join, validate=False) + assert pyhf.schema.validate.called is False + + def test_workspace_equality(workspace_factory): ws = workspace_factory() ws_other = workspace_factory()
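The new test_combine_workspace_without_validation above patches pyhf.schema.validate to confirm that validate=False propagates from Workspace.combine through to the Workspace constructor (the "return cls(newspec, validate=validate)" change in src/pyhf/workspace.py). Skipping per-step validation is mainly useful when chaining several combines and validating only the final product. A minimal usage sketch follows; the one-bin spec is a toy example, not taken from this PR:

import pyhf

spec = {
    "channels": [
        {
            "name": "channel1",
            "samples": [
                {
                    "name": "signal",
                    "data": [5.0],
                    "modifiers": [
                        {"name": "mu", "type": "normfactor", "data": None}
                    ],
                }
            ],
        }
    ],
    "observations": [{"name": "channel1", "data": [5.0]}],
    "measurements": [
        {"name": "meas", "config": {"poi": "mu", "parameters": []}}
    ],
    "version": "1.0.0",
}

ws_left = pyhf.Workspace(spec)
# Rename to disjoint channel/sample/measurement names so the join succeeds.
ws_right = ws_left.rename(
    channels={"channel1": "channel2"},
    samples={"signal": "signal2"},
    measurements={"meas": "meas2"},
)

# Skip the JSON-schema check on the intermediate result ...
combined = pyhf.Workspace.combine(ws_left, ws_right, join="outer", validate=False)

# ... and validate once at the end instead.
pyhf.schema.validate(combined, "workspace.json")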
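On a related note, the src/pyhf/workspace.py hunk near the top of this diff replaces sum((self.observations[c] for c in model.config.channels), []) with functools.reduce(operator.iadd, ..., []). Both flatten the per-channel observation lists into one, but sum() builds a brand-new list at every addition (quadratic in the total number of bins), while operator.iadd extends the accumulator in place (linear). A self-contained comparison, independent of pyhf:

import functools
import operator

channel_counts = [[1.0, 2.0], [3.0], [4.0, 5.0]]

# sum() copies the accumulator on every step: O(n**2) overall.
flattened_slow = sum((counts for counts in channel_counts), [])

# operator.iadd extends the accumulator in place: O(n) overall. The fresh []
# initializer matters: without it, reduce would mutate the first inner list.
flattened_fast = functools.reduce(
    operator.iadd, (counts for counts in channel_counts), []
)

assert flattened_slow == flattened_fast == [1.0, 2.0, 3.0, 4.0, 5.0]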