diff --git a/.github/actions/install_eitprocessing/action.yml b/.github/actions/install_eitprocessing/action.yml index 71efad28c..f47d75859 100644 --- a/.github/actions/install_eitprocessing/action.yml +++ b/.github/actions/install_eitprocessing/action.yml @@ -45,5 +45,5 @@ runs: key: ${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') }}-[${{ inputs.dependencies }}] - name: Install eitprocessing with [${{ inputs.dependencies }}] dependencies if: steps.cache-python-env.outputs.cache-hit != 'true' - run: python3 -m pip install -e ".[${{ inputs.dependencies }}]" + run: python3 -m pip install ".[${{ inputs.dependencies }}]" shell: bash diff --git a/.github/workflows/cffconvert.yml b/.github/workflows/cffconvert.yml index 1843d4cee..72401b92d 100644 --- a/.github/workflows/cffconvert.yml +++ b/.github/workflows/cffconvert.yml @@ -1,4 +1,4 @@ -name: Citation file +name: Citation File on: push: diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 4a6c7d2bc..2dbb76b26 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -1,4 +1,4 @@ -name: Linting +name: Lint on: push: diff --git a/.github/workflows/release_github.yml b/.github/workflows/release_github.yml index afe913bba..0744ca0e2 100644 --- a/.github/workflows/release_github.yml +++ b/.github/workflows/release_github.yml @@ -4,9 +4,10 @@ on: workflow_dispatch: inputs: version_level: - description: "Semantic version level increase." + description: Semantic version level increase required: true type: choice + default: patch options: - patch - minor @@ -14,144 +15,197 @@ on: permissions: contents: write - pull-requests: write packages: read + statuses: read + checks: read + pull-requests: write + actions: read + repository-projects: read + +env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GH_PAGER: cat jobs: + checks: + name: Check requirements + runs-on: ubuntu-latest + steps: + - name: Fail if main branch was selected + if: ${{ github.ref_name == 'main' }} + run: | + echo "Cannot release from main branch, please select valid release branch." + exit 1 + + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Check if PR exists + run: gh pr view ${{ github.ref_name }} + + - name: Check if PR base is main + run: gh pr view ${{ github.ref_name }} --json baseRefName -q 'if .baseRefName == "main" then "PR base is main" else error("PR base is not main") end' + + - name: Check if PR is mergeable + run: gh pr view ${{ github.ref_name }} --json mergeable -q 'if .mergeable == "MERGEABLE" then "PR is mergeable" else error("PR is not mergeable") end' + + - name: Check whether PR checks pass(ed) + # note that this assumed there are checks to be passed; this fails if there are no checks, which is no + # relevant to this repo + run: | + gh pr checks ${{ github.ref_name }} --watch --fail-fast + gh pr checks ${{ github.ref_name }} --json state -q 'if . | length == 0 then "No checks found." 
elif map(.state == "SUCCESS") | all then "All checks passed" else error("Not all checks passed") end' + test_python_releases: + needs: checks runs-on: ${{ matrix.os }} strategy: fail-fast: false - matrix: + matrix: os: ["ubuntu-latest"] python-version: ["3.10", "3.11", "3.12", "3.13"] name: Build for ${{ matrix.python-version }}, ${{ matrix.os }} - env: + env: EIT_PROCESSING_TEST_DATA: ${{ github.workspace }}/../eitprocessing_data/ steps: - - uses: actions/checkout@v4 - - uses: ./.github/actions/install_eitprocessing + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Install eitprocessing + uses: ./.github/actions/install_eitprocessing with: dependencies: testing extract-data: true python-version: ${{ matrix.python-version }} token: ${{ secrets.GITHUB_TOKEN }} data-directory: ${{ env.EIT_PROCESSING_TEST_DATA }} + - name: Run pytest run: pytest -v - - name: Install eitprocessing with dev dependencies + + - name: Install additional dependencies for building run: python3 -m pip install --upgrade ".[publishing]" + - name: Build eitprocessing run: python3 -m build - - github_release: + merge_and_bump: + name: Merge the changes into main and bump the version needs: test_python_releases - runs-on: "ubuntu-latest" - defaults: - run: - shell: bash -l {0} - + runs-on: ubuntu-latest + outputs: + new-version: ${{ steps.bump.outputs.current-version }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: - - name: Fail if main branch was selected - if: ${{ github.ref_name == 'main' }} - run: | - echo "Cannot release from main branch, please select valid release branch." - exit 1 - - name: Checkout repository uses: actions/checkout@v4 with: - token: ${{ secrets.GH_PAT }} ref: main fetch-depth: 0 + token: ${{ secrets.GH_PAT }} - name: Configure git run: | - git config user.email "actions@github.com" + git config user.email "${GITHUB_ACTOR_ID}+${GITHUB_ACTOR}@users.noreply.github.com" git config user.name "GitHub Actions" - git pull + git config -l - - name: Merge changes into main + - name: Merge branch into main run: | git switch main - git branch ${{ github.ref_name }} origin/${{ github.ref_name }} - git merge ${{ github.ref_name }} --no-ff --no-commit - git commit --no-edit + git branch -f ${{ github.ref_name }} origin/${{ github.ref_name }} + git merge ${{ github.ref_name }} --no-ff --no-edit + + - name: Install Python + uses: actions/setup-python@v5.1.1 + with: + python-version: "3.12" - - name: Bump version + - name: Install bump-my-version + shell: bash + run: | + python3 -m pip install pyproject-deplister + pyproject-deplister --extra dev --path pyproject.toml | grep bump-my-version | sed 's/ //g' | xargs -I{} python3 -m pip install "{}" + + - name: Pass Inputs to Shell id: bump + shell: bash + run: | + echo "previous-version=$(bump-my-version show current_version)" >> $GITHUB_OUTPUT + bump-my-version bump ${{ inputs.version_level }} --commit --tag + ([[ $? -gt 0 ]] && echo "bumped=false" || echo "bumped=true") >> $GITHUB_OUTPUT + echo "current-version=$(bump-my-version show current_version)" >> $GITHUB_OUTPUT + + - name: Fail if bumping failes + if: steps.bump.outputs.bumped == 'false' + run: | + echo "Bumping failed." + git reset --hard HEAD^ + exit 1 + + - name: Check new version number + if: steps.bump.outputs.bumped == 'true' + run: | + echo "Version was bumped from ${{ steps.bump.outputs.previous-version }} to ${{ steps.bump.outputs.current-version }}!" 
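For reference, the `bump` step above relies on `bump-my-version` to rewrite the version in the repository configuration and to expose the old and new values as step outputs. The snippet below only sketches the patch/minor/major semantics selected by the `version_level` input; `bump_version` is a hypothetical helper, not part of this repository or of `bump-my-version` itself.

```python
def bump_version(version: str, level: str) -> str:
    """Return the next semantic version for a given bump level (illustration only)."""
    major, minor, patch = (int(part) for part in version.split("."))
    if level == "major":
        return f"{major + 1}.0.0"
    if level == "minor":
        return f"{major}.{minor + 1}.0"
    if level == "patch":
        return f"{major}.{minor}.{patch + 1}"
    msg = f"unknown version level: {level}"
    raise ValueError(msg)


# The workflow stores these values as the step outputs previous-version and current-version.
print(bump_version("1.2.3", "patch"))  # 1.2.4
print(bump_version("1.2.3", "minor"))  # 1.3.0
print(bump_version("1.2.3", "major"))  # 2.0.0
```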
+ + - name: Merge main into develop + run: | + git checkout develop + git merge main --no-ff --no-edit || { echo "Can't merge changes to develop. Manually merge PR and create GitHub release."; exit 1; } + + - name: Push changes to develop + uses: ad-m/github-push-action@master + with: + github_token: ${{ secrets.GH_PAT }} + branch: develop + force_with_lease: true + + - name: Checkout main run: | - echo "-- install bump-my-version" - python3 -m pip install bump-my-version - echo "-- bump the version" - bump-my-version bump ${{ github.event.inputs.version_level }} --commit --tag - echo "-- push bumped version" - echo "RELEASE_TAG=$(git describe --tags --abbrev=0)" >> $GITHUB_OUTPUT - git push --tags -f - git push + git checkout main + + - name: Push changes to main + uses: ad-m/github-push-action@master + with: + github_token: ${{ secrets.GH_PAT }} + branch: main + force_with_lease: true + + github_release: + name: Create a draft GitHub release + needs: merge_and_bump + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + ref: main - name: Create GitHub Release id: create_release env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | - gh release create ${{ steps.bump.outputs.RELEASE_TAG }} \ - --title="Release ${{ steps.bump.outputs.RELEASE_TAG }}" \ + gh release create v${{ needs.merge_and_bump.outputs.new-version }} \ + --title="Release v${{ needs.merge_and_bump.outputs.new-version }}" \ --generate-notes \ --draft - - name: Validate CITATION.cff - id: validate_cff - env: - GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} - uses: dieghernan/cff-validator@v3 - - tidy_workspace: - # only run if action above succeeds + remove_branch: + name: Remove PR branch needs: github_release - runs-on: "ubuntu-latest" - defaults: - run: - shell: bash -l {0} - + if: ${{ github.ref_name != 'develop' }} + runs-on: ubuntu-latest steps: - name: Checkout repository uses: actions/checkout@v4 with: - token: ${{ secrets.GH_PAT }} + ref: develop fetch-depth: 0 - - name: Configure git - run: | - git config user.email "actions@github.com" - git config user.name "GitHub Actions" - git pull - - - name: Close PR - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - echo "-- searching for associated PR" - pr_number=$(gh pr list --head ${{ github.ref_name }} --json number --jq '.[0].number') - if [ -n "$pr_number" ]; then - echo "-- closing PR #$pr_number" - gh pr close $pr_number - else - echo "-- no open pull request found for branch $branch_name" - fi - - - name: Merge updates into develop - run: | - git switch develop - git merge --no-ff origin/main - git push - - - name: Delete release branch other than main or develop - run: | - if [[ ${{ github.ref_name }} != "main" && ${{ github.ref_name }} != "develop" ]]; then - echo "-- deleting branch '${{ github.ref_name }}'" - git push origin -d ${{ github.ref_name }} - else - echo "-- branch '${{ github.ref_name }}' will not be deleted from remote" - fi + - name: Remove PR branch + uses: dawidd6/action-delete-branch@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + branches: ${{ github.ref_name }} diff --git a/.github/workflows/test_build_documentation.yml b/.github/workflows/test_build_documentation.yml index a53aafe6c..957257ba0 100644 --- a/.github/workflows/test_build_documentation.yml +++ b/.github/workflows/test_build_documentation.yml @@ -1,4 +1,4 @@ -name: Test build documentation +name: Documentation on: push: diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 746f58947..39f979056 
100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -1,4 +1,4 @@ -name: Run tests +name: Build and Test on: push: diff --git a/.gitignore b/.gitignore index 80a49b1ad..7c5480199 100644 --- a/.gitignore +++ b/.gitignore @@ -39,3 +39,5 @@ venv3 /site/ uv.lock + +worktrees diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 000000000..a188e0692 --- /dev/null +++ b/.prettierignore @@ -0,0 +1 @@ +docs/* diff --git a/.vscode/settings.json b/.vscode/settings.json index e0a2028a7..5b431fe12 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -26,6 +26,7 @@ "rewrap.wrappingColumn": 120, "rewrap.autoWrap.enabled": true, "editor.formatOnSave": true, + "files.trimTrailingWhitespace": true, "editor.defaultFormatter": "esbenp.prettier-vscode", "[ignore]": { "editor.defaultFormatter": "foxundermoon.shell-format" diff --git a/README.dev.md b/README.dev.md index 9fb512b13..22ed56b01 100644 --- a/README.dev.md +++ b/README.dev.md @@ -128,7 +128,7 @@ We use [prettier](https://prettier.io/) for formatting most other files. If you ## Making a release -### Automated release workflow: +### Automated release workflow 0. **IMP0RTANT:** [Create a PR](#creating-a-pr) for the release branch (usually `develop`) and make sure that all checks pass! - if everything goes well, this PR will automatically be closed after the draft release is created. @@ -152,7 +152,7 @@ We use [prettier](https://prettier.io/) for formatting most other files. If you 8. Click green "Publish Release" button to convert the draft to a published release on GitHub. - This will automatically trigger [another GitHub workflow](https://github.com/EIT-ALIVE/eitprocessing/actions/workflows/release.yml) that will take care of publishing the package on PyPi. -#### Updating the token: +#### Updating the token NOTE: the current token (associated to @DaniBodor) allowing to bypass branch protection will expire on June 20th, 2025. To update the token do the following: @@ -164,7 +164,7 @@ NOTE: the current token (associated to @DaniBodor) allowing to bypass branch pro 5. Navigate to the [secrets settings](https://github.com/EIT-ALIVE/eitprocessing/settings/secrets/actions). 6. Edit the `GH_PAT` key giving your access token as the new value. -### Manually create a release: +### Manually create a release 0. Make sure you have all required developers tools installed `pip install -e .'[dev]'`. 1. Create a `release` branch from `main` and merge the changes into this branch. 
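As a companion to the release steps above: `bump-my-version show current_version` prints the version that a release run will bump and tag. A minimal sketch of inspecting that value by hand, assuming the version is stored under the `[project]` table of `pyproject.toml` (the actual location `bump-my-version` is configured to use may differ) and Python 3.11+ for `tomllib`:

```python
import tomllib
from pathlib import Path

# Read pyproject.toml and print the version that would be bumped and tagged.
with Path("pyproject.toml").open("rb") as file:
    pyproject = tomllib.load(file)

print(pyproject["project"]["version"])
```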
diff --git a/README.md b/README.md index d81b58edc..2546fcc97 100644 --- a/README.md +++ b/README.md @@ -1,51 +1,50 @@ -# Badges +# EITprocessing -| Badges | | -| :------------- | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| Registry | [![RSD](https://img.shields.io/badge/rsd-eitprocessing-00a3e3.svg)](https://www.research-software.nl/software/eitprocessing) [![workflow pypi badge](https://img.shields.io/pypi/v/eitprocessing.svg?colorB=blue)](https://pypi.python.org/project/eitprocessing/) [![github repo badge](https://img.shields.io/badge/github-repo-000.svg?logo=github&labelColor=gray&color=blue)](git@github.com:EIT-ALIVE/eitprocessing) | -| License | [![github license badge](https://img.shields.io/github/license/EIT-ALIVE/eitprocessing)](git@github.com:EIT-ALIVE/eitprocessing) | -| Citation | [![DOI](https://zenodo.org/badge/617944717.svg)](https://zenodo.org/badge/latestdoi/617944717) | -| Fairness | [![OpenSSF Best Practices](https://www.bestpractices.dev/projects/9147/badge)](https://www.bestpractices.dev/projects/9147) [![fair-software badge](https://img.shields.io/badge/fair--software.eu-%E2%97%8F%20%20%E2%97%8F%20%20%E2%97%8F%20%20%E2%97%8F%20%20%E2%97%8B-yellow)](https://fair-software.eu) | -| GitHub Actions | ![build](https://github.com/EIT-ALIVE/eitprocessing/actions/workflows/build.yml/badge.svg) ![lint](https://github.com/EIT-ALIVE/eitprocessing/actions/workflows/lint.yml/badge.svg) ![documentation](https://github.com/EIT-ALIVE/eitprocessing/actions/workflows/documentation.yml/badge.svg) ![cffconvert](https://github.com/EIT-ALIVE/eitprocessing/actions/workflows/cffconvert.yml/badge.svg) [![Coverage Status](https://coveralls.io/repos/github/EIT-ALIVE/eitprocessing/badge.svg?branch=main)](https://coveralls.io/github/EIT-ALIVE/eitprocessing?branch=main) | -| Python | ![Python](https://img.shields.io/badge/python-3.10-blue.svg) ![Python](https://img.shields.io/badge/python-3.11-blue.svg) ![Python](https://img.shields.io/badge/python-3.12-blue.svg) ![Python](https://img.shields.io/badge/python-3.13-blue.svg) | - -# Contents - -- [Introduction](#introduction) -- [Installation](#installation) - - [Install from PyPi](#install-from-pypi) - - [Developer install](#developer-install) -- [Documentation](#documentation) -- [Contributing](#contributing) -- [Credits](#credits) - -# Introduction +## Introduction [Electrical Impedance Tomography](https://en.wikipedia.org/wiki/Electrical_impedance_tomography) (EIT) is a noninvasive and radiation-free continuous imaging tool for monitoring respiratory mechanics. eitprocessing aims to provide a versatile, user-friendly, reproducible and reliable workflow for the processing and analysis of EIT data and related waveform data, like pressures and flow. -eitprocessing includes tools to load data exported from EIT-devices from several manufacturers, including Dräger, SenTec +`eitprocessing` includes tools to load data exported from EIT-devices from several manufacturers, including Dräger, SenTec and Timpel, as well as data from other sources. 
Several pre-processing tools and analysis tools are provided. -[eit_dash](https://github.com/EIT-ALIVE/eit_dash) provides an accompanying GUI. +| Badges | | +| :------------- | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| Registry | [![RSD](https://img.shields.io/badge/rsd-eitprocessing-00a3e3.svg)](https://www.research-software.nl/software/eitprocessing) [![workflow pypi badge](https://img.shields.io/pypi/v/eitprocessing.svg?colorB=blue)](https://pypi.python.org/project/eitprocessing/) [![github repo badge](https://img.shields.io/badge/github-repo-000.svg?logo=github&labelColor=gray&color=blue)](git@github.com:EIT-ALIVE/eitprocessing) | +| License | [![github license badge](https://img.shields.io/github/license/EIT-ALIVE/eitprocessing)](git@github.com:EIT-ALIVE/eitprocessing) | +| Citation | [![DOI](https://zenodo.org/badge/617944717.svg)](https://zenodo.org/badge/latestdoi/617944717) | +| Fairness | [![OpenSSF Best Practices](https://www.bestpractices.dev/projects/9147/badge)](https://www.bestpractices.dev/projects/9147) [![fair-software badge](https://img.shields.io/badge/fair--software.eu-%E2%97%8F%20%20%E2%97%8F%20%20%E2%97%8F%20%20%E2%97%8F%20%20%E2%97%8B-yellow)](https://fair-software.eu) | +| GitHub Actions | ![build](https://github.com/EIT-ALIVE/eitprocessing/actions/workflows/testing.yml/badge.svg) ![lint](https://github.com/EIT-ALIVE/eitprocessing/actions/workflows/lint.yml/badge.svg) ![documentation](https://github.com/EIT-ALIVE/eitprocessing/actions/workflows/test_build_documentation.yml/badge.svg) ![cffconvert](https://github.com/EIT-ALIVE/eitprocessing/actions/workflows/cffconvert.yml/badge.svg) [![Coverage Status](https://coveralls.io/repos/github/EIT-ALIVE/eitprocessing/badge.svg?branch=main)](https://coveralls.io/github/EIT-ALIVE/eitprocessing?branch=main) | +| Python Support | ![Python](https://img.shields.io/badge/python-3.10-blue.svg) ![Python](https://img.shields.io/badge/python-3.11-blue.svg) ![Python](https://img.shields.io/badge/python-3.12-blue.svg) ![Python](https://img.shields.io/badge/python-3.13-blue.svg) | +| Linting | [![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff) | + +## Contents -# Installation +- [Introduction](#introduction) +- [Installation](#installation) + - [Install from PyPi](#install-from-pypi) + - [Developer install](#developer-install) +- [Documentation](#documentation) +- [Contributing](#contributing) +- [Credits](#credits) + +## Installation It is advised to install eitprocessing in a dedicated virtual environment. See e.g. [Install packages in a virtual environment using pip and venv](https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/) or [Getting started with conda](https://docs.conda.io/projects/conda/en/stable/user-guide/getting-started.html). 
-## Install from PyPi +### Install from PyPi eitprocessing can be installed from PyPi as follows: -``` +```bash pip install eitprocessing ``` @@ -53,21 +52,23 @@ pip install eitprocessing For full developer options (testing, etc): -``` +```bash git clone git@github.com:EIT-ALIVE/eitprocessing.git cd eitprocessing pip install -e ".[dev]" ``` -# Documentation + + +## Documentation Please see our [user documentation](https://eit-alive.github.io/eitprocessing/) for a detailed explanation of the package. -# Contributing +## Contributing We welcome any contributions or suggestions. If you want to contribute to the development of eitprocessing, have a look at the [contribution guidelines](CONTRIBUTING.md) and the [developer documentation](README.dev.md). -# Credits +## Credits This package was created with [Cookiecutter](https://github.com/audreyr/cookiecutter) and the [NLeSC/python-template](https://github.com/NLeSC/python-template). diff --git a/docs/api/datacontainers.md b/docs/api/datacontainers.md index f8367aac2..dc0722097 100644 --- a/docs/api/datacontainers.md +++ b/docs/api/datacontainers.md @@ -1,7 +1,7 @@ # Data containers ::: eitprocessing.datahandling.sequence.Sequence - + ::: eitprocessing.datahandling.eitdata.EITData ::: eitprocessing.datahandling.continuousdata.ContinuousData diff --git a/docs/api/filters.md b/docs/api/filters.md index 9d939765c..b6fce83a0 100644 --- a/docs/api/filters.md +++ b/docs/api/filters.md @@ -9,4 +9,3 @@ ::: eitprocessing.filters.butterworth_filters.BandPassFilter ::: eitprocessing.filters.butterworth_filters.ButterworthFilter - diff --git a/docs/api/mixins.md b/docs/api/mixins.md index e444dc6f1..73016ab43 100644 --- a/docs/api/mixins.md +++ b/docs/api/mixins.md @@ -9,4 +9,3 @@ - t ::: eitprocessing.datahandling.mixins.slicing.TimeIndexer - \ No newline at end of file diff --git a/docs/css/material.css b/docs/css/material.css index bf35afa80..2a5cd9732 100644 --- a/docs/css/material.css +++ b/docs/css/material.css @@ -1,5 +1,5 @@ /* Indentation. */ div.doc-contents:not(.first) { - padding-left: 25px; - border-left: .1rem solid var(--md-typeset-table-color); + padding-left: 25px; + border-left: 0.1rem solid var(--md-typeset-table-color); } diff --git a/docs/index.md b/docs/index.md index 5f19972c6..59efb4d17 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,23 +1,19 @@ - # Welcome to EITprocessing -# Introduction +## Introduction -Welcome to the documentation of the ALIVE software tool designed to load, analyze, and extract parameters from Electrical Impedance Tomography (EIT) data. -This software was designed by a joined effort of the Rotterdam Advanced Respiratory Care research group (ROTARC) of the Intensive Care of the Erasmus Medical Center and the Netherlands e-Science center. [Grant ID: NLESC.OEC.2022.002](https://research-software-directory.org/projects/alive) +Welcome to the documentation of the ALIVE software tool designed to load, analyze, and extract parameters from Electrical Impedance Tomography (EIT) data. +This software was designed by a joined effort of the Rotterdam Advanced Respiratory Care research group (ROTARC) of the Intensive Care of the Erasmus Medical Center and the Netherlands e-Science center. [Grant ID: NLESC.OEC.2022.002](https://research-software-directory.org/projects/alive) -EIT is a bedside non-invasive lung imaging tool: it continuously and real-time visualizes changes in lung volume. 
Our software tool serves as a comprehensive solution for handling EIT data from multiple leading manufacturers, including Sentec, Dräger, and Timpel. +EIT is a bedside non-invasive lung imaging tool: it continuously visualizes changes in lung volume in real time. Our software tool serves as a comprehensive solution for handling EIT data from multiple leading manufacturers, including Sentec, Dräger, and Timpel. -The software tool includes a back-end for researchers that are familair with programming [eitprocessing](https://github.com/EIT-ALIVE/eitprocessing) and also a user-friendly dashboard [eit_dash](https://github.com/EIT-ALIVE/eit_dash) for clinical researchers allowing to quickly import datasets from various formats and sources and perform processing and analysis. This documentation page concerns eitprocessing. +The software tool includes a back-end for researchers who are familiar with programming [eitprocessing](https://github.com/EIT-ALIVE/eitprocessing) and a user-friendly dashboard [eit_dash](https://github.com/EIT-ALIVE/eit_dash) for clinical researchers, allowing them to quickly import datasets from various formats and sources and perform processing and analysis. This documentation page concerns eitprocessing. Our tool offers robust analysis features. From basic filters to advanced signal processing techniques, you can extract meaningful parameters from your EIT data. With our dashboard we aim to provide default analysis pipelines and many opportunities for customization according to the user needs. Visualizations and interactive graphs make it easy to interpret the results and understand the underlying physiological processes. -It is important to note that the software tool is a work in progress, so not all fuctionalities are available yet. If you would like to contribute to coding you can reach out to us. +It is important to note that the software tool is a work in progress, so not all functionalities are available yet. If you would like to contribute to the code, you can reach out to us. ## Getting Started -To start using our software you can you use the [installation](installation.md) guide to set up the software on your system. Once installed, you can load your first dataset and explore the basic features. -We are committed to supporting your journey with EIT data analysis and extraction. If you encounter any issues or have questions you can put a pull request via github or emailadres. - - - +To start using our software, you can use the [installation](installation.md) guide to set up the software on your system. Once installed, you can load your first dataset and explore the basic features. +We are committed to supporting your journey with EIT data analysis and extraction. If you encounter any issues or have questions, you can open a pull request on GitHub or contact us by email. diff --git a/docs/installation.md b/docs/installation.md index 491329a16..713c749f9 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -1 +1 @@ ---8<-- "README.md:39:62" +--8<-- "README.md:install" diff --git a/docs/team.md b/docs/team.md index 9d0351c80..e8b7c0296 100644 --- a/docs/team.md +++ b/docs/team.md @@ -1,12 +1,14 @@ -# Our team +# Our Team ## eScience Center + - Dani Bodor, Lead RSE - Walter Baccinelli, RSE -- Pablo Lopez-Tarifa, Programme Manager +- Pablo Lopez-Tarifa, Programme Manager ## Erasmus Medical Center Rotterdam + - Annemijn H.
Jonkman, Lead Applicant -- Peter Somhorst -- Juliette Francovich -- Jantine Wisse +- Peter Somhorst +- Juliette Francovich +- Jantine Wisse diff --git a/eitprocessing/datahandling/datacollection.py b/eitprocessing/datahandling/datacollection.py index 5d007ede6..266d58bf1 100644 --- a/eitprocessing/datahandling/datacollection.py +++ b/eitprocessing/datahandling/datacollection.py @@ -40,9 +40,9 @@ def __init__(self, data_type: type[V], *args, **kwargs): self.data_type = data_type super().__init__(*args, **kwargs) - def __setitem__(self, __key: str, __value: V, /) -> None: - self._check_item(__value, key=__key) - return super().__setitem__(__key, __value) + def __setitem__(self, key: str, value: V, /) -> None: + self._check_item(value, key=key) + return super().__setitem__(key, value) def add(self, *item: V, overwrite: bool = False) -> None: """Add one or multiple item(s) to the collection using the item label as the key.""" diff --git a/eitprocessing/datahandling/loading/__init__.py b/eitprocessing/datahandling/loading/__init__.py index a8c8b238a..ca3c37d9f 100644 --- a/eitprocessing/datahandling/loading/__init__.py +++ b/eitprocessing/datahandling/loading/__init__.py @@ -18,6 +18,10 @@ def load_eit_data( ) -> Sequence: """Load EIT data from path(s). + Current limitations: + - Dräger data is assumed to have a limited set of (Medibus) data. Newer additions that add data like pleural + pressure are not yet supported. + Args: path: relative or absolute path(s) to data file. vendor: vendor indicating the device used. @@ -29,7 +33,7 @@ def load_eit_data( Defaults to the same value as label. description: long description of sequence for human interpretation. sample_frequency: sample frequency at which the data was recorded. - Default for Draeger: 20 + No default for Draeger. Will be autodetected. Warns if autodetected differs from provided. Default for Timpel: 50 Default for Sentec: 50.2 first_frame: index of first frame to load. diff --git a/eitprocessing/datahandling/loading/draeger.py b/eitprocessing/datahandling/loading/draeger.py index 092c6c980..0f8affa1a 100644 --- a/eitprocessing/datahandling/loading/draeger.py +++ b/eitprocessing/datahandling/loading/draeger.py @@ -23,13 +23,12 @@ from numpy.typing import NDArray _FRAME_SIZE_BYTES = 4358 -DRAEGER_SAMPLE_FREQUENCY = 20 load_draeger_data = partial(load_eit_data, vendor=Vendor.DRAEGER) def load_from_single_path( path: Path, - sample_frequency: float | None = 20, + sample_frequency: float | None = None, first_frame: int = 0, max_frames: int | None = None, ) -> dict[str, DataCollection]: @@ -38,7 +37,9 @@ def load_from_single_path( if file_size % _FRAME_SIZE_BYTES: msg = ( f"File size {file_size} of file {path!s} not divisible by {_FRAME_SIZE_BYTES}.\n" - f"Make sure this is a valid and uncorrupted Dräger data file." + "Currently this package does not support loading files containing " + "esophageal pressure or other non-standard data. " + "Make sure this is a valid and uncorrupted Dräger data file." ) raise OSError(msg) total_frames = file_size // _FRAME_SIZE_BYTES @@ -87,8 +88,17 @@ def load_from_single_path( previous_marker, ) + estimated_sample_frequency = round((len(time) - 1) / (time[-1] - time[0]), 4) + if not sample_frequency: - sample_frequency = DRAEGER_SAMPLE_FREQUENCY + sample_frequency = estimated_sample_frequency + + elif sample_frequency != estimated_sample_frequency: + msg = ( + f"Provided sample frequency ({sample_frequency}) does not match " + f"the estimated sample frequency ({estimated_sample_frequency})." 
+ ) + warnings.warn(msg, RuntimeWarning) # time wraps around the number of seconds in a day time = np.unwrap(time, period=24 * 60 * 60) diff --git a/eitprocessing/datahandling/loading/timpel.py b/eitprocessing/datahandling/loading/timpel.py index ac0287327..8f14c6e58 100644 --- a/eitprocessing/datahandling/loading/timpel.py +++ b/eitprocessing/datahandling/loading/timpel.py @@ -31,7 +31,7 @@ def load_from_single_path( path: Path, - sample_frequency: float | None = 20, + sample_frequency: float | None = TIMPEL_SAMPLE_FREQUENCY, first_frame: int = 0, max_frames: int | None = None, ) -> dict[str, DataCollection]: diff --git a/eitprocessing/datahandling/mixins/slicing.py b/eitprocessing/datahandling/mixins/slicing.py index 301676d3c..f25e75267 100644 --- a/eitprocessing/datahandling/mixins/slicing.py +++ b/eitprocessing/datahandling/mixins/slicing.py @@ -5,7 +5,7 @@ import warnings from abc import ABC, abstractmethod from dataclasses import dataclass -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Generic, TypeVar import numpy as np @@ -172,8 +172,11 @@ def select_by_time( # noqa: D417 ) +T = TypeVar("T", bound=HasTimeIndexer) + + @dataclass -class TimeIndexer: +class TimeIndexer(Generic[T]): """Helper class for slicing an object using the time axis instead of indices. Example: @@ -186,9 +189,9 @@ class TimeIndexer: ``` """ - obj: HasTimeIndexer + obj: T - def __getitem__(self, key: slice | float): + def __getitem__(self, key: slice | float) -> T: if isinstance(key, slice): if key.step: msg = "Can't slice by time using specific step sizes." diff --git a/project_setup.md b/project_setup.md deleted file mode 100644 index 104da031c..000000000 --- a/project_setup.md +++ /dev/null @@ -1,106 +0,0 @@ -# Project Setup - -Here we provide some details about the project setup. Most of the choices are explained in the -[guide](https://guide.esciencecenter.nl). Links to the relevant sections are included below. Feel free to remove this -text when the development of the software package takes off. - -For a quick reference on software development, we refer to [the software guide -checklist](https://guide.esciencecenter.nl/#/best_practices/checklist). - -## Python versions - -This repository is set up with Python versions: - -- 3.10 - -## Package management and dependencies - -You can use either pip or conda for installing dependencies and package management. This repository does not force you -to use one or the other, as project requirements differ. For advice on what to use, please check [the relevant section -of the -guide](https://guide.esciencecenter.nl/#/best_practices/language_guides/python?id=dependencies-and-package-management). - -- Runtime dependencies should be added to `setup.cfg` in the `install_requires` list under `[options]`. -- Development dependencies should be added to `setup.cfg` in one of the lists under `[options.extras_require]`. - -## Packaging/One command install - -You can distribute your code using PyPI. -[The guide](https://guide.esciencecenter.nl/#/best_practices/language_guides/python?id=building-and-packaging-code) can -help you decide which tool to use for packaging. - -## Testing and code coverage - -- Tests should be put in the `tests` folder. 
-- The `tests` folder contains: - - Example tests that you should replace with your own meaningful tests (file: `test_my_module.py`) -- The testing framework used is [PyTest](https://pytest.org) - - [PyTest introduction](https://pythontest.com/pytest-book/) - - PyTest is listed as a development dependency - - This is configured in `setup.cfg` -- The project uses [GitHub action workflows](https://docs.github.com/en/actions) to automatically run tests on GitHub infrastructure against multiple Python versions - - Workflows can be found in [`.github/workflows`](.github/workflows/) -- [Relevant section in the guide](https://guide.esciencecenter.nl/#/best_practices/language_guides/python?id=testing) - -## Documentation - -- Documentation should be put in the [`docs/`](docs/) directory. The contents have been generated using `sphinx-quickstart` (Sphinx version 1.6.5). -- We recommend writing the documentation using Restructured Text (reST) and Google style docstrings. - - [Restructured Text (reST) and Sphinx CheatSheet](https://thomas-cokelaer.info/tutorials/sphinx/rest_syntax.html) - - [Google style docstring examples](http://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html). -- The documentation is set up with the ReadTheDocs Sphinx theme. - - Check out its [configuration options](https://sphinx-rtd-theme.readthedocs.io/en/latest/). -- [AutoAPI](https://sphinx-autoapi.readthedocs.io/) is used to generate documentation for the package Python objects. -- `.readthedocs.yaml` is the ReadTheDocs configuration file. When ReadTheDocs is building the documentation this package and its development dependencies are installed so the API reference can be rendered. -- [Relevant section in the guide](https://guide.esciencecenter.nl/#/best_practices/language_guides/python?id=writingdocumentation) - -## Coding style conventions and code quality - -- [Relevant section in the NLeSC guide](https://guide.esciencecenter.nl/#/best_practices/language_guides/python?id=coding-style-conventions) and [README.dev.md](README.dev.md). - -## Continuous code quality - -We check code quality by hand, and continously upgrade as time allows. - -## Package version number - -- We recommend using [semantic versioning](https://guide.esciencecenter.nl/#/best_practices/releases?id=semantic-versioning). -- For convenience, the package version is stored in a single place: `eitprocessing/.bumpversion.cfg`. -- Don't forget to update the version number before [making a release](https://guide.esciencecenter.nl/#/best_practices/releases)! - -## Logging - -- We recommend using the logging module for getting useful information from your module (instead of using print). -- The project is set up with a logging example. -- [Relevant section in the guide](https://guide.esciencecenter.nl/#/best_practices/language_guides/python?id=logging) - -## CHANGELOG.md - -- Document changes to your software package -- [Relevant section in the guide](https://guide.esciencecenter.nl/#/best_practices/releases?id=changelogmd) - -## CITATION.cff - -- To allow others to cite your software, add a `CITATION.cff` file -- It only makes sense to do this once there is something to cite (e.g., a software release with a DOI). -- Follow the [making software citable](https://guide.esciencecenter.nl/#/citable_software/making_software_citable) section in the guide. 
- -## CODE_OF_CONDUCT.md - -- Information about how to behave professionally -- [Relevant section in the guide](https://guide.esciencecenter.nl/#/best_practices/documentation?id=code-of-conduct) - -## CONTRIBUTING.md - -- Information about how to contribute to this software package -- [Relevant section in the guide](https://guide.esciencecenter.nl/#/best_practices/documentation?id=contribution-guidelines) - -## MANIFEST.in - -- List non-Python files that should be included in a source distribution -- [Relevant section in the guide](https://guide.esciencecenter.nl/#/best_practices/language_guides/python?id=building-and-packaging-code) - -## NOTICE - -- List of attributions of this project and Apache-license dependencies -- [Relevant section in the guide](https://guide.esciencecenter.nl/#/best_practices/licensing?id=notice) diff --git a/pyproject.toml b/pyproject.toml index 009ee46b6..989e170a6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,11 +50,11 @@ dependencies = [ # development dependency groups dev = [ "pytest >= 7.4.0", - "bump-my-version", + "bump-my-version==0.28.1", "coverage", "pytest-cov", "coveralls", - "ruff == 0.6.9", + "ruff == 0.8", ] testing = ["pytest >= 7.4.0", "pytest-cov", "coveralls"] docs = [ @@ -98,16 +98,6 @@ exclude = ["tests*", "*tests.*", "*tests"] [tool.setuptools.package-data] eitprocessing = ["config/*.yaml", "py.typed"] -[tool.tox] -legacy_tox_ini = """ -[tox] -envlist = py37,py38,py39,py310,py311 -skip_missing_interpreters = true -[testenv] -commands = pytest -extras = dev -""" - [tool.ruff] output-format = "concise" line-length = 120 @@ -120,8 +110,6 @@ ignore = [ "FBT", # Using boolean arguments "ANN002", # Missing type annotation for `*args` "ANN003", # Missing type annotation for `**kwargs` - "ANN101", # Missing type annotation for `self` in method - "ANN102", # Missing type annotation for `cls` in classmethod "ANN204", # Missing return type annotation for special (dunder) method "S105", # Possible hardcoded password "S311", # insecure random generators @@ -151,7 +139,7 @@ extend-safe-fixes = [ "D415", # First line should end with a period, question mark, or exclamation point "D300", # Use triple double quotes `"""` "D200", # One-line docstring should fit on one line - "TCH", # Format type checking only imports + "TC", # Format type checking only imports "ISC001", # Implicitly concatenated strings on a single line "EM", # Exception message variables "RUF013", # Implicit Optional diff --git a/tests/conftest.py b/tests/conftest.py index e020f72cb..748099f67 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -22,17 +22,17 @@ @pytest.fixture(scope="session") def draeger1(): - return load_eit_data(draeger_file1, vendor="draeger", label="draeger1") + return load_eit_data(draeger_file1, vendor="draeger", sample_frequency=20, label="draeger1") @pytest.fixture(scope="session") def draeger2(): - return load_eit_data(draeger_file2, vendor="draeger", label="draeger2") + return load_eit_data(draeger_file2, vendor="draeger", sample_frequency=20, label="draeger2") @pytest.fixture(scope="session") def draeger_both(): - return load_eit_data([draeger_file2, draeger_file1], vendor="draeger", label="draeger_both") + return load_eit_data([draeger_file2, draeger_file1], vendor="draeger", sample_frequency=20, label="draeger_both") @pytest.fixture(scope="session") diff --git a/tests/mixins/test_eq.py b/tests/mixins/test_eq.py index 4f6385c82..f4300b635 100644 --- a/tests/mixins/test_eq.py +++ b/tests/mixins/test_eq.py @@ -8,8 +8,8 @@ def test_eq(): - data = 
load_eit_data(draeger_file1, vendor="draeger") - data2 = load_eit_data(draeger_file1, vendor="draeger") + data = load_eit_data(draeger_file1, vendor="draeger", sample_frequency=20) + data2 = load_eit_data(draeger_file1, vendor="draeger", sample_frequency=20) data.isequivalent(data2) diff --git a/tests/test_labels.py b/tests/test_labels.py index b8edb2d7d..1b3ec2ca8 100644 --- a/tests/test_labels.py +++ b/tests/test_labels.py @@ -6,7 +6,7 @@ def test_default_label(draeger1: Sequence): - draeger_default = load_eit_data(draeger_file1, vendor="draeger") + draeger_default = load_eit_data(draeger_file1, vendor="draeger", sample_frequency=20) assert isinstance(draeger_default.label, str) assert draeger_default.label == f"Sequence_{id(draeger_default)}" @@ -15,7 +15,7 @@ def test_default_label(draeger1: Sequence): assert timpel_default.label == f"Sequence_{id(timpel_default)}" # test that default label changes upon reloading identical data - draeger_reloaded = load_eit_data(draeger_file1, vendor="draeger") + draeger_reloaded = load_eit_data(draeger_file1, vendor="draeger", sample_frequency=20) assert draeger_default == draeger_reloaded assert draeger_default.label != draeger_reloaded.label assert draeger_default.label != draeger1.label diff --git a/tests/test_loading.py b/tests/test_loading.py index 05faa0766..46c748d26 100644 --- a/tests/test_loading.py +++ b/tests/test_loading.py @@ -3,9 +3,14 @@ from eitprocessing.datahandling.eitdata import EITData, Vendor from eitprocessing.datahandling.loading import load_eit_data -from eitprocessing.datahandling.loading.draeger import DRAEGER_SAMPLE_FREQUENCY from eitprocessing.datahandling.sequence import Sequence -from tests.conftest import draeger_file1, draeger_file2, draeger_file3, dummy_file, timpel_file +from tests.conftest import ( + draeger_file1, + draeger_file2, + draeger_file3, + dummy_file, + timpel_file, +) # ruff: noqa: ERA001 #TODO: remove this line @@ -21,8 +26,8 @@ def test_loading_draeger( assert len(draeger1.eit_data["raw"]) == len(draeger1.eit_data["raw"].time) assert len(draeger2.eit_data["raw"].time) == 20740 - assert draeger1 == load_eit_data(draeger_file1, vendor="draeger", label="draeger1") - assert draeger1 == load_eit_data(draeger_file1, vendor="draeger", label="something_else") + assert draeger1 == load_eit_data(draeger_file1, vendor="draeger", sample_frequency=20, label="draeger1") + assert draeger1 == load_eit_data(draeger_file1, vendor="draeger", sample_frequency=20, label="something_else") assert draeger1 != draeger2 # Load multiple @@ -36,6 +41,15 @@ def test_loading_draeger( # assert draeger_both != draeger_inverted +def test_sample_frequency_draeger(): + with_sf = load_eit_data(draeger_file1, vendor="draeger", sample_frequency=20) + without_sf = load_eit_data(draeger_file1, vendor="draeger") + assert with_sf.eit_data["raw"].sample_frequency == without_sf.eit_data["raw"].sample_frequency + + with pytest.warns(RuntimeWarning): + _ = load_eit_data(draeger_file1, vendor="draeger", sample_frequency=50) + + def test_loading_timpel( draeger1: Sequence, timpel1: Sequence, @@ -56,13 +70,13 @@ def test_loading_illegal(): # non existing for vendor in ["draeger", "timpel"]: with pytest.raises(FileNotFoundError): - _ = load_eit_data(dummy_file, vendor=vendor) + _ = load_eit_data(dummy_file, vendor=vendor, sample_frequency=20) # incorrect vendor with pytest.raises(OSError): _ = load_eit_data(draeger_file1, vendor="timpel") with pytest.raises(OSError): - _ = load_eit_data(timpel_file, vendor="draeger") + _ = load_eit_data(timpel_file, 
vendor="draeger", sample_frequency=20) def test_load_partial( @@ -80,8 +94,8 @@ def test_load_partial( # file for this situation. # Timpel - timpel_part1 = load_eit_data(timpel_file, "timpel", max_frames=cutoff, label="timpel_part_1") - timpel_part2 = load_eit_data(timpel_file, "timpel", first_frame=cutoff, label="timpel_part2") + timpel_part1 = load_eit_data(timpel_file, vendor="timpel", max_frames=cutoff, label="timpel_part_1") + timpel_part2 = load_eit_data(timpel_file, vendor="timpel", first_frame=cutoff, label="timpel_part2") assert len(timpel_part1) == cutoff assert len(timpel_part2) == len(timpel1) - cutoff @@ -91,8 +105,20 @@ def test_load_partial( # assert Sequence.concatenate(timpel_part2, timpel_part1) != timpel1 # Draeger - draeger2_part1 = load_eit_data(draeger_file2, "draeger", max_frames=cutoff, label="draeger_part_1") - draeger2_part2 = load_eit_data(draeger_file2, "draeger", first_frame=cutoff, label="draeger_part_2") + draeger2_part1 = load_eit_data( + draeger_file2, + vendor="draeger", + sample_frequency=20, + max_frames=cutoff, + label="draeger_part_1", + ) + draeger2_part2 = load_eit_data( + draeger_file2, + vendor="draeger", + sample_frequency=20, + first_frame=cutoff, + label="draeger_part_2", + ) assert len(draeger2_part1) == cutoff assert len(draeger2_part2) == len(draeger2) - cutoff @@ -109,24 +135,19 @@ def test_load_partial( def test_illegal_first_frame(): for ff in [0.5, -1, "fdw", 1e12]: with pytest.raises((TypeError, ValueError)): - _ = load_eit_data(draeger_file1, "draeger", first_frame=ff) + _ = load_eit_data(draeger_file1, vendor="draeger", sample_frequency=20, first_frame=ff) for ff2 in [0, 0.0, 1.0, None]: - _ = load_eit_data(draeger_file1, "draeger", first_frame=ff2) + _ = load_eit_data(draeger_file1, vendor="draeger", sample_frequency=20, first_frame=ff2) def test_max_frames_too_large(): with pytest.warns(): - _ = load_eit_data(draeger_file1, "draeger", max_frames=1e12) - - -def test_sample_frequency_unset(): - loaded_draeger = load_eit_data(draeger_file1, "draeger", sample_frequency=None) - assert loaded_draeger.eit_data["raw"].sample_frequency == DRAEGER_SAMPLE_FREQUENCY + _ = load_eit_data(draeger_file1, vendor="draeger", sample_frequency=20, max_frames=1e12) def test_event_on_first_frame(draeger2: Sequence): - draeger3 = load_eit_data(draeger_file3, vendor="draeger") + draeger3 = load_eit_data(draeger_file3, vendor="draeger", sample_frequency=20) draeger3_events = draeger3.sparse_data["events_(draeger)"] assert draeger3_events == draeger2.sparse_data["events_(draeger)"] assert draeger3_events.time[0] == draeger3.eit_data["raw"].time[0]