diff --git a/.github/styler.R b/.github/styler.R
new file mode 100755
index 00000000..afe58a6b
--- /dev/null
+++ b/.github/styler.R
@@ -0,0 +1,36 @@
+#!/usr/bin/env Rscript
+
+library("argparse")
+library("styler")
+
+parser <- ArgumentParser(description = "Call styler")
+parser$add_argument("dir",
+    metavar = "DIR", type = "character",
+    help = "File to parse"
+)
+parser$add_argument("--dry",
+    choices = c("off", "on"), default = "on"
+)
+args <- parser$parse_args()
+
+file_info <- file.info(args$dir)
+is_directory <- file_info$isdir
+
+if (is_directory) {
+    captured_output <- capture.output({
+        result <- style_dir(args$dir, indent_by = 4, dry = args$dry, recursive = TRUE)
+    })
+} else {
+    captured_output <- capture.output({
+        result <- style_file(args$dir, indent_by = 4, dry = args$dry)
+    })
+}
+
+n <- nrow(subset(result, changed == TRUE))
+if (n > 0) {
+    if (args$dry == "off") {
+        print(paste("Changed", n, "files"))
+    } else {
+        stop(paste("Linting failed for", n, "files"))
+    }
+}
\ No newline at end of file
diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml
deleted file mode 100644
index 96a5304a..00000000
--- a/.github/workflows/deploy.yaml
+++ /dev/null
@@ -1,185 +0,0 @@
-name: Galaxy Tool Deployment
-
-on:
-  push:
-    branches:
-      - develop
-
-env:
-  GALAXY_FORK: galaxyproject
-  GALAXY_BRANCH: release_23.1
-  MAX_CHUNKS: 4
-
-jobs:
-  # This is a setup modified from that at
-  # https://github.com/galaxyproject/tools-iuc/blob/master/.github/workflows/pr.yaml
-  # The setup job does two things:
-  # 1. cache the pip cache and .planemo
-  # 2. determine the list of changed repositories
-  # it produces one artifact which contains
-  # - a file with the latest SHA from the chosen branch of the Galaxy repo
-  # - a file containing the list of changed repositories
-  # which are needed in subsequent steps.
-  setup:
-    name: Setup cache and Planemo
-    runs-on: ubuntu-latest
-    outputs:
-      galaxy_head_sha: ${{ steps.get-galaxy-sha.outputs.galaxy_head_sha }}
-    strategy:
-      matrix:
-        python-version: ['3.7']
-    steps:
-      - name: Print github context properties
-        run: |
-          echo 'event: ${{ github.event_name }}'
-          echo 'sha: ${{ github.sha }}'
-          echo 'ref: ${{ github.ref }}'
-          echo 'head_ref: ${{ github.head_ref }}'
-          echo 'base_ref: ${{ github.base_ref }}'
-          echo 'event.before: ${{ github.event.before }}'
-          echo 'event.after: ${{ github.event.after }}'
-      - uses: actions/setup-python@v4
-        with:
-          python-version: ${{ matrix.python-version }}
-      - name: Determine latest commit in the Galaxy repo
-        id: get-galaxy-sha
-        run: echo "::set-output name=galaxy_head_sha::$(git ls-remote https://github.com/${{ env.GALAXY_FORK }}/galaxy refs/heads/${{ env.GALAXY_BRANCH }} | cut -f1)"
-      - name: Cache .cache/pip
-        uses: actions/cache@v3
-        id: cache-pip
-        with:
-          path: ~/.cache/pip
-          key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ steps.get-galaxy-sha.outputs.galaxy_head_sha }}
-      - name: Cache .planemo
-        uses: actions/cache@v3
-        id: cache-planemo
-        with:
-          path: ~/.planemo
-          key: planemo_cache_py_${{ matrix.python-version }}_gxy_${{ steps.get-galaxy-sha.outputs.galaxy_head_sha }}
-      # Install the `wheel` package so that when installing other packages which
-      # are not available as wheels, pip will build a wheel for them, which can be cached.
-      - name: Install wheel
-        run: pip install wheel
-      - name: Install Planemo
-        run: pip install planemo
-      - uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-      - name: Fake a Planemo run to update cache
-        uses: galaxyproject/planemo-ci-action@v1
-        id: discover
-        with:
-          create-cache: ${{ steps.cache-pip.outputs.cache-hit != 'true' || steps.cache-planemo.outputs.cache-hit != 'true' }}
-          galaxy-fork: ${{ env.GALAXY_FORK }}
-          galaxy-branch: ${{ env.GALAXY_BRANCH }}
-          max-chunks: ${{ env.MAX_CHUNKS }}
-          python-version: ${{ matrix.python-version }}
-
-  find_changed:
-    name: Determine changed repositories
-    needs: [setup]
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: ['3.7']
-    outputs:
-      commit_range: ${{ steps.get-commit-range.outputs.commit_range }}
-      nchunks: ${{ steps.get-chunks.outputs.nchunks }}
-      chunk_list: ${{ steps.get-chunks.outputs.chunk_list }}
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-      # The range of commits to check for changes is:
-      # - for events on the develop branch we compare against the sha before the event
-      #   (note that this does not work for feature branch events since we want all
-      #   commits on the feature branch and not just the commits of the last event)
-      - name: Set commit range (push to the develop branch, e.g. merge)
-        run: echo "COMMIT_RANGE=${{ github.event.before }}.." >> $GITHUB_ENV
-      - id: get-commit-range
-        run: echo "::set-output name=commit_range::$COMMIT_RANGE"
-      - uses: actions/setup-python@v2
-        with:
-          python-version: ${{ matrix.python-version }}
-      - name: Cache .cache/pip
-        uses: actions/cache@v3
-        id: cache-pip
-        with:
-          path: ~/.cache/pip
-          key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy_head_sha }}
-      - name: Install Planemo
-        run: pip install planemo
-      - name: Planemo ci_find_repos
-        run: planemo ci_find_repos --changed_in_commit_range $COMMIT_RANGE --exclude packages --exclude deprecated --output changed_repositories.list
-      - name: Show repo list
-        run: cat changed_repositories.list
-      - uses: actions/upload-artifact@v3
-        with:
-          name: Workflow artifacts
-          path: changed_repositories.list
-      - name: Planemo ci_find_tools for the changed repos
-        run: |
-          touch changed_tools.list
-          if [ -s changed_repositories.list ]; then
-            planemo ci_find_tools --output changed_tools.list $(cat changed_repositories.list)
-          fi
-      - name: Show tool list
-        run: cat changed_tools.list
-      - name: Compute chunks
-        id: get-chunks
-        run: |
-          nchunks=$(wc -l < changed_tools.list)
-          if [ "$nchunks" -gt "$MAX_CHUNKS" ]; then
-            nchunks=$MAX_CHUNKS
-          elif [ "$nchunks" -eq 0 ]; then
-            nchunks=1
-          fi
-          echo "::set-output name=nchunks::$nchunks"
-          echo "::set-output name=chunk_list::[$(seq -s ", " 0 $(($nchunks - 1)))]"
-      - name: Show chunks
-        run: |
-          echo 'Using ${{ steps.get-chunks.outputs.nchunks }} chunks (${{ steps.get-chunks.outputs.chunk_list }})'
-
-  # deploy the tools to the toolshed
-  deploy:
-    name: Deploy
-    needs: [setup, find_changed]
-    strategy:
-      matrix:
-        python-version: ['3.7']
-    runs-on: ubuntu-latest
-    if: github.ref == 'refs/heads/develop' && github.repository_owner == 'ebi-gene-expression-group'
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          fetch-depth: 1
-      - uses: actions/setup-python@v4
-        with:
-          python-version: ${{ matrix.python-version }}
-      - uses: actions/download-artifact@v2
-        with:
-          name: Workflow artifacts
-          path: ../workflow_artifacts/
-      - name: Cache .cache/pip
-        uses: actions/cache@v3
-        id: cache-pip
-        with:
-          path: ~/.cache/pip
-          key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy_head_sha }}
-      - name: Install Planemo
-        run: pip install planemo
-      - name: Deploy on toolshed
-        env:
-          SHED_KEY: ${{ secrets.TS_API_KEY }}
-        run: |
-          while read -r DIR; do
-            if [ -d "$DIR/test-data" ]; then
-              max_test_file_size=$(du $DIR/test-data/$(ls -S $DIR/test-data | head -n 1) | awk '{print $1}')
-              if [ $max_test_file_size -gt 10000 ]; then
-                echo "Deleting test files, found one > 10Mb" 1>&2
-                rm -rf $DIR/test-data
-              fi
-            fi
-            planemo shed_update --shed_target toolshed --shed_key "${{ env.SHED_KEY }}" --force_repository_creation "$DIR" || exit 1;
-          done < ../workflow_artifacts/changed_repositories.list
diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml
index c20f254e..a7251791 100644
--- a/.github/workflows/pr.yaml
+++ b/.github/workflows/pr.yaml
@@ -1,9 +1,28 @@
 name: Galaxy Tool Linting and Tests for push and PR
-on: [pull_request]
+on:
+  pull_request:
+    paths-ignore:
+      - 'deprecated/**'
+      - 'docs/**'
+      - '*'
+  push:
+    branches:
+      - main
+      - master
+    paths-ignore:
+      - 'deprecated/**'
+      - 'docs/**'
+      - '*'
 env:
   GALAXY_FORK: galaxyproject
-  GALAXY_BRANCH: release_23.1
+  GALAXY_BRANCH: release_24.0
   MAX_CHUNKS: 4
+  MAX_FILE_SIZE: 1M
+concurrency:
+  # Group runs by PR, but keep runs on the default branch separate
+  # because we do not want to cancel ToolShed uploads
+  group: pr-${{ github.ref == 'refs/heads/main' && github.run_number || github.ref }}
+  cancel-in-progress: true
 jobs:
   # the setup job does two things:
   # 1. cache the pip cache and .planemo
@@ -21,9 +40,10 @@
       tool-list: ${{ steps.discover.outputs.tool-list }}
       chunk-count: ${{ steps.discover.outputs.chunk-count }}
       chunk-list: ${{ steps.discover.outputs.chunk-list }}
+      commit-range: ${{ steps.discover.outputs.commit-range }}
     strategy:
       matrix:
-        python-version: ['3.7']
+        python-version: ['3.11']
     steps:
       - name: Print github context properties
         run: |
@@ -36,29 +56,29 @@
           echo 'event.after: ${{ github.event.after }}'
       - name: Determine latest commit in the Galaxy repo
         id: get-galaxy-sha
-        run: echo "::set-output name=galaxy-head-sha::$(git ls-remote https://github.com/${{ env.GALAXY_FORK }}/galaxy refs/heads/${{ env.GALAXY_BRANCH }} | cut -f1)"
-      - uses: actions/setup-python@v4
+        run: echo "galaxy-head-sha=$(git ls-remote https://github.com/${{ env.GALAXY_FORK }}/galaxy refs/heads/${{ env.GALAXY_BRANCH }} | cut -f1)" >> $GITHUB_OUTPUT
+      - uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
       - name: Cache .cache/pip
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         id: cache-pip
         with:
          path: ~/.cache/pip
-          key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ steps.get-galaxy-sha.outputs.galaxy-head-sha }}-${{ secrets.CACHE_VERSION }}
+          key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ steps.get-galaxy-sha.outputs.galaxy-head-sha }}
       - name: Cache .planemo
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         id: cache-planemo
         with:
           path: ~/.planemo
-          key: planemo_cache_py_${{ matrix.python-version }}_gxy_${{ steps.get-galaxy-sha.outputs.galaxy-head-sha }}-${{ secrets.CACHE_VERSION }}
+          key: planemo_cache_py_${{ matrix.python-version }}_gxy_${{ steps.get-galaxy-sha.outputs.galaxy-head-sha }}
       # Install the `wheel` package so that when installing other packages which
       # are not available as wheels, pip will build a wheel for them, which can be cached.
       - name: Install wheel
         run: pip install wheel
-      - name: Install jq
-        run: sudo apt-get install -yq jq libjq1
-      - uses: actions/checkout@v3
+      - name: Install flake8
+        run: pip install flake8 flake8-import-order
+      - uses: actions/checkout@v4
         with:
           fetch-depth: 0
       - name: Fake a Planemo run to update cache and determine commit range, repositories, and chunks
@@ -84,50 +104,159 @@
   lint:
     name: Lint tool-list
     needs: setup
-    if: needs.setup.outputs.repository-list != ''
+    if: ${{ needs.setup.outputs.repository-list != '' || needs.setup.outputs.tool-list != '' }}
     runs-on: ubuntu-latest
     strategy:
       fail-fast: false
       matrix:
-        python-version: ['3.7']
+        python-version: ['3.11']
     steps:
-      # checkout the repository
-      # and use it as the current working directory
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         with:
           fetch-depth: 1
-      - uses: actions/setup-python@v4
+      - uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
       - name: Cache .cache/pip
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         id: cache-pip
         with:
           path: ~/.cache/pip
-          key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}-${{ secrets.CACHE_VERSION }}
-      - name: Install jq
-        run: sudo apt-get install -yq jq libjq1
+          key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
+
+      - name: Set fail level for pull request
+        if: ${{ github.event_name == 'pull_request' }}
+        run:
+          echo "FAIL_LEVEL=error" >> "$GITHUB_ENV"
+      - name: Set fail level for merge
+        if: ${{ github.event_name != 'pull_request' }}
+        run:
+          echo "FAIL_LEVEL=error" >> "$GITHUB_ENV"
       - name: Planemo lint
         uses: galaxyproject/planemo-ci-action@v1
         id: lint
         with:
           mode: lint
+          fail-level: ${{ env.FAIL_LEVEL }}
           repository-list: ${{ needs.setup.outputs.repository-list }}
           tool-list: ${{ needs.setup.outputs.tool-list }}
+      - uses: actions/upload-artifact@v4
+        if: ${{ failure() }}
+        with:
+          name: 'Tool linting output'
+          path: lint_report.txt
+
+  # flake8 of Python scripts in the changed repositories
+  flake8:
+    name: Lint Python scripts
+    needs: setup
+    if: ${{ github.event_name == 'pull_request' && needs.setup.outputs.repository-list != '' }}
+    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ['3.11']
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 1
+      - uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Cache .cache/pip
+        uses: actions/cache@v4
+        id: cache-pip
+        with:
+          path: ~/.cache/pip
+          key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
+      - name: Install flake8
+        run: pip install flake8 flake8-import-order
+      - name: Flake8
+        run: echo '${{ needs.setup.outputs.repository-list }}' | xargs -d '\n' flake8 --output-file pylint_report.txt --tee
+      - uses: actions/upload-artifact@v4
+        if: ${{ failure() }}
+        with:
+          name: 'Python linting output'
+          path: pylint_report.txt
+
+  lintr:
+    name: Lint R scripts
+    needs: setup
+    if: ${{ needs.setup.outputs.repository-list != '' }}
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [ubuntu-20.04]
+        r-version: ['release']
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 1
+      - uses: r-lib/actions/setup-r@v2
+        with:
+          r-version: ${{ matrix.r-version }}
+      - name: Cache R packages
+        uses: actions/cache@v4
+        with:
+          path: ${{ env.R_LIBS_USER }}
+          key: r_cache_${{ matrix.os }}_${{ matrix.r-version }}
+      - name: Install packages
+        uses: r-lib/actions/setup-r-dependencies@v2
+        with:
+          packages: |
+            any::argparse
+            any::styler
+      - name: lintr
+        run: |
+          set -eo pipefail
+          echo '${{ needs.setup.outputs.repository-list }}' | xargs -d '\n' -n 1 ./.github/styler.R --dry off
+          git status
+          git diff --exit-code | tee rlint_report.txt
+      - uses: actions/upload-artifact@v4
+        if: ${{ failure() }}
+        with:
+          name: 'R linting output'
+          path: rlint_report.txt
+
+  file_sizes:
+    name: Check file sizes
+    needs: setup
+    if: ${{ github.event_name == 'pull_request' && needs.setup.outputs.repository-list != '' }}
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - name: Check file sizes
+        run: |
+          touch file_size_report.txt
+          git diff --diff-filter=d --name-only ${{ needs.setup.outputs.commit-range }} > git.diff
+          while read line; do
+            find "$line" -type f -size +${{ env.MAX_FILE_SIZE }} >> file_size_report.txt
+          done < git.diff
+          if [[ -s file_size_report.txt ]]; then
+            echo "Files larger than ${{ env.MAX_FILE_SIZE }} found"
+            cat file_size_report.txt
+            exit 1
+          fi
+      - uses: actions/upload-artifact@v4
+        if: ${{ failure() }}
+        with:
+          name: 'File size report'
+          path: file_size_report.txt
 
   # Planemo test the changed repositories, each chunk creates an artifact
   # containing HTML and JSON reports for the executed tests
   test:
     name: Test tools
-    # This job runs on Linux
-    runs-on: ubuntu-latest
     needs: setup
-    if: needs.setup.outputs.repository-list != ''
+    if: ${{ needs.setup.outputs.repository-list != '' }}
+    runs-on: ubuntu-latest
     strategy:
       fail-fast: false
       matrix:
         chunk: ${{ fromJson(needs.setup.outputs.chunk-list) }}
-        python-version: ['3.7']
+        python-version: ['3.11']
     services:
       postgres:
         image: postgres:11
@@ -138,28 +267,29 @@
       ports:
         - 5432:5432
     steps:
-      # checkout the repository
-      # and use it as the current working directory
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
-      - uses: actions/setup-python@v4
+      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
       - name: Cache .cache/pip
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         id: cache-pip
         with:
           path: ~/.cache/pip
-          key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}-${{ secrets.CACHE_VERSION }}
+          key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
       - name: Cache .planemo
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         id: cache-planemo
         with:
           path: ~/.planemo
-          key: planemo_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}-${{ secrets.CACHE_VERSION }}
-      - name: Install jq
-        run: sudo apt-get install -yq jq libjq1
+          key: planemo_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
+      - name: Get number of CPU cores
+        uses: SimenB/github-actions-cpu-cores@v2
+        id: cpu-cores
+      - name: Clean dotnet folder for space
+        run: rm -Rf /usr/share/dotnet
       - name: Get test-data for changed repos
         run: |
           for repo in ${{ needs.setup.outputs.repository-list }}; do
@@ -181,8 +311,10 @@
           galaxy-branch: ${{ env.GALAXY_BRANCH }}
           chunk: ${{ matrix.chunk }}
           chunk-count: ${{ needs.setup.outputs.chunk-count }}
-          html-report: true
-      - uses: actions/upload-artifact@v3
+          galaxy-slots: ${{ steps.cpu-cores.outputs.count }}
+          # Limit each test to 30 minutes
+          test_timeout: 1800
+      - uses: actions/upload-artifact@v4
         with:
           name: 'Tool test output ${{ matrix.chunk }}'
           path: upload
@@ -195,36 +327,130 @@
   combine_outputs:
     name: Combine chunked test results
     needs: [setup, test]
+    if: ${{ always() && needs.setup.outputs.repository-list != '' }}
+    runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ['3.7']
-    # This job runs on Linux
-    runs-on: ubuntu-latest
+        python-version: ['3.11']
     steps:
-      - uses: actions/download-artifact@v3
+      - uses: actions/download-artifact@v4
        with:
          path: artifacts
-      - uses: actions/setup-python@v4
+      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
       - name: Cache .cache/pip
-        uses: actions/cache@v3
+        uses: actions/cache@v4
        id: cache-pip
        with:
          path: ~/.cache/pip
-          key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}-${{ secrets.CACHE_VERSION }}
+          key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
       - name: Combine outputs
         uses: galaxyproject/planemo-ci-action@v1
         id: combine
         with:
           mode: combine
           html-report: true
-      - uses: actions/upload-artifact@v3
+          markdown-report: true
+      - uses: actions/upload-artifact@v4
         with:
           name: 'All tool test results'
           path: upload
+      - run: cat upload/tool_test_output.md >> $GITHUB_STEP_SUMMARY
       - name: Check outputs
         uses: galaxyproject/planemo-ci-action@v1
         id: check
         with:
           mode: check
+      - name: Check if all test chunks succeeded
+        run: |
+          NFILES=$(ls artifacts/ | grep "Tool test output" | wc -l)
+          if [[ "${{ needs.setup.outputs.chunk-count }}" != "$NFILES" ]]; then
+            exit 1
+          fi
+
+  # deploy the tools to the toolsheds (first TTS for testing)
+  deploy:
+    name: Deploy
+    needs: [setup, lint, combine_outputs]
+    if: ${{ github.ref == 'refs/heads/main' && github.repository_owner == 'ebi-gene-expression-group' }}
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: ['3.11']
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 1
+      - uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Cache .cache/pip
+        uses: actions/cache@v4
+        id: cache-pip
+        with:
+          path: ~/.cache/pip
+          key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
+      - name: Delete large test files
+        run: |
+          echo "${{ needs.setup.outputs.repository-list }}" > changed_repositories.list
+          while read -r DIR; do
+            if [ -d "$DIR/test-data" ]; then
+              max_test_file_size=$(du $DIR/test-data/$(ls -S $DIR/test-data | head -n 1) | awk '{print $1}')
+              if [ $max_test_file_size -gt 10000 ]; then
+                echo "Deleting test files, found one > 10Mb" 1>&2
+                rm -rf $DIR/test-data
+              fi
+            fi
+          done < changed_repositories.list
+      - name: Deploy on toolshed
+        uses: galaxyproject/planemo-ci-action@v1
+        with:
+          mode: deploy
+          repository-list: ${{ needs.setup.outputs.repository-list }}
+          shed-target: toolshed
+          shed-key: ${{ secrets.TS_API_KEY }}
+
+  deploy-report:
+    name: Report deploy status
+    needs: [deploy]
+    if: ${{ always() && needs.deploy.result != 'success' && github.ref == 'refs/heads/main' && github.repository_owner == 'ebi-gene-expression-group' }}
+    runs-on: ubuntu-latest
+    steps:
+      # report to the PR if deployment failed
+      - name: Get PR object
+        uses: 8BitJonny/gh-get-current-pr@2.2.0
+        id: getpr
+        with:
+          sha: ${{ github.event.after }}
+      - name: Create comment
+        uses: peter-evans/create-or-update-comment@v4
+        with:
+          token: ${{ secrets.PAT }}
+          issue-number: ${{ steps.getpr.outputs.number }}
+          body: |
+            Attention: deployment ${{ needs.deploy.result }}!
+
+            https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
+
+  determine-success:
+    name: Check workflow success
+    needs: [setup, lint, flake8, lintr, file_sizes, combine_outputs]
+    if: ${{ always() && github.ref != 'refs/heads/main' }}
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check tool lint status
+        if: ${{ needs.lint.result != 'success' && needs.lint.result != 'skipped' }}
+        run: exit 1
+      - name: Indicate Python script lint status
+        if: ${{ needs.flake8.result != 'success' && needs.flake8.result != 'skipped' }}
+        run: exit 1
+      - name: Indicate R script lint status
+        if: ${{ needs.lintr.result != 'success' && needs.lintr.result != 'skipped' }}
+        run: exit 1
+      - name: Indicate file size check status
+        if: ${{ needs.file_sizes.result != 'success' && needs.file_sizes.result != 'skipped' }}
+        run: exit 1
+      - name: Check tool test status
+        if: ${{ needs.combine_outputs.result != 'success' && needs.combine_outputs.result != 'skipped' }}
+        run: exit 1
diff --git a/tools/tertiary-analysis/scanpy/scanpy-find-variable-genes.xml b/tools/tertiary-analysis/scanpy/scanpy-find-variable-genes.xml
index 161fb0c3..a849fb7e 100644
--- a/tools/tertiary-analysis/scanpy/scanpy-find-variable-genes.xml
+++ b/tools/tertiary-analysis/scanpy/scanpy-find-variable-genes.xml
@@ -1,5 +1,5 @@
- + based on normalised dispersion of expression scanpy_macros2.xml
@@ -25,6 +25,12 @@ PYTHONIOENCODING=utf-8 scanpy-find-variable-genes
     ${filter}
 #if $batch_key
     --batch-key ${batch_key}
+#end if
+#if $never_hvg
+    --never-hv-genes-file '${never_hvg}'
+#end if
+#if $always_hvg
+    --always-hv-genes-file '${always_hvg}'
 #end if
     @INPUT_OPTS@
     @OUTPUT_OPTS@
@@ -54,6 +60,8 @@ PYTHONIOENCODING=utf-8 scanpy-find-variable-genes
+ +
@@ -77,6 +85,41 @@ PYTHONIOENCODING=utf-8 scanpy-find-variable-genes
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
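For contributors who want to reproduce the new R lint check locally before pushing, a minimal sketch of invoking the .github/styler.R helper added above (an assumption for illustration: R with the argparse and styler packages is installed and the commands are run from the repository root; the tool directory used here is only an example):

    # Dry run (the default --dry on): the script stops with an error such as
    # "Linting failed for 1 files" if styler would re-format anything.
    ./.github/styler.R tools/tertiary-analysis/scanpy

    # Re-style the files in place with 4-space indentation, as the lintr CI job
    # does with --dry off before failing on a non-empty `git diff`.
    ./.github/styler.R tools/tertiary-analysis/scanpy --dry off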