# .github/workflows/all-tests.yml
name: Complete tests

# Full test suite across OSes and supported Python bounds.
# Triggered manually, weekly, and on pull requests targeting main.
on:
  workflow_dispatch:
  schedule:
    - cron: "0 12 * * 0"  # Weekly on Sunday at noon UTC
  pull_request:
    types: [synchronize, opened, reopened]
    branches:
      - main

env:
  KACHERY_CLOUD_CLIENT_ID: ${{ secrets.KACHERY_CLOUD_CLIENT_ID }}
  KACHERY_CLOUD_PRIVATE_KEY: ${{ secrets.KACHERY_CLOUD_PRIVATE_KEY }}

concurrency:  # Cancel previous workflows on the same pull request
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  run:
    name: ${{ matrix.os }} Python ${{ matrix.python-version }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false  # let every matrix cell finish even if one fails
      matrix:
        python-version: ["3.9", "3.12"]  # Lower and higher versions we support
        os: [macos-13, windows-latest, ubuntu-latest]
    steps:
      - uses: actions/checkout@v4
      - name: Setup Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          # cache: 'pip' # caching pip dependencies
      # The dataset repo's HEAD SHA keys the cache below, so a new dataset
      # release invalidates the cache automatically.
      - name: Get current hash (SHA) of the ephy_testing_data repo
        id: repo_hash
        run: |
          # First echo logs the value to the job console; second writes it
          # to the step output for use in the cache key.
          echo "dataset_hash=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git HEAD | cut -f1)"
          echo "dataset_hash=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
        shell: bash
      - name: Cache datasets
        id: cache-datasets
        uses: actions/cache/restore@v4  # restore-only; this job never saves
        with:
          path: ~/spikeinterface_datasets
          key: ${{ runner.os }}-datasets-${{ steps.repo_hash.outputs.dataset_hash }}
          restore-keys: ${{ runner.os }}-datasets  # fall back to any older dataset cache
      - name: Install packages
        run: |
          git config --global user.email "[email protected]"
          git config --global user.name "CI Almighty"
          pip install -e .[test,extractors,streaming_extractors,full]
          pip install tabulate
        shell: bash
      - name: Install datalad
        run: |
          pip install datalad-installer
          # git-annex installation method differs per OS
          if [ ${{ runner.os }} = 'Linux' ]; then
            datalad-installer --sudo ok git-annex --method datalad/packages
          elif [ ${{ runner.os }} = 'macOS' ]; then
            datalad-installer --sudo ok git-annex --method brew
          elif [ ${{ runner.os }} = 'Windows' ]; then
            datalad-installer --sudo ok git-annex --method datalad/git-annex:release
          fi
          pip install datalad
          git config --global filter.annex.process "git-annex filter-process"  # recommended for efficiency
        shell: bash
      - name: Set execute permissions on run_tests.sh
        run: chmod +x .github/run_tests.sh
        shell: bash
      - name: Test core
        run: pytest -m "core"
        shell: bash
      - name: Test extractors
        env:
          HDF5_PLUGIN_PATH: ${{ github.workspace }}/hdf5_plugin_path_maxwell
        run: pytest -m "extractors"
        shell: bash
      - name: Test preprocessing
        run: ./.github/run_tests.sh "preprocessing and not deepinterpolation" --no-virtual-env
        shell: bash
      - name: Test postprocessing
        run: ./.github/run_tests.sh postprocessing --no-virtual-env
        shell: bash
      - name: Test quality metrics
        run: ./.github/run_tests.sh qualitymetrics --no-virtual-env
        shell: bash
      - name: Test comparison
        run: ./.github/run_tests.sh comparison --no-virtual-env
        shell: bash
      - name: Test core sorters
        run: ./.github/run_tests.sh sorters --no-virtual-env
        shell: bash
      - name: Test internal sorters
        run: ./.github/run_tests.sh sorters_internal --no-virtual-env
        shell: bash
      - name: Test curation
        run: ./.github/run_tests.sh curation --no-virtual-env
        shell: bash
      - name: Test widgets
        run: ./.github/run_tests.sh widgets --no-virtual-env
        shell: bash
      - name: Test exporters
        run: ./.github/run_tests.sh exporters --no-virtual-env
        shell: bash
      - name: Test sortingcomponents
        run: ./.github/run_tests.sh sortingcomponents --no-virtual-env
        shell: bash
      - name: Test generation
        run: ./.github/run_tests.sh generation --no-virtual-env
        shell: bash