Merge pull request #2447 from SpikeInterface/pre-commit-ci-update-config
[pre-commit.ci] pre-commit autoupdate
alejoe91 authored Jan 30, 2024
2 parents fcb7ce4 + 309a840 commit 15d3899
Showing 25 changed files with 34 additions and 17 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -6,7 +6,7 @@ repos:
- id: end-of-file-fixer
- id: trailing-whitespace
- repo: https://github.com/psf/black
- rev: 23.12.1
+ rev: 24.1.1
hooks:
- id: black
files: ^src/
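The only configuration change in this commit is the Black hook bump above, from 23.12.1 to 24.1.1, applied by pre-commit.ci autoupdate; every Python hunk below is mechanical reformatting produced by re-running the hook under Black's 2024 stable style. The most frequent change, accounting for most of the one-line additions, is that a module docstring is now followed by exactly one blank line. A minimal sketch of that rule, reusing the datasets.py header from further below (the snippet is illustrative, not part of the diff):

# Black 23.x accepted the first import directly under the closing quotes of a
# module docstring; Black 24.1 inserts one blank line after the docstring.
# That blank line is the single unmarked addition in most hunks below.
"""
Some simple functions to retrieve public datasets with datalad
"""

from __future__ import annotations

from pathlib import Path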
1 change: 1 addition & 0 deletions src/spikeinterface/__init__.py
@@ -2,6 +2,7 @@
"""

import importlib.metadata

__version__ = importlib.metadata.version("spikeinterface")
1 change: 1 addition & 0 deletions src/spikeinterface/core/datasets.py
@@ -1,6 +1,7 @@
"""
Some simple functions to retrieve public datasets with datalad
"""

from __future__ import annotations

from pathlib import Path
1 change: 1 addition & 0 deletions src/spikeinterface/core/job_tools.py
@@ -1,6 +1,7 @@
"""
Some utils to handle parallel jobs on top of job and/or loky
"""

from __future__ import annotations
from pathlib import Path
import numpy as np
1 change: 1 addition & 0 deletions src/spikeinterface/core/node_pipeline.py
@@ -18,6 +18,7 @@
* when peaks are already detected and reduced with `select_peaks()`
* on a sorting object
"""

from __future__ import annotations
from typing import Optional, Type

1 change: 1 addition & 0 deletions src/spikeinterface/core/tests/test_base.py
@@ -2,6 +2,7 @@
test for BaseRecording are done with BinaryRecordingExtractor.
but check only for BaseRecording general methods.
"""

from typing import Sequence
from spikeinterface.core.base import BaseExtractor
from spikeinterface.core import generate_recording, concatenate_recordings
1 change: 1 addition & 0 deletions src/spikeinterface/core/tests/test_baserecording.py
@@ -2,6 +2,7 @@
test for BaseRecording are done with BinaryRecordingExtractor.
but check only for BaseRecording general methods.
"""

import json
import pickle
from pathlib import Path
1 change: 1 addition & 0 deletions src/spikeinterface/core/tests/test_basesnippets.py
@@ -2,6 +2,7 @@
test for BaseSnippets are done with NumpySnippets.
but check only for BaseRecording general methods.
"""

from pathlib import Path
import pytest
import numpy as np
1 change: 1 addition & 0 deletions src/spikeinterface/core/tests/test_basesorting.py
@@ -2,6 +2,7 @@
test for BaseSorting are done with NpzSortingExtractor.
but check only for BaseRecording general methods.
"""

import shutil
from pathlib import Path

1 change: 1 addition & 0 deletions src/spikeinterface/core/waveform_tools.py
@@ -8,6 +8,7 @@
2. extract and distribute snippets into buffers (optionally in parallel)
"""

from __future__ import annotations
from pathlib import Path

1 change: 1 addition & 0 deletions src/spikeinterface/full.py
@@ -9,6 +9,7 @@
# this imports everything in a flat module
import spikeinterface.full as si
"""

import importlib.metadata

__version__ = importlib.metadata.version("spikeinterface")
1 change: 1 addition & 0 deletions src/spikeinterface/postprocessing/template_metrics.py
@@ -3,6 +3,7 @@
https://github.com/AllenInstitute/ecephys_spike_sorting/blob/master/ecephys_spike_sorting/modules/mean_waveforms/waveform_metrics.py
22/04/2020
"""

from __future__ import annotations

import numpy as np
@@ -2,6 +2,7 @@
test for BaseSnippets are done with NumpySnippets.
but check only for BaseRecording general methods.
"""

from pathlib import Path
import pytest
import numpy as np
@@ -1,4 +1,5 @@
"""Classes and functions for computing multiple quality metrics."""

import warnings
from copy import deepcopy

1 change: 1 addition & 0 deletions src/spikeinterface/sorters/basesorter.py
@@ -1,6 +1,7 @@
"""
Base class for sorter implementations.
"""

import time
import copy
from pathlib import Path
2 changes: 1 addition & 1 deletion src/spikeinterface/sorters/external/hdsort.py
@@ -144,7 +144,7 @@ def _generate_configs_file(sorter_output_folder, params, file_name, file_format)
P["featureExtraction"] = {"nDims": float(params["n_pc_dims"])} # 6
P["clustering"] = {
"maxSpikes": 50000.0, # dont align spikes you dont cluster..
"meanShiftBandWidthFactor": 1.8
"meanShiftBandWidthFactor": 1.8,
# 'meanShiftBandWidth': sqrt(1.8*6) # todo: check this!
}

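The hdsort.py change above is Black 24.1's magic trailing comma being added to the last real entry of a multi-line collection even when only a commented-out entry follows it. A hedged sketch of the rule (the values are copied from the hunk, the surrounding assignment is illustrative):

# Black 23.x left "meanShiftBandWidthFactor" without a trailing comma because
# a comment followed it; Black 24.1 appends the comma anyway.
P = {}
P["clustering"] = {
    "maxSpikes": 50000.0,  # dont align spikes you dont cluster..
    "meanShiftBandWidthFactor": 1.8,  # trailing comma added by Black 24.1
    # 'meanShiftBandWidth': sqrt(1.8*6)  # commented-out entry is untouched
}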
12 changes: 6 additions & 6 deletions src/spikeinterface/sorters/external/kilosort.py
@@ -161,13 +161,13 @@ def _get_specific_options(cls, ops, params):
ops["nNeigh"] = 16.0 # visualization only (Phy): number of neighboring templates to retain projections of (16)

# options for channel whitening
- ops[
-     "whitening"
- ] = "full"  # type of whitening (default 'full', for 'noSpikes' set options for spike detection below)
+ ops["whitening"] = (
+     "full"  # type of whitening (default 'full', for 'noSpikes' set options for spike detection below)
+ )
ops["nSkipCov"] = 1.0 # compute whitening matrix from every N-th batch (1)
- ops[
-     "whiteningRange"
- ] = 32.0  # how many channels to whiten together (Inf for whole probe whitening, should be fine if Nchan<=32)
+ ops["whiteningRange"] = (
+     32.0  # how many channels to whiten together (Inf for whole probe whitening, should be fine if Nchan<=32)
+ )

# ops['criterionNoiseChannels'] = 0.2 # fraction of "noise" templates allowed to span all channel groups (see createChannelMapFile for more info).

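The kilosort.py hunk above shows Black 24.1's preference for keeping a subscript assignment target on one line and wrapping a long right-hand side, together with its trailing comment, in parentheses, where 23.x split the subscript key onto its own line instead. A sketch built from one of the options in the hunk:

ops = {}

# Black 23.x layout (subscript key split across lines to fit the comment):
# ops[
#     "whiteningRange"
# ] = 32.0  # how many channels to whiten together
#
# Black 24.1 layout (right-hand side and comment hugged by parentheses):
ops["whiteningRange"] = (
    32.0  # how many channels to whiten together (Inf for whole probe whitening)
)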
1 change: 1 addition & 0 deletions src/spikeinterface/sorters/launcher.py
@@ -1,6 +1,7 @@
"""
Utility functions to launch several sorters on several recordings, in parallel or not.
"""

from pathlib import Path
import shutil
import numpy as np
2 changes: 1 addition & 1 deletion src/spikeinterface/sortingcomponents/clustering/merge.py
@@ -255,7 +255,7 @@ def find_merge_pairs(
radius_um=70,
method="project_distribution",
method_kwargs={},
- **job_kwargs
+ **job_kwargs,
# n_jobs=1,
# mp_context="fork",
# max_threads_per_process=1,
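The same trailing-comma rule applies to **kwargs at the end of a multi-line signature, even when commented-out parameters follow it, which is all that changes in find_merge_pairs above. A minimal hedged sketch (the function name and defaults are illustrative, modeled on the hunk):

def find_merge_pairs_sketch(
    radius_um=70,
    method="project_distribution",
    **job_kwargs,  # Black 24.1 adds this trailing comma
    # n_jobs=1,  # commented-out parameters after it are untouched
):
    return method, radius_um, job_kwargs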
12 changes: 6 additions & 6 deletions src/spikeinterface/sortingcomponents/clustering/sliding_nn.py
@@ -313,13 +313,13 @@ def main_function(cls, recording, peaks, params):
peaks_in_chunk_idx <= end_last
]
# repeat for cluster probabilities
- cluster_probabilities[
-     peaks_in_chunk_idx[peaks_in_chunk_idx > end_last], 0
- ] = clusterer.probabilities_[peaks_in_chunk_idx > end_last]
+ cluster_probabilities[peaks_in_chunk_idx[peaks_in_chunk_idx > end_last], 0] = (
+     clusterer.probabilities_[peaks_in_chunk_idx > end_last]
+ )
# put overlapping neighbors from previous in second row
- cluster_probabilities[
-     peaks_in_chunk_idx[peaks_in_chunk_idx <= end_last], 1
- ] = clusterer.probabilities_[peaks_in_chunk_idx <= end_last]
+ cluster_probabilities[peaks_in_chunk_idx[peaks_in_chunk_idx <= end_last], 1] = (
+     clusterer.probabilities_[peaks_in_chunk_idx <= end_last]
+ )

# TODO retrieve templates for each cluster

@@ -1,4 +1,5 @@
"""Sorting components: peak waveform features."""

import numpy as np

from spikeinterface.core.job_tools import fix_job_kwargs
1 change: 0 additions & 1 deletion src/spikeinterface/sortingcomponents/matching/circus.py
@@ -887,7 +887,6 @@ def main_function(cls, traces, d):


class CircusPeeler(BaseTemplateMatchingEngine):

"""
Greedy Template-matching ported from the Spyking Circus sorter
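The circus.py change above is the commit's only deletion-only hunk: Black 24.1 removes the blank line that previously separated a class statement from its docstring. A sketch (the class name is illustrative):

# Black 23.x tolerated a blank line between `class ...:` and the docstring;
# Black 24.1 deletes it, so the docstring sits directly under the class line.
class CircusPeelerSketch:
    """
    Greedy Template-matching ported from the Spyking Circus sorter
    """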
1 change: 1 addition & 0 deletions src/spikeinterface/sortingcomponents/peak_detection.py
@@ -1,4 +1,5 @@
"""Sorting components: peak detection."""

import copy
from typing import Tuple, Union, List, Dict, Any, Optional, Callable

1 change: 1 addition & 0 deletions src/spikeinterface/sortingcomponents/peak_localization.py
@@ -1,4 +1,5 @@
"""Sorting components: peak localization."""

import numpy as np
from spikeinterface.core.job_tools import _shared_job_kwargs_doc, split_job_kwargs, fix_job_kwargs

2 changes: 1 addition & 1 deletion src/spikeinterface/widgets/utils_ipywidgets.py
@@ -75,7 +75,7 @@ def __init__(self, durations, sampling_frequency, time_range=(0, 1.0), **kwargs)
min=0.01,
max=30.0,
description="win (s)",
- layout=W.Layout(width="auto")
+ layout=W.Layout(width="auto"),
# layout=W.Layout(width=f'10%')
)
self.window_sizer.observe(self.win_size_changed, names="value", type="change")
