From 507b6b3cf19d0f10069e2415f134dca7fb709b47 Mon Sep 17 00:00:00 2001
From: Charlie Windolf
Date: Sun, 3 Nov 2024 19:16:26 -0500
Subject: [PATCH 01/45] Address time bin issue arising in LFP-based reg, which
 AP-based reg doesn't trigger

---
 .../motion/motion_interpolation.py            | 17 +++++++++++------
 1 file changed, 11 insertions(+), 6 deletions(-)

diff --git a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py
index a5e6ded519..975f43919d 100644
--- a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py
+++ b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py
@@ -3,7 +3,8 @@
 import numpy as np
 from spikeinterface.core.core_tools import define_function_from_class
 from spikeinterface.preprocessing import get_spatial_interpolation_kernel
-from spikeinterface.preprocessing.basepreprocessor import BasePreprocessor, BasePreprocessorSegment
+from spikeinterface.preprocessing.basepreprocessor import (
+    BasePreprocessor, BasePreprocessorSegment)
 from spikeinterface.preprocessing.filter import fix_dtype
 
 
@@ -122,14 +123,18 @@ def interpolate_motion_on_traces(
     time_bins = interpolation_time_bin_centers_s
     if time_bins is None:
         time_bins = motion.temporal_bins_s[segment_index]
+
+    # nearest interpolation bin:
+    # searchsorted(b, t, side="right") == i means that b[i-1] <= t < b[i]
+    # hence the -1. doing it with "left" is not as nice.
+    # time_bins are bin centers, so subtract half the bin length. this leads
+    # to snapping to the nearest bin center.
     bin_s = time_bins[1] - time_bins[0]
-    bins_start = time_bins[0] - 0.5 * bin_s
-    # nearest bin center for each frame?
-    bin_inds = (times - bins_start) // bin_s
-    bin_inds = bin_inds.astype(int)
+    bin_inds = np.searchsorted(time_bins - bin_s / 2, times, side="right") - 1
+
     # the time bins may not cover the whole set of times in the recording,
     # so we need to clip these indices to the valid range
-    np.clip(bin_inds, 0, time_bins.size, out=bin_inds)
+    np.clip(bin_inds, 0, time_bins.size - 1, out=bin_inds)
 
     # -- what are the possibilities here anyway?
     bins_here = np.arange(bin_inds[0], bin_inds[-1] + 1)

From 4e38ac18be65051d30d15f3d25bada943af3e31f Mon Sep 17 00:00:00 2001
From: Charlie Windolf
Date: Mon, 4 Nov 2024 11:06:46 -0500
Subject: [PATCH 02/45] Fix LFP-based AP interp bug and allow time_vector in
 interpolation

---
 .../motion/motion_interpolation.py            | 18 +++--
 .../motion/tests/test_motion_interpolation.py | 78 ++++++++++++++++---
 2 files changed, 77 insertions(+), 19 deletions(-)

diff --git a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py
index 975f43919d..4fd42a8b39 100644
--- a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py
+++ b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py
@@ -126,11 +126,16 @@ def interpolate_motion_on_traces(
 
     # nearest interpolation bin:
    # searchsorted(b, t, side="right") == i means that b[i-1] <= t < b[i]
-    # hence the -1. doing it with "left" is not as nice.
-    # time_bins are bin centers, so subtract half the bin length. this leads
-    # to snapping to the nearest bin center.
-    bin_s = time_bins[1] - time_bins[0]
-    bin_inds = np.searchsorted(time_bins - bin_s / 2, times, side="right") - 1
+    # hence the -1. doing it with "left" is not as nice -- we want t==b[0]
+    # to lead to i=1 (rounding down).
+    # time_bins are bin centers, but we want to snap to the nearest center.
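+    # (e.g. with centers [1., 2., 3.], t=1.6 should snap to the second bin, center 2.)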
+ # idea is to get the left bin edges and bin the interp times. + # this is like subtracting bin_dt_s/2, but allows non-equally-spaced bins. + bin_left = np.zeros_like(time_bins) + # it's fine to use the first bin center for the first left edge + bin_left[0] = time_bins[0] + bin_left[1:] = 0.5 * (time_bins[1:] + time_bins[:-1]) + bin_inds = np.searchsorted(bin_left, times, side="right") - 1 # the time bins may not cover the whole set of times in the recording, # so we need to clip these indices to the valid range @@ -438,9 +443,6 @@ def __init__( self.motion = motion def get_traces(self, start_frame, end_frame, channel_indices): - if self.time_vector is not None: - raise NotImplementedError("InterpolateMotionRecording does not yet support recordings with time_vectors.") - if start_frame is None: start_frame = 0 if end_frame is None: diff --git a/src/spikeinterface/sortingcomponents/motion/tests/test_motion_interpolation.py b/src/spikeinterface/sortingcomponents/motion/tests/test_motion_interpolation.py index e022f0cc6c..69f681a1be 100644 --- a/src/spikeinterface/sortingcomponents/motion/tests/test_motion_interpolation.py +++ b/src/spikeinterface/sortingcomponents/motion/tests/test_motion_interpolation.py @@ -1,16 +1,11 @@ -from pathlib import Path +import warnings import numpy as np -import pytest import spikeinterface.core as sc -from spikeinterface import download_dataset -from spikeinterface.sortingcomponents.motion.motion_interpolation import ( - InterpolateMotionRecording, - correct_motion_on_peaks, - interpolate_motion, - interpolate_motion_on_traces, -) from spikeinterface.sortingcomponents.motion import Motion +from spikeinterface.sortingcomponents.motion.motion_interpolation import ( + InterpolateMotionRecording, correct_motion_on_peaks, interpolate_motion, + interpolate_motion_on_traces) from spikeinterface.sortingcomponents.tests.common import make_dataset @@ -115,6 +110,66 @@ def test_interpolation_simple(): assert np.all(traces_corrected[:, 2:] == 0) +def test_cross_band_interpolation(): + """Simple version of using LFP to interpolate AP data + + This also tests the time vector implementation in interpolation. + The idea is to have two recordings which are all 0s with a 1 that + moves from one channel to another after 3s. They're at different + sampling frequencies. motion estimation in one sampling frequency + applied to the other should still lead to perfect correction. + """ + from spikeinterface.sortingcomponents.motion import estimate_motion + + # sampling freqs and timing for AP and LFP recordings + fs_lfp = 50.0 + fs_ap = 300.0 + t_start = 10.0 + total_duration = 5.0 + nt_lfp = int(fs_lfp * total_duration) + nt_ap = int(fs_ap * total_duration) + t_switch = 3 + + # because interpolation uses bin centers logic, there will be a half + # bin offset at the change point in the AP recording. 
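+    # (with fs_ap=300. and fs_lfp=50. below, that is int(0.5 * 300. / 50.) = 3 AP samples)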
+ halfbin_ap_lfp = int(0.5 * (fs_ap / fs_lfp)) + + # channel geometry + nc = 10 + geom = np.c_[np.zeros(nc), np.arange(nc)] + + # make an LFP recording which drifts a bit + traces_lfp = np.zeros((nt_lfp, nc)) + traces_lfp[: int(t_switch * fs_lfp), 5] = 1.0 + traces_lfp[int(t_switch * fs_lfp) :, 6] = 1.0 + rec_lfp = sc.NumpyRecording(traces_lfp, sampling_frequency=fs_lfp) + rec_lfp.set_dummy_probe_from_locations(geom) + + # same for AP + traces_ap = np.zeros((nt_ap, nc)) + traces_ap[: int(t_switch * fs_ap) - halfbin_ap_lfp, 5] = 1.0 + traces_ap[int(t_switch * fs_ap) - halfbin_ap_lfp :, 6] = 1.0 + rec_ap = sc.NumpyRecording(traces_ap, sampling_frequency=fs_ap) + rec_ap.set_dummy_probe_from_locations(geom) + + # set times for both, and silence the warning + with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=UserWarning) + rec_lfp.set_times(t_start + np.arange(nt_lfp) / fs_lfp) + rec_ap.set_times(t_start + np.arange(nt_ap) / fs_ap) + + # estimate motion + motion = estimate_motion(rec_lfp, method="dredge_lfp", rigid=True) + + # nearest to keep it simple + rec_corrected = interpolate_motion(rec_ap, motion, spatial_interpolation_method="nearest", num_closest=2) + traces_corrected = rec_corrected.get_traces() + target = np.zeros((nt_ap, nc - 2)) + target[:, 4] = 1 + ii, jj = np.nonzero(traces_corrected) + assert np.array_equal(traces_corrected, target) + + def test_InterpolateMotionRecording(): rec, sorting = make_dataset() motion = make_fake_motion(rec) @@ -148,5 +203,6 @@ def test_InterpolateMotionRecording(): if __name__ == "__main__": # test_correct_motion_on_peaks() # test_interpolate_motion_on_traces() - test_interpolation_simple() - test_InterpolateMotionRecording() + # test_interpolation_simple() + # test_InterpolateMotionRecording() + test_cross_band_interpolation() From 726170b1526b954b5a26edd70d3162e476ed9f53 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 4 Nov 2024 16:28:44 +0000 Subject: [PATCH 03/45] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../sortingcomponents/motion/motion_interpolation.py | 3 +-- .../motion/tests/test_motion_interpolation.py | 7 +++++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py index 4fd42a8b39..810264d9e4 100644 --- a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py +++ b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py @@ -3,8 +3,7 @@ import numpy as np from spikeinterface.core.core_tools import define_function_from_class from spikeinterface.preprocessing import get_spatial_interpolation_kernel -from spikeinterface.preprocessing.basepreprocessor import ( - BasePreprocessor, BasePreprocessorSegment) +from spikeinterface.preprocessing.basepreprocessor import BasePreprocessor, BasePreprocessorSegment from spikeinterface.preprocessing.filter import fix_dtype diff --git a/src/spikeinterface/sortingcomponents/motion/tests/test_motion_interpolation.py b/src/spikeinterface/sortingcomponents/motion/tests/test_motion_interpolation.py index 69f681a1be..88af619220 100644 --- a/src/spikeinterface/sortingcomponents/motion/tests/test_motion_interpolation.py +++ b/src/spikeinterface/sortingcomponents/motion/tests/test_motion_interpolation.py @@ -4,8 +4,11 @@ import spikeinterface.core as sc from 
spikeinterface.sortingcomponents.motion import Motion
 from spikeinterface.sortingcomponents.motion.motion_interpolation import (
-    InterpolateMotionRecording, correct_motion_on_peaks, interpolate_motion,
-    interpolate_motion_on_traces)
+    InterpolateMotionRecording,
+    correct_motion_on_peaks,
+    interpolate_motion,
+    interpolate_motion_on_traces,
+)
 from spikeinterface.sortingcomponents.tests.common import make_dataset


From e791fe18671c2998fde9d44295c54a5781ca2e46 Mon Sep 17 00:00:00 2001
From: Charlie Windolf
Date: Mon, 11 Nov 2024 10:43:48 -0500
Subject: [PATCH 04/45] Cache bin edges in 2 places as discussed with Sam

---
 .../motion/motion_interpolation.py            | 39 +++++++++++++------
 .../sortingcomponents/motion/motion_utils.py  | 24 +++++++++++-
 .../motion/tests/test_motion_interpolation.py | 28 ++++++-------
 3 files changed, 66 insertions(+), 25 deletions(-)

diff --git a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py
index 4fd42a8b39..89696f5041 100644
--- a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py
+++ b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py
@@ -7,6 +7,8 @@
     BasePreprocessor, BasePreprocessorSegment)
 from spikeinterface.preprocessing.filter import fix_dtype
 
+from .motion_utils import ensure_time_bin_edges, ensure_time_bins
+
 
 def correct_motion_on_peaks(peaks, peak_locations, motion, recording) -> np.ndarray:
     """
@@ -55,6 +57,7 @@ def interpolate_motion_on_traces(
     segment_index=None,
     channel_inds=None,
     interpolation_time_bin_centers_s=None,
+    interpolation_time_bin_edges_s=None,
     spatial_interpolation_method="kriging",
     spatial_interpolation_kwargs={},
     dtype=None,
@@ -120,9 +123,11 @@ def interpolate_motion_on_traces(
     total_num_chans = channel_locations.shape[0]
 
     # -- determine the blocks of frames that will land in the same interpolation time bin
-    time_bins = interpolation_time_bin_centers_s
-    if time_bins is None:
-        time_bins = motion.temporal_bins_s[segment_index]
+    if interpolation_time_bin_centers_s is None and interpolation_time_bin_edges_s is None:
+        bin_centers_s = motion.temporal_bins_s[segment_index]
+        bin_edges_s = motion.temporal_bin_edges_s[segment_index]
+    else:
+        bin_centers_s, bin_edges_s = ensure_time_bins(interpolation_time_bin_centers_s, interpolation_time_bin_edges_s)
 
     # nearest interpolation bin:
     # searchsorted(b, t, side="right") == i means that b[i-1] <= t < b[i]
@@ -131,15 +136,13 @@ def interpolate_motion_on_traces(
     # time_bins are bin centers, but we want to snap to the nearest center.
     # (e.g. with centers [1., 2., 3.], t=1.6 should snap to the second bin, center 2.)
     # idea is to get the left bin edges and bin the interp times.
     # this is like subtracting bin_dt_s/2, but allows non-equally-spaced bins.
-    bin_left = np.zeros_like(time_bins)
     # it's fine to use the first bin center for the first left edge
-    bin_left[0] = time_bins[0]
-    bin_left[1:] = 0.5 * (time_bins[1:] + time_bins[:-1])
-    bin_inds = np.searchsorted(bin_left, times, side="right") - 1
+    bin_inds = np.searchsorted(bin_edges_s, times, side="right") - 1
 
     # the time bins may not cover the whole set of times in the recording,
     # so we need to clip these indices to the valid range
-    np.clip(bin_inds, 0, time_bins.size - 1, out=bin_inds)
+    n_bins = bin_edges_s.shape[0] - 1
+    np.clip(bin_inds, 0, n_bins - 1, out=bin_inds)
 
     # -- what are the possibilities here anyway?
bins_here = np.arange(bin_inds[0], bin_inds[-1] + 1) @@ -148,7 +151,7 @@ def interpolate_motion_on_traces( interp_times = np.empty(total_num_chans) current_start_index = 0 for bin_ind in bins_here: - bin_time = time_bins[bin_ind] + bin_time = bin_centers_s[bin_ind] interp_times.fill(bin_time) channel_motions = motion.get_displacement_at_time_and_depth( interp_times, @@ -307,6 +310,7 @@ def __init__( p=1, num_closest=3, interpolation_time_bin_centers_s=None, + interpolation_time_bin_edges_s=None, interpolation_time_bin_size_s=None, dtype=None, **spatial_interpolation_kwargs, @@ -373,9 +377,14 @@ def __init__( # handle manual interpolation_time_bin_centers_s # the case where interpolation_time_bin_size_s is set is handled per-segment below - if interpolation_time_bin_centers_s is None: + if interpolation_time_bin_centers_s is None and interpolation_time_bin_edges_s is None: if interpolation_time_bin_size_s is None: interpolation_time_bin_centers_s = motion.temporal_bins_s + interpolation_time_bin_edges_s = motion.temporal_bin_edges_s + else: + interpolation_time_bin_centers_s, interpolation_time_bin_edges_s = ensure_time_bins( + interpolation_time_bin_centers_s, interpolation_time_bin_edges_s + ) for segment_index, parent_segment in enumerate(recording._recording_segments): # finish the per-segment part of the time bin logic @@ -385,8 +394,13 @@ def __init__( t_start, t_end = parent_segment.sample_index_to_time(np.array([0, s_end])) halfbin = interpolation_time_bin_size_s / 2.0 segment_interpolation_time_bins_s = np.arange(t_start + halfbin, t_end, interpolation_time_bin_size_s) + segment_interpolation_time_bin_edges_s = np.arange( + t_start, t_end + halfbin, interpolation_time_bin_size_s + ) + assert segment_interpolation_time_bin_edges_s.shape == (segment_interpolation_time_bins_s.shape[0] + 1,) else: segment_interpolation_time_bins_s = interpolation_time_bin_centers_s[segment_index] + segment_interpolation_time_bin_edges_s = interpolation_time_bin_edges_s[segment_index] rec_segment = InterpolateMotionRecordingSegment( parent_segment, @@ -397,6 +411,7 @@ def __init__( channel_inds, segment_index, segment_interpolation_time_bins_s, + segment_interpolation_time_bin_edges_s, dtype=dtype_, ) self.add_recording_segment(rec_segment) @@ -430,6 +445,7 @@ def __init__( channel_inds, segment_index, interpolation_time_bin_centers_s, + interpolation_time_bin_edges_s, dtype="float32", ): BasePreprocessorSegment.__init__(self, parent_recording_segment) @@ -439,6 +455,7 @@ def __init__( self.channel_inds = channel_inds self.segment_index = segment_index self.interpolation_time_bin_centers_s = interpolation_time_bin_centers_s + self.interpolation_time_bin_edges_s = interpolation_time_bin_edges_s self.dtype = dtype self.motion = motion @@ -460,7 +477,7 @@ def get_traces(self, start_frame, end_frame, channel_indices): channel_inds=self.channel_inds, spatial_interpolation_method=self.spatial_interpolation_method, spatial_interpolation_kwargs=self.spatial_interpolation_kwargs, - interpolation_time_bin_centers_s=self.interpolation_time_bin_centers_s, + interpolation_time_bin_edges_s=self.interpolation_time_bin_edges_s, ) if channel_indices is not None: diff --git a/src/spikeinterface/sortingcomponents/motion/motion_utils.py b/src/spikeinterface/sortingcomponents/motion/motion_utils.py index 635624cca8..ec0a55a8f8 100644 --- a/src/spikeinterface/sortingcomponents/motion/motion_utils.py +++ b/src/spikeinterface/sortingcomponents/motion/motion_utils.py @@ -1,5 +1,5 @@ -import warnings import json +import warnings 
 from pathlib import Path
 
 import numpy as np
 
@@ -54,6 +54,7 @@ def __init__(self, displacement, temporal_bins_s, spatial_bins_um, direction="y"
         self.direction = direction
         self.dim = ["x", "y", "z"].index(direction)
         self.check_properties()
+        self.temporal_bin_edges_s = [ensure_time_bin_edges(tbins) for tbins in self.temporal_bins_s]
 
     def check_properties(self):
         assert all(d.ndim == 2 for d in self.displacement)
@@ -576,3 +577,24 @@ def make_3d_motion_histograms(
         motion_histograms = np.log2(1 + motion_histograms)
 
     return motion_histograms, temporal_bin_edges, spatial_bin_edges
+
+
+def ensure_time_bins(time_bin_centers_s=None, time_bin_edges_s=None):
+    if time_bin_centers_s is None and time_bin_edges_s is None:
+        raise ValueError("Need at least one of time_bin_centers_s or time_bin_edges_s.")
+
+    if time_bin_centers_s is None:
+        assert time_bin_edges_s.ndim == 1 and time_bin_edges_s.size >= 2
+        time_bin_centers_s = 0.5 * (time_bin_edges_s[1:] + time_bin_edges_s[:-1])
+
+    if time_bin_edges_s is None:
+        time_bin_edges_s = np.empty(time_bin_centers_s.shape[0] + 1, dtype=time_bin_centers_s.dtype)
+        time_bin_edges_s[[0, -1]] = time_bin_centers_s[[0, -1]]
+        if time_bin_centers_s.size > 1:  # interior edges are midpoints between consecutive centers
+            time_bin_edges_s[1:-1] = 0.5 * (time_bin_centers_s[1:] + time_bin_centers_s[:-1])
+
+    return time_bin_centers_s, time_bin_edges_s
+
+
+def ensure_time_bin_edges(time_bin_centers_s=None, time_bin_edges_s=None):
+    return ensure_time_bins(time_bin_centers_s, time_bin_edges_s)[1]
diff --git a/src/spikeinterface/sortingcomponents/motion/tests/test_motion_interpolation.py b/src/spikeinterface/sortingcomponents/motion/tests/test_motion_interpolation.py
index 69f681a1be..07cb5b8ab6 100644
--- a/src/spikeinterface/sortingcomponents/motion/tests/test_motion_interpolation.py
+++ b/src/spikeinterface/sortingcomponents/motion/tests/test_motion_interpolation.py
@@ -62,18 +62,20 @@ def test_interpolate_motion_on_traces():
     times = rec.get_times()[0:30000]
 
     for method in ("kriging", "idw", "nearest"):
-        traces_corrected = interpolate_motion_on_traces(
-            traces,
-            times,
-            channel_locations,
-            motion,
-            channel_inds=None,
-            spatial_interpolation_method=method,
-            # spatial_interpolation_kwargs={},
-            spatial_interpolation_kwargs={"force_extrapolate": True},
-        )
-        assert traces.shape == traces_corrected.shape
-        assert traces.dtype == traces_corrected.dtype
+        for interpolation_time_bin_centers_s in (None, np.linspace(*times[[0, -1]], num=3)):
+            traces_corrected = interpolate_motion_on_traces(
+                traces,
+                times,
+                channel_locations,
+                motion,
+                channel_inds=None,
+                spatial_interpolation_method=method,
+                interpolation_time_bin_centers_s=interpolation_time_bin_centers_s,
+                # spatial_interpolation_kwargs={},
+                spatial_interpolation_kwargs={"force_extrapolate": True},
+            )
+            assert traces.shape == traces_corrected.shape
+            assert traces.dtype == traces_corrected.dtype
 
 
 def test_interpolation_simple():
@@ -202,7 +204,7 @@ def test_InterpolateMotionRecording():
 
 if __name__ == "__main__":
     # test_correct_motion_on_peaks()
-    # test_interpolate_motion_on_traces()
+    test_interpolate_motion_on_traces()
     # test_interpolation_simple()
     # test_InterpolateMotionRecording()
     test_cross_band_interpolation()

From d8f39b5a70dd83f4e1fff71d41036692fba20b38 Mon Sep 17 00:00:00 2001
From: Charlie Windolf
Date: Mon, 11 Nov 2024 12:30:32 -0500
Subject: [PATCH 05/45] Sorry if this is shoe-horning in a change...
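
This makes `time_to_sample_index` round with numpy, so array-valued `time_s`
inputs map elementwise to integer sample indices instead of only scalars
working. A rough sketch of the behavior this enables (illustrative values
only, not from the patch; assumes a segment with `t_start` set and no
`time_vector`):

    import numpy as np

    fs = 30000.0   # sampling_frequency
    t_start = 10.0
    time_s = np.array([10.0, 10.0001, 11.5])
    # same arithmetic as the updated code path below
    sample_index = np.round((time_s - t_start) * fs).astype(int)
    # -> array([    0,     3, 45000]): an array in, an array out
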
--- src/spikeinterface/core/baserecording.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/src/spikeinterface/core/baserecording.py b/src/spikeinterface/core/baserecording.py index 5e2e9e4014..b95bfb1ad0 100644 --- a/src/spikeinterface/core/baserecording.py +++ b/src/spikeinterface/core/baserecording.py @@ -1,20 +1,17 @@ from __future__ import annotations + import warnings from pathlib import Path import numpy as np -from probeinterface import Probe, ProbeGroup, read_probeinterface, select_axes, write_probeinterface +from probeinterface import (Probe, ProbeGroup, read_probeinterface, + select_axes, write_probeinterface) from .base import BaseSegment from .baserecordingsnippets import BaseRecordingSnippets -from .core_tools import ( - convert_bytes_to_str, - convert_seconds_to_str, -) -from .recording_tools import write_binary_recording - - +from .core_tools import convert_bytes_to_str, convert_seconds_to_str from .job_tools import split_job_kwargs +from .recording_tools import write_binary_recording class BaseRecording(BaseRecordingSnippets): @@ -921,11 +918,11 @@ def time_to_sample_index(self, time_s): sample_index = time_s * self.sampling_frequency else: sample_index = (time_s - self.t_start) * self.sampling_frequency - sample_index = round(sample_index) + sample_index = np.round(sample_index).astype(int) else: sample_index = np.searchsorted(self.time_vector, time_s, side="right") - 1 - return int(sample_index) + return sample_index def get_num_samples(self) -> int: """Returns the number of samples in this signal segment From 0a201e17a0b3de283f06c5456010fb20fd8cd209 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 11 Nov 2024 17:31:00 +0000 Subject: [PATCH 06/45] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/spikeinterface/core/baserecording.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/spikeinterface/core/baserecording.py b/src/spikeinterface/core/baserecording.py index b95bfb1ad0..6d4509db12 100644 --- a/src/spikeinterface/core/baserecording.py +++ b/src/spikeinterface/core/baserecording.py @@ -4,8 +4,7 @@ from pathlib import Path import numpy as np -from probeinterface import (Probe, ProbeGroup, read_probeinterface, - select_axes, write_probeinterface) +from probeinterface import Probe, ProbeGroup, read_probeinterface, select_axes, write_probeinterface from .base import BaseSegment from .baserecordingsnippets import BaseRecordingSnippets From 530864bceda4b436f5cf16fd5258efd19ccaa76d Mon Sep 17 00:00:00 2001 From: chrishalcrow <57948917+chrishalcrow@users.noreply.github.com> Date: Thu, 14 Nov 2024 09:33:03 +0000 Subject: [PATCH 07/45] Replace qm_params with metric_params --- .../qualitymetrics/pca_metrics.py | 36 ++++++++++++------- .../quality_metric_calculator.py | 30 ++++++++++------ .../tests/test_metrics_functions.py | 10 +++--- 3 files changed, 47 insertions(+), 29 deletions(-) diff --git a/src/spikeinterface/qualitymetrics/pca_metrics.py b/src/spikeinterface/qualitymetrics/pca_metrics.py index 4c68dfea59..b4952bfe6d 100644 --- a/src/spikeinterface/qualitymetrics/pca_metrics.py +++ b/src/spikeinterface/qualitymetrics/pca_metrics.py @@ -6,6 +6,7 @@ from copy import deepcopy import platform from tqdm.auto import tqdm +from warnings import warn import numpy as np @@ -52,6 +53,7 @@ def get_quality_pca_metric_list(): def compute_pc_metrics( sorting_analyzer, metric_names=None, + 
metric_params=None, qm_params=None, unit_ids=None, seed=None, @@ -70,7 +72,7 @@ def compute_pc_metrics( metric_names : list of str, default: None The list of PC metrics to compute. If not provided, defaults to all PC metrics. - qm_params : dict or None + metric_params : dict or None Dictionary with parameters for each PC metric function. unit_ids : list of int or None List of unit ids to compute metrics for. @@ -86,6 +88,14 @@ def compute_pc_metrics( pc_metrics : dict The computed PC metrics. """ + + if qm_params is not None and metric_params is None: + deprecation_msg = ( + "`qm_params` is deprecated and will be removed in version 0.104.0 Please use metric_params instead" + ) + metric_params = qm_params + warn(deprecation_msg, category=DeprecationWarning, stacklevel=2) + pca_ext = sorting_analyzer.get_extension("principal_components") assert pca_ext is not None, "calculate_pc_metrics() need extension 'principal_components'" @@ -93,8 +103,8 @@ def compute_pc_metrics( if metric_names is None: metric_names = _possible_pc_metric_names.copy() - if qm_params is None: - qm_params = _default_params + if metric_params is None: + metric_params = _default_params extremum_channels = get_template_extremum_channel(sorting_analyzer) @@ -147,7 +157,7 @@ def compute_pc_metrics( pcs = dense_projections[np.isin(all_labels, neighbor_unit_ids)][:, :, neighbor_channel_indices] pcs_flat = pcs.reshape(pcs.shape[0], -1) - func_args = (pcs_flat, labels, non_nn_metrics, unit_id, unit_ids, qm_params, max_threads_per_process) + func_args = (pcs_flat, labels, non_nn_metrics, unit_id, unit_ids, metric_params, max_threads_per_process) items.append(func_args) if not run_in_parallel and non_nn_metrics: @@ -184,7 +194,7 @@ def compute_pc_metrics( units_loop = tqdm(units_loop, desc=f"calculate {metric_name} metric", total=len(unit_ids)) func = _nn_metric_name_to_func[metric_name] - metric_params = qm_params[metric_name] if metric_name in qm_params else {} + metric_params = metric_params[metric_name] if metric_name in metric_params else {} for _, unit_id in units_loop: try: @@ -213,7 +223,7 @@ def compute_pc_metrics( def calculate_pc_metrics( - sorting_analyzer, metric_names=None, qm_params=None, unit_ids=None, seed=None, n_jobs=1, progress_bar=False + sorting_analyzer, metric_names=None, metric_params=None, unit_ids=None, seed=None, n_jobs=1, progress_bar=False ): warnings.warn( "The `calculate_pc_metrics` function is deprecated and will be removed in 0.103.0. 
Please use compute_pc_metrics instead", @@ -224,7 +234,7 @@ def calculate_pc_metrics( pc_metrics = compute_pc_metrics( sorting_analyzer, metric_names=metric_names, - qm_params=qm_params, + metric_params=metric_params, unit_ids=unit_ids, seed=seed, n_jobs=n_jobs, @@ -977,16 +987,16 @@ def _compute_isolation(pcs_target_unit, pcs_other_unit, n_neighbors: int): def pca_metrics_one_unit(args): - (pcs_flat, labels, metric_names, unit_id, unit_ids, qm_params, max_threads_per_process) = args + (pcs_flat, labels, metric_names, unit_id, unit_ids, metric_params, max_threads_per_process) = args if max_threads_per_process is None: - return _pca_metrics_one_unit(pcs_flat, labels, metric_names, unit_id, unit_ids, qm_params) + return _pca_metrics_one_unit(pcs_flat, labels, metric_names, unit_id, unit_ids, metric_params) else: with threadpool_limits(limits=int(max_threads_per_process)): - return _pca_metrics_one_unit(pcs_flat, labels, metric_names, unit_id, unit_ids, qm_params) + return _pca_metrics_one_unit(pcs_flat, labels, metric_names, unit_id, unit_ids, metric_params) -def _pca_metrics_one_unit(pcs_flat, labels, metric_names, unit_id, unit_ids, qm_params): +def _pca_metrics_one_unit(pcs_flat, labels, metric_names, unit_id, unit_ids, metric_params): pc_metrics = {} # metrics if "isolation_distance" in metric_names or "l_ratio" in metric_names: @@ -1015,7 +1025,7 @@ def _pca_metrics_one_unit(pcs_flat, labels, metric_names, unit_id, unit_ids, qm_ if "nearest_neighbor" in metric_names: try: nn_hit_rate, nn_miss_rate = nearest_neighbors_metrics( - pcs_flat, labels, unit_id, **qm_params["nearest_neighbor"] + pcs_flat, labels, unit_id, **metric_params["nearest_neighbor"] ) except: nn_hit_rate = np.nan @@ -1024,7 +1034,7 @@ def _pca_metrics_one_unit(pcs_flat, labels, metric_names, unit_id, unit_ids, qm_ pc_metrics["nn_miss_rate"] = nn_miss_rate if "silhouette" in metric_names: - silhouette_method = qm_params["silhouette"]["method"] + silhouette_method = metric_params["silhouette"]["method"] if "simplified" in silhouette_method: try: unit_silhouette_score = simplified_silhouette_score(pcs_flat, labels, unit_id) diff --git a/src/spikeinterface/qualitymetrics/quality_metric_calculator.py b/src/spikeinterface/qualitymetrics/quality_metric_calculator.py index b6a50d60f5..eb380304b6 100644 --- a/src/spikeinterface/qualitymetrics/quality_metric_calculator.py +++ b/src/spikeinterface/qualitymetrics/quality_metric_calculator.py @@ -6,6 +6,7 @@ from copy import deepcopy import numpy as np +from warnings import warn from spikeinterface.core.job_tools import fix_job_kwargs from spikeinterface.core.sortinganalyzer import register_result_extension, AnalyzerExtension @@ -31,7 +32,7 @@ class ComputeQualityMetrics(AnalyzerExtension): A SortingAnalyzer object. metric_names : list or None List of quality metrics to compute. - qm_params : dict or None + metric_params : dict or None Dictionary with parameters for quality metrics calculation. 
Default parameters can be obtained with: `si.qualitymetrics.get_default_qm_params()` skip_pc_metrics : bool, default: False @@ -58,6 +59,7 @@ class ComputeQualityMetrics(AnalyzerExtension): def _set_params( self, metric_names=None, + metric_params=None, qm_params=None, peak_sign=None, seed=None, @@ -65,6 +67,12 @@ def _set_params( delete_existing_metrics=False, metrics_to_compute=None, ): + if qm_params is not None and metric_params is None: + deprecation_msg = ( + "`qm_params` is deprecated and will be removed in version 0.104.0 Please use metric_params instead" + ) + metric_params = qm_params + warn(deprecation_msg, category=DeprecationWarning, stacklevel=2) if metric_names is None: metric_names = list(_misc_metric_name_to_func.keys()) @@ -80,12 +88,12 @@ def _set_params( if "drift" in metric_names: metric_names.remove("drift") - qm_params_ = get_default_qm_params() - for k in qm_params_: - if qm_params is not None and k in qm_params: - qm_params_[k].update(qm_params[k]) - if "peak_sign" in qm_params_[k] and peak_sign is not None: - qm_params_[k]["peak_sign"] = peak_sign + metric_params_ = get_default_qm_params() + for k in metric_params_: + if metric_params is not None and k in metric_params: + metric_params_[k].update(metric_params[k]) + if "peak_sign" in metric_params_[k] and peak_sign is not None: + metric_params_[k]["peak_sign"] = peak_sign metrics_to_compute = metric_names qm_extension = self.sorting_analyzer.get_extension("quality_metrics") @@ -101,7 +109,7 @@ def _set_params( metric_names=metric_names, peak_sign=peak_sign, seed=seed, - qm_params=qm_params_, + metric_params=metric_params_, skip_pc_metrics=skip_pc_metrics, delete_existing_metrics=delete_existing_metrics, metrics_to_compute=metrics_to_compute, @@ -141,7 +149,7 @@ def _compute_metrics(self, sorting_analyzer, unit_ids=None, verbose=False, metri """ import pandas as pd - qm_params = self.params["qm_params"] + metric_params = self.params["metric_params"] # sparsity = self.params["sparsity"] seed = self.params["seed"] @@ -177,7 +185,7 @@ def _compute_metrics(self, sorting_analyzer, unit_ids=None, verbose=False, metri func = _misc_metric_name_to_func[metric_name] - params = qm_params[metric_name] if metric_name in qm_params else {} + params = metric_params[metric_name] if metric_name in metric_params else {} res = func(sorting_analyzer, unit_ids=non_empty_unit_ids, **params) # QM with uninstall dependencies might return None if res is not None: @@ -205,7 +213,7 @@ def _compute_metrics(self, sorting_analyzer, unit_ids=None, verbose=False, metri # sparsity=sparsity, progress_bar=progress_bar, n_jobs=n_jobs, - qm_params=qm_params, + metric_params=metric_params, seed=seed, ) for col, values in pc_metrics.items(): diff --git a/src/spikeinterface/qualitymetrics/tests/test_metrics_functions.py b/src/spikeinterface/qualitymetrics/tests/test_metrics_functions.py index 4c0890b62b..20869aa44a 100644 --- a/src/spikeinterface/qualitymetrics/tests/test_metrics_functions.py +++ b/src/spikeinterface/qualitymetrics/tests/test_metrics_functions.py @@ -69,7 +69,7 @@ def test_compute_new_quality_metrics(small_sorting_analyzer): assert calculated_metrics == ["snr"] small_sorting_analyzer.compute( - {"quality_metrics": {"metric_names": list(qm_params.keys()), "qm_params": qm_params}} + {"quality_metrics": {"metric_names": list(qm_params.keys()), "metric_params": qm_params}} ) small_sorting_analyzer.compute({"quality_metrics": {"metric_names": ["snr"]}}) @@ -96,13 +96,13 @@ def test_compute_new_quality_metrics(small_sorting_analyzer): # check 
that, when parameters are changed, the data and metadata are updated old_snr_data = deepcopy(quality_metric_extension.get_data()["snr"].values) small_sorting_analyzer.compute( - {"quality_metrics": {"metric_names": ["snr"], "qm_params": {"snr": {"peak_mode": "peak_to_peak"}}}} + {"quality_metrics": {"metric_names": ["snr"], "metric_params": {"snr": {"peak_mode": "peak_to_peak"}}}} ) new_quality_metric_extension = small_sorting_analyzer.get_extension("quality_metrics") new_snr_data = new_quality_metric_extension.get_data()["snr"].values assert np.all(old_snr_data != new_snr_data) - assert new_quality_metric_extension.params["qm_params"]["snr"]["peak_mode"] == "peak_to_peak" + assert new_quality_metric_extension.params["metric_params"]["snr"]["peak_mode"] == "peak_to_peak" # check that all quality metrics are deleted when parents are recomputed, even after # recomputation @@ -280,10 +280,10 @@ def test_unit_id_order_independence(small_sorting_analyzer): } quality_metrics_1 = compute_quality_metrics( - small_sorting_analyzer, metric_names=get_quality_metric_list(), qm_params=qm_params + small_sorting_analyzer, metric_names=get_quality_metric_list(), metric_params=qm_params ) quality_metrics_2 = compute_quality_metrics( - small_sorting_analyzer_2, metric_names=get_quality_metric_list(), qm_params=qm_params + small_sorting_analyzer_2, metric_names=get_quality_metric_list(), metric_params=qm_params ) for metric, metric_2_data in quality_metrics_2.items(): From 908318a04731f6e8723a9cf3b34914d8e782e900 Mon Sep 17 00:00:00 2001 From: chrishalcrow <57948917+chrishalcrow@users.noreply.github.com> Date: Thu, 14 Nov 2024 09:38:28 +0000 Subject: [PATCH 08/45] fix tests --- .../tests/test_quality_metric_calculator.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/spikeinterface/qualitymetrics/tests/test_quality_metric_calculator.py b/src/spikeinterface/qualitymetrics/tests/test_quality_metric_calculator.py index a6415c58e8..60f0490f51 100644 --- a/src/spikeinterface/qualitymetrics/tests/test_quality_metric_calculator.py +++ b/src/spikeinterface/qualitymetrics/tests/test_quality_metric_calculator.py @@ -24,14 +24,14 @@ def test_compute_quality_metrics(sorting_analyzer_simple): metrics = compute_quality_metrics( sorting_analyzer, metric_names=["snr"], - qm_params=dict(isi_violation=dict(isi_threshold_ms=2)), + metric_params=dict(isi_violation=dict(isi_threshold_ms=2)), skip_pc_metrics=True, seed=2205, ) # print(metrics) qm = sorting_analyzer.get_extension("quality_metrics") - assert qm.params["qm_params"]["isi_violation"]["isi_threshold_ms"] == 2 + assert qm.params["metric_params"]["isi_violation"]["isi_threshold_ms"] == 2 assert "snr" in metrics.columns assert "isolation_distance" not in metrics.columns @@ -40,7 +40,7 @@ def test_compute_quality_metrics(sorting_analyzer_simple): metrics = compute_quality_metrics( sorting_analyzer, metric_names=None, - qm_params=dict(isi_violation=dict(isi_threshold_ms=2)), + metric_params=dict(isi_violation=dict(isi_threshold_ms=2)), skip_pc_metrics=False, seed=2205, ) @@ -54,7 +54,7 @@ def test_compute_quality_metrics_recordingless(sorting_analyzer_simple): metrics = compute_quality_metrics( sorting_analyzer, metric_names=None, - qm_params=dict(isi_violation=dict(isi_threshold_ms=2)), + metric_params=dict(isi_violation=dict(isi_threshold_ms=2)), skip_pc_metrics=False, seed=2205, ) @@ -68,7 +68,7 @@ def test_compute_quality_metrics_recordingless(sorting_analyzer_simple): metrics_norec = compute_quality_metrics( sorting_analyzer_norec, 
         metric_names=None,
-        qm_params=dict(isi_violation=dict(isi_threshold_ms=2)),
+        metric_params=dict(isi_violation=dict(isi_threshold_ms=2)),
         skip_pc_metrics=False,
         seed=2205,
     )
@@ -101,7 +101,7 @@ def test_empty_units(sorting_analyzer_simple):
     metrics_empty = compute_quality_metrics(
         sorting_analyzer_empty,
         metric_names=None,
-        qm_params=dict(isi_violation=dict(isi_threshold_ms=2)),
+        metric_params=dict(isi_violation=dict(isi_threshold_ms=2)),
         skip_pc_metrics=True,
         seed=2205,
     )

From 2706a0bee9fef9dd3b3a4af99715366aae1c1625 Mon Sep 17 00:00:00 2001
From: chrishalcrow <57948917+chrishalcrow@users.noreply.github.com>
Date: Thu, 14 Nov 2024 09:45:28 +0000
Subject: [PATCH 09/45] Change metrics_kwargs to metric_params and add
 deprecation message

---
 .../postprocessing/template_metrics.py        | 36 ++++++++++++--------
 .../tests/test_template_metrics.py            |  2 +-
 2 files changed, 23 insertions(+), 15 deletions(-)

diff --git a/src/spikeinterface/postprocessing/template_metrics.py b/src/spikeinterface/postprocessing/template_metrics.py
index 6e7bcf21b8..ef6abfe51f 100644
--- a/src/spikeinterface/postprocessing/template_metrics.py
+++ b/src/spikeinterface/postprocessing/template_metrics.py
@@ -63,8 +63,8 @@ class ComputeTemplateMetrics(AnalyzerExtension):
     include_multi_channel_metrics : bool, default: False
         Whether to compute multi-channel metrics
     delete_existing_metrics : bool, default: False
-        If True, any template metrics attached to the `sorting_analyzer` are deleted. If False, any metrics which were previously calculated but are not included in `metric_names` are kept, provided the `metrics_kwargs` are unchanged.
-    metrics_kwargs : dict
+        If True, any template metrics attached to the `sorting_analyzer` are deleted. If False, any metrics which were previously calculated but are not included in `metric_names` are kept, provided the `metric_params` are unchanged.
+    metric_params : dict
         Additional arguments to pass to the metric functions. 
Including: * recovery_window_ms: the window in ms after the peak to compute the recovery_slope, default: 0.7 * peak_relative_threshold: the relative threshold to detect positive and negative peaks, default: 0.2 @@ -109,12 +109,20 @@ def _set_params( peak_sign="neg", upsampling_factor=10, sparsity=None, + metric_params=None, metrics_kwargs=None, include_multi_channel_metrics=False, delete_existing_metrics=False, **other_kwargs, ): + if metrics_kwargs is not None and metric_params is None: + deprecation_msg = ( + "`qm_params` is deprecated and will be removed in version 0.104.0 Please use metric_params instead" + ) + metric_params = metrics_kwargs + warnings.warn(deprecation_msg, category=DeprecationWarning, stacklevel=2) + import pandas as pd # TODO alessio can you check this : this used to be in the function but now we have ComputeTemplateMetrics.function_factory() @@ -134,27 +142,27 @@ def _set_params( if include_multi_channel_metrics: metric_names += get_multi_channel_template_metric_names() - if metrics_kwargs is None: - metrics_kwargs_ = _default_function_kwargs.copy() + if metric_params is None: + metric_params_ = _default_function_kwargs.copy() if len(other_kwargs) > 0: for m in other_kwargs: - if m in metrics_kwargs_: - metrics_kwargs_[m] = other_kwargs[m] + if m in metric_params_: + metric_params_[m] = other_kwargs[m] else: - metrics_kwargs_ = _default_function_kwargs.copy() - metrics_kwargs_.update(metrics_kwargs) + metric_params_ = _default_function_kwargs.copy() + metric_params_.update(metric_params) metrics_to_compute = metric_names tm_extension = self.sorting_analyzer.get_extension("template_metrics") if delete_existing_metrics is False and tm_extension is not None: - existing_params = tm_extension.params["metrics_kwargs"] + existing_params = tm_extension.params["metric_params"] # checks that existing metrics were calculated using the same params - if existing_params != metrics_kwargs_: + if existing_params != metric_params_: warnings.warn( f"The parameters used to calculate the previous template metrics are different" f"than those used now.\nPrevious parameters: {existing_params}\nCurrent " - f"parameters: {metrics_kwargs_}\nDeleting previous template metrics..." + f"parameters: {metric_params_}\nDeleting previous template metrics..." 
) tm_extension.params["metric_names"] = [] existing_metric_names = [] @@ -171,7 +179,7 @@ def _set_params( sparsity=sparsity, peak_sign=peak_sign, upsampling_factor=int(upsampling_factor), - metrics_kwargs=metrics_kwargs_, + metric_params=metric_params_, delete_existing_metrics=delete_existing_metrics, metrics_to_compute=metrics_to_compute, ) @@ -273,7 +281,7 @@ def _compute_metrics(self, sorting_analyzer, unit_ids=None, verbose=False, metri sampling_frequency=sampling_frequency_up, trough_idx=trough_idx, peak_idx=peak_idx, - **self.params["metrics_kwargs"], + **self.params["metric_params"], ) except Exception as e: warnings.warn(f"Error computing metric {metric_name} for unit {unit_id}: {e}") @@ -312,7 +320,7 @@ def _compute_metrics(self, sorting_analyzer, unit_ids=None, verbose=False, metri template_upsampled, channel_locations=channel_locations_sparse, sampling_frequency=sampling_frequency_up, - **self.params["metrics_kwargs"], + **self.params["metric_params"], ) except Exception as e: warnings.warn(f"Error computing metric {metric_name} for unit {unit_id}: {e}") diff --git a/src/spikeinterface/postprocessing/tests/test_template_metrics.py b/src/spikeinterface/postprocessing/tests/test_template_metrics.py index 5056d4ff2a..1df723bfe3 100644 --- a/src/spikeinterface/postprocessing/tests/test_template_metrics.py +++ b/src/spikeinterface/postprocessing/tests/test_template_metrics.py @@ -47,7 +47,7 @@ def test_compute_new_template_metrics(small_sorting_analyzer): # check that, when parameters are changed, the old metrics are deleted small_sorting_analyzer.compute( - {"template_metrics": {"metric_names": ["exp_decay"], "metrics_kwargs": {"recovery_window_ms": 0.6}}} + {"template_metrics": {"metric_names": ["exp_decay"], "metric_params": {"recovery_window_ms": 0.6}}} ) From 22460710bca1114e5f11f5ba4cbdad1b82941d70 Mon Sep 17 00:00:00 2001 From: chrishalcrow <57948917+chrishalcrow@users.noreply.github.com> Date: Thu, 14 Nov 2024 09:46:16 +0000 Subject: [PATCH 10/45] Update warning message (oups) --- src/spikeinterface/postprocessing/template_metrics.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/spikeinterface/postprocessing/template_metrics.py b/src/spikeinterface/postprocessing/template_metrics.py index ef6abfe51f..9b85f99c0d 100644 --- a/src/spikeinterface/postprocessing/template_metrics.py +++ b/src/spikeinterface/postprocessing/template_metrics.py @@ -118,7 +118,7 @@ def _set_params( if metrics_kwargs is not None and metric_params is None: deprecation_msg = ( - "`qm_params` is deprecated and will be removed in version 0.104.0 Please use metric_params instead" + "`metrics_kwargs` is deprecated and will be removed in version 0.104.0 Please use metric_params instead" ) metric_params = metrics_kwargs warnings.warn(deprecation_msg, category=DeprecationWarning, stacklevel=2) From 3579934baf43e25759ca1e8ee7f1e3288180be71 Mon Sep 17 00:00:00 2001 From: chrishalcrow <57948917+chrishalcrow@users.noreply.github.com> Date: Thu, 14 Nov 2024 10:04:02 +0000 Subject: [PATCH 11/45] Make compute work and add `get_default_tm_params` --- .../postprocessing/template_metrics.py | 45 +++++++++---------- 1 file changed, 20 insertions(+), 25 deletions(-) diff --git a/src/spikeinterface/postprocessing/template_metrics.py b/src/spikeinterface/postprocessing/template_metrics.py index 9b85f99c0d..25e0d0d490 100644 --- a/src/spikeinterface/postprocessing/template_metrics.py +++ b/src/spikeinterface/postprocessing/template_metrics.py @@ -64,22 +64,10 @@ class 
ComputeTemplateMetrics(AnalyzerExtension): Whether to compute multi-channel metrics delete_existing_metrics : bool, default: False If True, any template metrics attached to the `sorting_analyzer` are deleted. If False, any metrics which were previously calculated but are not included in `metric_names` are kept, provided the `metric_params` are unchanged. - metric_params : dict - Additional arguments to pass to the metric functions. Including: - * recovery_window_ms: the window in ms after the peak to compute the recovery_slope, default: 0.7 - * peak_relative_threshold: the relative threshold to detect positive and negative peaks, default: 0.2 - * peak_width_ms: the width in samples to detect peaks, default: 0.2 - * depth_direction: the direction to compute velocity above and below, default: "y" (see notes) - * min_channels_for_velocity: the minimum number of channels above or below to compute velocity, default: 5 - * min_r2_velocity: the minimum r2 to accept the velocity fit, default: 0.7 - * exp_peak_function: the function to use to compute the peak amplitude for the exp decay, default: "ptp" - * min_r2_exp_decay: the minimum r2 to accept the exp decay fit, default: 0.5 - * spread_threshold: the threshold to compute the spread, default: 0.2 - * spread_smooth_um: the smoothing in um to compute the spread, default: 20 - * column_range: the range in um in the horizontal direction to consider channels for velocity, default: None - - If None, all channels all channels are considered - - If 0 or 1, only the "column" that includes the max channel is considered - - If > 1, only channels within range (+/-) um from the max channel horizontal position are used + metric_params : dict of dicts + metric_params : dict of dicts or None + Dictionary with parameters for quality metrics calculation. 
+ Default parameters can be obtained with: `si.qualitymetrics.get_default_tm_params()` Returns ------- @@ -116,13 +104,6 @@ def _set_params( **other_kwargs, ): - if metrics_kwargs is not None and metric_params is None: - deprecation_msg = ( - "`metrics_kwargs` is deprecated and will be removed in version 0.104.0 Please use metric_params instead" - ) - metric_params = metrics_kwargs - warnings.warn(deprecation_msg, category=DeprecationWarning, stacklevel=2) - import pandas as pd # TODO alessio can you check this : this used to be in the function but now we have ComputeTemplateMetrics.function_factory() @@ -142,6 +123,13 @@ def _set_params( if include_multi_channel_metrics: metric_names += get_multi_channel_template_metric_names() + if metrics_kwargs is not None and metric_params is None: + deprecation_msg = ( + "`metrics_kwargs` is deprecated and will be removed in version 0.104.0 Please use metric_params instead" + ) + metric_params = dict(zip(metric_names, [metrics_kwargs] * len(metric_names))) + warnings.warn(deprecation_msg, category=DeprecationWarning, stacklevel=2) + if metric_params is None: metric_params_ = _default_function_kwargs.copy() if len(other_kwargs) > 0: @@ -281,7 +269,7 @@ def _compute_metrics(self, sorting_analyzer, unit_ids=None, verbose=False, metri sampling_frequency=sampling_frequency_up, trough_idx=trough_idx, peak_idx=peak_idx, - **self.params["metric_params"], + **self.params["metric_params"][metric_name], ) except Exception as e: warnings.warn(f"Error computing metric {metric_name} for unit {unit_id}: {e}") @@ -320,7 +308,7 @@ def _compute_metrics(self, sorting_analyzer, unit_ids=None, verbose=False, metri template_upsampled, channel_locations=channel_locations_sparse, sampling_frequency=sampling_frequency_up, - **self.params["metric_params"], + **self.params["metric_params"][metric_name], ) except Exception as e: warnings.warn(f"Error computing metric {metric_name} for unit {unit_id}: {e}") @@ -380,6 +368,13 @@ def _get_data(self): ) +def get_default_tm_params(): + metric_names = get_single_channel_template_metric_names() + get_multi_channel_template_metric_names() + base_tm_params = _default_function_kwargs + metric_params = dict(zip(metric_names, [base_tm_params] * len(metric_names))) + return metric_params + + def get_trough_and_peak_idx(template): """ Return the indices into the input template of the detected trough From 66190c3857bcabf30ca64af994693a3a029c41e1 Mon Sep 17 00:00:00 2001 From: chrishalcrow <57948917+chrishalcrow@users.noreply.github.com> Date: Thu, 14 Nov 2024 10:18:46 +0000 Subject: [PATCH 12/45] Update compute_name_to_column_names to qm_compute_name_to_column_names --- .../qualitymetrics/quality_metric_calculator.py | 6 +++--- src/spikeinterface/qualitymetrics/quality_metric_list.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/spikeinterface/qualitymetrics/quality_metric_calculator.py b/src/spikeinterface/qualitymetrics/quality_metric_calculator.py index eb380304b6..365d7bcc09 100644 --- a/src/spikeinterface/qualitymetrics/quality_metric_calculator.py +++ b/src/spikeinterface/qualitymetrics/quality_metric_calculator.py @@ -16,7 +16,7 @@ compute_pc_metrics, _misc_metric_name_to_func, _possible_pc_metric_names, - compute_name_to_column_names, + qm_compute_name_to_column_names, ) from .misc_metrics import _default_params as misc_metrics_params from .pca_metrics import _default_params as pca_metrics_params @@ -32,7 +32,7 @@ class ComputeQualityMetrics(AnalyzerExtension): A SortingAnalyzer object. 
metric_names : list or None List of quality metrics to compute. - metric_params : dict or None + metric_params : dict of dicts or None Dictionary with parameters for quality metrics calculation. Default parameters can be obtained with: `si.qualitymetrics.get_default_qm_params()` skip_pc_metrics : bool, default: False @@ -254,7 +254,7 @@ def _run(self, verbose=False, **job_kwargs): # append the metrics which were previously computed for metric_name in set(existing_metrics).difference(metrics_to_compute): # some metrics names produce data columns with other names. This deals with that. - for column_name in compute_name_to_column_names[metric_name]: + for column_name in qm_compute_name_to_column_names[metric_name]: computed_metrics[column_name] = qm_extension.data["metrics"][column_name] self.data["metrics"] = computed_metrics diff --git a/src/spikeinterface/qualitymetrics/quality_metric_list.py b/src/spikeinterface/qualitymetrics/quality_metric_list.py index 375dd320ae..fc7e92b50d 100644 --- a/src/spikeinterface/qualitymetrics/quality_metric_list.py +++ b/src/spikeinterface/qualitymetrics/quality_metric_list.py @@ -55,7 +55,7 @@ } # a dict converting the name of the metric for computation to the output of that computation -compute_name_to_column_names = { +qm_compute_name_to_column_names = { "num_spikes": ["num_spikes"], "firing_rate": ["firing_rate"], "presence_ratio": ["presence_ratio"], From 2de25b47013c8954ece03af1f47250e5db1f7ffb Mon Sep 17 00:00:00 2001 From: chrishalcrow <57948917+chrishalcrow@users.noreply.github.com> Date: Thu, 14 Nov 2024 10:19:27 +0000 Subject: [PATCH 13/45] Unify template param checks with quality param checks --- .../postprocessing/template_metrics.py | 63 +++++++++++-------- 1 file changed, 38 insertions(+), 25 deletions(-) diff --git a/src/spikeinterface/postprocessing/template_metrics.py b/src/spikeinterface/postprocessing/template_metrics.py index 25e0d0d490..cfdbd122b3 100644 --- a/src/spikeinterface/postprocessing/template_metrics.py +++ b/src/spikeinterface/postprocessing/template_metrics.py @@ -130,33 +130,18 @@ def _set_params( metric_params = dict(zip(metric_names, [metrics_kwargs] * len(metric_names))) warnings.warn(deprecation_msg, category=DeprecationWarning, stacklevel=2) - if metric_params is None: - metric_params_ = _default_function_kwargs.copy() - if len(other_kwargs) > 0: - for m in other_kwargs: - if m in metric_params_: - metric_params_[m] = other_kwargs[m] - else: - metric_params_ = _default_function_kwargs.copy() - metric_params_.update(metric_params) + metric_params_ = get_default_tm_params() + for k in metric_params_: + if metric_params is not None and k in metric_params: + metric_params_[k].update(metric_params[k]) + if "peak_sign" in metric_params_[k] and peak_sign is not None: + metric_params_[k]["peak_sign"] = peak_sign metrics_to_compute = metric_names tm_extension = self.sorting_analyzer.get_extension("template_metrics") if delete_existing_metrics is False and tm_extension is not None: - existing_params = tm_extension.params["metric_params"] - # checks that existing metrics were calculated using the same params - if existing_params != metric_params_: - warnings.warn( - f"The parameters used to calculate the previous template metrics are different" - f"than those used now.\nPrevious parameters: {existing_params}\nCurrent " - f"parameters: {metric_params_}\nDeleting previous template metrics..." 
- ) - tm_extension.params["metric_names"] = [] - existing_metric_names = [] - else: - existing_metric_names = tm_extension.params["metric_names"] - + existing_metric_names = tm_extension.params["metric_names"] existing_metric_names_propogated = [ metric_name for metric_name in existing_metric_names if metric_name not in metrics_to_compute ] @@ -322,8 +307,8 @@ def _compute_metrics(self, sorting_analyzer, unit_ids=None, verbose=False, metri def _run(self, verbose=False): - delete_existing_metrics = self.params["delete_existing_metrics"] metrics_to_compute = self.params["metrics_to_compute"] + delete_existing_metrics = self.params["delete_existing_metrics"] # compute the metrics which have been specified by the user computed_metrics = self._compute_metrics( @@ -339,9 +324,21 @@ def _run(self, verbose=False): ): existing_metrics = tm_extension.params["metric_names"] + existing_metrics = [] + # here we get in the loaded via the dict only (to avoid full loading from disk after params reset) + tm_extension = self.sorting_analyzer.extensions.get("template_metrics", None) + if ( + delete_existing_metrics is False + and tm_extension is not None + and tm_extension.data.get("metrics") is not None + ): + existing_metrics = tm_extension.params["metric_names"] + # append the metrics which were previously computed for metric_name in set(existing_metrics).difference(metrics_to_compute): - computed_metrics[metric_name] = tm_extension.data["metrics"][metric_name] + # some metrics names produce data columns with other names. This deals with that. + for column_name in tm_compute_name_to_column_names[metric_name]: + computed_metrics[column_name] = tm_extension.data["metrics"][column_name] self.data["metrics"] = computed_metrics @@ -369,12 +366,28 @@ def _get_data(self): def get_default_tm_params(): - metric_names = get_single_channel_template_metric_names() + get_multi_channel_template_metric_names() + metric_names = get_template_metric_names() base_tm_params = _default_function_kwargs metric_params = dict(zip(metric_names, [base_tm_params] * len(metric_names))) return metric_params +# a dict converting the name of the metric for computation to the output of that computation +tm_compute_name_to_column_names = { + "peak_to_valley": ["peak_to_valley"], + "peak_trough_ratio": ["peak_trough_ratio"], + "half_width": ["half_width"], + "repolarization_slope": ["repolarization_slope"], + "recovery_slope": ["recovery_slope"], + "num_positive_peaks": ["num_positive_peaks"], + "num_negative_peaks": ["num_negative_peaks"], + "velocity_above": ["velocity_above"], + "velocity_below": ["velocity_below"], + "exp_decay": ["exp_decay"], + "spread": ["spread"], +} + + def get_trough_and_peak_idx(template): """ Return the indices into the input template of the detected trough From 8f6602423d53dc625689da2183a24fe43cbb8629 Mon Sep 17 00:00:00 2001 From: chrishalcrow <57948917+chrishalcrow@users.noreply.github.com> Date: Thu, 14 Nov 2024 11:07:27 +0000 Subject: [PATCH 14/45] add some tests --- .../tests/test_template_metrics.py | 47 ++++++++++++++++++- 1 file changed, 46 insertions(+), 1 deletion(-) diff --git a/src/spikeinterface/postprocessing/tests/test_template_metrics.py b/src/spikeinterface/postprocessing/tests/test_template_metrics.py index 1df723bfe3..1bf49f64c1 100644 --- a/src/spikeinterface/postprocessing/tests/test_template_metrics.py +++ b/src/spikeinterface/postprocessing/tests/test_template_metrics.py @@ -1,5 +1,5 @@ from spikeinterface.postprocessing.tests.common_extension_tests import 
AnalyzerExtensionCommonTestSuite -from spikeinterface.postprocessing import ComputeTemplateMetrics +from spikeinterface.postprocessing import ComputeTemplateMetrics, compute_template_metrics import pytest import csv @@ -8,6 +8,49 @@ template_metrics = list(_single_channel_metric_name_to_func.keys()) +def test_different_params_template_metrics(small_sorting_analyzer): + """ + Computes template metrics using different params, and check that they are + actually calculated using the different params. + """ + compute_template_metrics( + sorting_analyzer=small_sorting_analyzer, + metric_names=["exp_decay", "spread", "half_width"], + metric_params={"exp_decay": {"recovery_window_ms": 0.8}, "spread": {"spread_smooth_um": 15}}, + ) + + tm_extension = small_sorting_analyzer.get_extension("template_metrics") + tm_params = tm_extension.params["metric_params"] + + assert tm_params["exp_decay"]["recovery_window_ms"] == 0.8 + assert tm_params["spread"]["recovery_window_ms"] == 0.7 + assert tm_params["half_width"]["recovery_window_ms"] == 0.7 + + assert tm_params["spread"]["spread_smooth_um"] == 15 + assert tm_params["exp_decay"]["spread_smooth_um"] == 20 + assert tm_params["half_width"]["spread_smooth_um"] == 20 + + +def test_backwards_compat_params_template_metrics(small_sorting_analyzer): + """ + Computes template metrics using the metrics_kwargs keyword + """ + compute_template_metrics( + sorting_analyzer=small_sorting_analyzer, + metric_names=["exp_decay", "spread"], + metrics_kwargs={"recovery_window_ms": 0.8}, + ) + + tm_extension = small_sorting_analyzer.get_extension("template_metrics") + tm_params = tm_extension.params["metric_params"] + + assert tm_params["exp_decay"]["recovery_window_ms"] == 0.8 + assert tm_params["spread"]["recovery_window_ms"] == 0.8 + + assert tm_params["spread"]["spread_smooth_um"] == 20 + assert tm_params["exp_decay"]["spread_smooth_um"] == 20 + + def test_compute_new_template_metrics(small_sorting_analyzer): """ Computes template metrics then computes a subset of template metrics, and checks @@ -17,6 +60,8 @@ def test_compute_new_template_metrics(small_sorting_analyzer): are deleted. """ + small_sorting_analyzer.delete_extension("template_metrics") + # calculate just exp_decay small_sorting_analyzer.compute({"template_metrics": {"metric_names": ["exp_decay"]}}) template_metric_extension = small_sorting_analyzer.get_extension("template_metrics") From fdc01f5adb81f55c5787166fa469100d7bc06239 Mon Sep 17 00:00:00 2001 From: chrishalcrow <57948917+chrishalcrow@users.noreply.github.com> Date: Thu, 14 Nov 2024 15:00:15 +0000 Subject: [PATCH 15/45] little fixes --- .../postprocessing/template_metrics.py | 31 +++++++++++-------- .../qualitymetrics/pca_metrics.py | 4 +-- 2 files changed, 20 insertions(+), 15 deletions(-) diff --git a/src/spikeinterface/postprocessing/template_metrics.py b/src/spikeinterface/postprocessing/template_metrics.py index cfdbd122b3..cbcf38d19d 100644 --- a/src/spikeinterface/postprocessing/template_metrics.py +++ b/src/spikeinterface/postprocessing/template_metrics.py @@ -66,8 +66,8 @@ class ComputeTemplateMetrics(AnalyzerExtension): If True, any template metrics attached to the `sorting_analyzer` are deleted. If False, any metrics which were previously calculated but are not included in `metric_names` are kept, provided the `metric_params` are unchanged. metric_params : dict of dicts metric_params : dict of dicts or None - Dictionary with parameters for quality metrics calculation. 
- Default parameters can be obtained with: `si.qualitymetrics.get_default_tm_params()` + Dictionary with parameters for template metrics calculation. + Default parameters can be obtained with: `si.postprocessing.template_metrics.get_default_tm_params()` Returns ------- @@ -124,18 +124,17 @@ def _set_params( metric_names += get_multi_channel_template_metric_names() if metrics_kwargs is not None and metric_params is None: - deprecation_msg = ( - "`metrics_kwargs` is deprecated and will be removed in version 0.104.0 Please use metric_params instead" - ) - metric_params = dict(zip(metric_names, [metrics_kwargs] * len(metric_names))) - warnings.warn(deprecation_msg, category=DeprecationWarning, stacklevel=2) + deprecation_msg = "`metrics_kwargs` is deprecated and will be removed in version 0.104.0. Please use metric_params instead" + warnings.warn(deprecation_msg, category=DeprecationWarning) + + metric_params = {} + for metric_name in metric_names: + metric_params[metric_name] = deepcopy(metrics_kwargs) - metric_params_ = get_default_tm_params() + metric_params_ = get_default_tm_params(metric_names) for k in metric_params_: if metric_params is not None and k in metric_params: metric_params_[k].update(metric_params[k]) - if "peak_sign" in metric_params_[k] and peak_sign is not None: - metric_params_[k]["peak_sign"] = peak_sign metrics_to_compute = metric_names tm_extension = self.sorting_analyzer.get_extension("template_metrics") @@ -365,10 +364,16 @@ def _get_data(self): ) -def get_default_tm_params(): - metric_names = get_template_metric_names() +def get_default_tm_params(metric_names): + if metric_names is None: + metric_names = get_template_metric_names() + base_tm_params = _default_function_kwargs - metric_params = dict(zip(metric_names, [base_tm_params] * len(metric_names))) + + metric_params = {} + for metric_name in metric_names: + metric_params[metric_name] = deepcopy(base_tm_params) + return metric_params diff --git a/src/spikeinterface/qualitymetrics/pca_metrics.py b/src/spikeinterface/qualitymetrics/pca_metrics.py index b4952bfe6d..ca21f1e45f 100644 --- a/src/spikeinterface/qualitymetrics/pca_metrics.py +++ b/src/spikeinterface/qualitymetrics/pca_metrics.py @@ -91,10 +91,10 @@ def compute_pc_metrics( if qm_params is not None and metric_params is None: deprecation_msg = ( - "`qm_params` is deprecated and will be removed in version 0.104.0 Please use metric_params instead" + "`qm_params` is deprecated and will be removed in version 0.104.0. 
Please use metric_params instead" ) - metric_params = qm_params warn(deprecation_msg, category=DeprecationWarning, stacklevel=2) + metric_params = qm_params pca_ext = sorting_analyzer.get_extension("principal_components") assert pca_ext is not None, "calculate_pc_metrics() need extension 'principal_components'" From 9db0b83f7c7ad2c773c8673fa3dd09e5c3cdecb6 Mon Sep 17 00:00:00 2001 From: chrishalcrow <57948917+chrishalcrow@users.noreply.github.com> Date: Thu, 14 Nov 2024 15:11:01 +0000 Subject: [PATCH 16/45] backwards compatible loading --- .../postprocessing/template_metrics.py | 12 ++++++++++++ .../qualitymetrics/quality_metric_calculator.py | 7 +++++++ 2 files changed, 19 insertions(+) diff --git a/src/spikeinterface/postprocessing/template_metrics.py b/src/spikeinterface/postprocessing/template_metrics.py index cbcf38d19d..477ad04440 100644 --- a/src/spikeinterface/postprocessing/template_metrics.py +++ b/src/spikeinterface/postprocessing/template_metrics.py @@ -344,6 +344,18 @@ def _run(self, verbose=False): def _get_data(self): return self.data["metrics"] + def load_params(self): + AnalyzerExtension.load_params(self) + # For backwards compatibility - this reformats metrics_kwargs as metric_params + if (metrics_kwargs := self.params.get("metrics_kwargs")) is not None: + + metric_params = {} + for metric_name in self.params["metric_names"]: + metric_params[metric_name] = deepcopy(metrics_kwargs) + self.params["metric_params"] = metric_params + + del self.params["metrics_kwargs"] + register_result_extension(ComputeTemplateMetrics) compute_template_metrics = ComputeTemplateMetrics.function_factory() diff --git a/src/spikeinterface/qualitymetrics/quality_metric_calculator.py b/src/spikeinterface/qualitymetrics/quality_metric_calculator.py index 365d7bcc09..e7e7c244ea 100644 --- a/src/spikeinterface/qualitymetrics/quality_metric_calculator.py +++ b/src/spikeinterface/qualitymetrics/quality_metric_calculator.py @@ -262,6 +262,13 @@ def _run(self, verbose=False, **job_kwargs): def _get_data(self): return self.data["metrics"] + def load_params(self): + AnalyzerExtension.load_params(self) + # For backwards compatibility - this renames qm_params as metric_params + if (qm_params := self.params.get("qm_params")) is not None: + self.params["metric_params"] = qm_params + del self.params["qm_params"] + register_result_extension(ComputeQualityMetrics) compute_quality_metrics = ComputeQualityMetrics.function_factory() From 9b2875fd06782bd6da722fed1f524dfda4203f0a Mon Sep 17 00:00:00 2001 From: Alessio Buccino Date: Thu, 21 Nov 2024 11:53:01 +0100 Subject: [PATCH 17/45] Add stream_mode as extra_requirements for NWB wghen streaming --- src/spikeinterface/extractors/nwbextractors.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/spikeinterface/extractors/nwbextractors.py b/src/spikeinterface/extractors/nwbextractors.py index d797e64910..171992f6b1 100644 --- a/src/spikeinterface/extractors/nwbextractors.py +++ b/src/spikeinterface/extractors/nwbextractors.py @@ -599,6 +599,8 @@ def __init__( else: gains, offsets, locations, groups = self._fetch_main_properties_backend() self.extra_requirements.append("h5py") + if stream_mode is not None: + self.extra_requirements.append(stream_mode) self.set_channel_gains(gains) self.set_channel_offsets(offsets) if locations is not None: @@ -1100,6 +1102,8 @@ def __init__( for property_name, property_values in properties.items(): values = [x.decode("utf-8") if isinstance(x, bytes) else x for x in property_values] self.set_property(property_name, values) + if 
stream_mode is not None: + self.extra_requirements.append(stream_mode) if stream_mode is None and file_path is not None: file_path = str(Path(file_path).resolve()) From ad00beb182967ebe68f59ebfd7f1abad3002a10e Mon Sep 17 00:00:00 2001 From: Charlie Windolf Date: Thu, 21 Nov 2024 20:44:20 +0000 Subject: [PATCH 18/45] Update `interpolate_motion_on_traces` docstring --- .../sortingcomponents/motion/motion_interpolation.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py index d207dced08..7c6f0ba71a 100644 --- a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py +++ b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py @@ -83,6 +83,9 @@ def interpolate_motion_on_traces( interpolation_time_bin_centers_s : None or np.array Manually specify the time bins which the interpolation happens in for this segment. If None, these are the motion estimate's time bins. + interpolation_time_bin_edges_s : None or np.array + If present, interpolation chunks will be the time bins defined by these edges + rather than interpolation_time_bin_centers_s or the motion's bins. spatial_interpolation_method : "idw" | "kriging", default: "kriging" The spatial interpolation method used to interpolate the channel locations: * idw : Inverse Distance Weighing From 28527d2a8bcd03a3c6d9036ac9be180ad4dc6334 Mon Sep 17 00:00:00 2001 From: Charlie Windolf Date: Thu, 21 Nov 2024 20:46:30 +0000 Subject: [PATCH 19/45] Should be centers! --- .../sortingcomponents/motion/motion_interpolation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py index 7c6f0ba71a..f0fff5c039 100644 --- a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py +++ b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py @@ -126,7 +126,7 @@ def interpolate_motion_on_traces( # -- determine the blocks of frames that will land in the same interpolation time bin if interpolation_time_bin_centers_s is None and interpolation_time_bin_edges_s is None: - bin_centers_s = motion.temporal_bin_edges_s[segment_index] + bin_centers_s = motion.temporal_bin_centers_s[segment_index] bin_edges_s = motion.temporal_bin_edges_s[segment_index] else: bin_centers_s, bin_edges_s = ensure_time_bins(interpolation_time_bin_centers_s, interpolation_time_bin_edges_s) From b3b3fcf5be7b67451f54cac753844cbb06b8d4a3 Mon Sep 17 00:00:00 2001 From: Charlie Windolf Date: Thu, 21 Nov 2024 20:47:34 +0000 Subject: [PATCH 20/45] Typo --- .../sortingcomponents/motion/motion_interpolation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py index f0fff5c039..9e32e189d9 100644 --- a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py +++ b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py @@ -132,7 +132,7 @@ def interpolate_motion_on_traces( bin_centers_s, bin_edges_s = ensure_time_bins(interpolation_time_bin_centers_s, interpolation_time_bin_edges_s) # nearest interpolation bin: - # seachsorted(b, t, side="right") == i means that b[i-1] <= t < b[i] + # searchsorted(b, t, side="right") == i means that b[i-1] <= t < b[i] # hence the -1. 
doing it with "left" is not as nice -- we want t==b[0] # to lead to i=1 (rounding down). # time_bins are bin centers, but we want to snap to the nearest center. From b02860e463262a3b9522ebf9badc45c860cf8d29 Mon Sep 17 00:00:00 2001 From: Charlie Windolf Date: Thu, 21 Nov 2024 20:51:11 +0000 Subject: [PATCH 21/45] Clarify comments --- .../sortingcomponents/motion/motion_interpolation.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py index 9e32e189d9..8f96579228 100644 --- a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py +++ b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py @@ -131,14 +131,10 @@ def interpolate_motion_on_traces( else: bin_centers_s, bin_edges_s = ensure_time_bins(interpolation_time_bin_centers_s, interpolation_time_bin_edges_s) - # nearest interpolation bin: + # bin the frame times according to the interpolation time bins. # searchsorted(b, t, side="right") == i means that b[i-1] <= t < b[i] # hence the -1. doing it with "left" is not as nice -- we want t==b[0] # to lead to i=1 (rounding down). - # time_bins are bin centers, but we want to snap to the nearest center. - # idea is to get the left bin edges and bin the interp times. - # this is like subtracting bin_dt_s/2, but allows non-equally-spaced bins. - # it's fine to use the first bin center for the first left edge bin_inds = np.searchsorted(bin_edges_s, times, side="right") - 1 # the time bins may not cover the whole set of times in the recording, From df2484002b562861f0f64bc053b5da619253e983 Mon Sep 17 00:00:00 2001 From: Charlie Windolf Date: Thu, 21 Nov 2024 20:57:08 +0000 Subject: [PATCH 22/45] Rename variables for clarity --- .../motion/motion_interpolation.py | 25 ++++++++++--------- 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py index 8f96579228..2bd3493650 100644 --- a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py +++ b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py @@ -126,30 +126,30 @@ def interpolate_motion_on_traces( # -- determine the blocks of frames that will land in the same interpolation time bin if interpolation_time_bin_centers_s is None and interpolation_time_bin_edges_s is None: - bin_centers_s = motion.temporal_bin_centers_s[segment_index] - bin_edges_s = motion.temporal_bin_edges_s[segment_index] + interpolation_time_bin_centers_s = motion.temporal_bin_centers_s[segment_index] + interpolation_time_bin_edges_s = motion.temporal_bin_edges_s[segment_index] else: - bin_centers_s, bin_edges_s = ensure_time_bins(interpolation_time_bin_centers_s, interpolation_time_bin_edges_s) + interpolation_time_bin_centers_s, interpolation_time_bin_edges_s = ensure_time_bins(interpolation_time_bin_centers_s, interpolation_time_bin_edges_s) # bin the frame times according to the interpolation time bins. # searchsorted(b, t, side="right") == i means that b[i-1] <= t < b[i] # hence the -1. doing it with "left" is not as nice -- we want t==b[0] # to lead to i=1 (rounding down). 
-    bin_inds = np.searchsorted(bin_edges_s, times, side="right") - 1
+    interpolation_bin_inds = np.searchsorted(interpolation_time_bin_edges_s, times, side="right") - 1

     # the time bins may not cover the whole set of times in the recording,
     # so we need to clip these indices to the valid range
-    n_bins = bin_edges_s.shape[0] - 1
-    np.clip(bin_inds, 0, n_bins - 1, out=bin_inds)
+    n_bins = interpolation_time_bin_edges_s.shape[0] - 1
+    np.clip(interpolation_bin_inds, 0, n_bins - 1, out=interpolation_bin_inds)

     # -- what are the possibilities here anyway?
-    bins_here = np.arange(bin_inds[0], bin_inds[-1] + 1)
+    interpolation_bins_here = np.arange(interpolation_bin_inds[0], interpolation_bin_inds[-1] + 1)

     # interpolation kernel will be the same per temporal bin
     interp_times = np.empty(total_num_chans)
     current_start_index = 0
-    for bin_ind in bins_here:
-        bin_time = bin_centers_s[bin_ind]
+    for interp_bin_ind in interpolation_bins_here:
+        bin_time = bin_centers_s[interp_bin_ind]
         interp_times.fill(bin_time)
         channel_motions = motion.get_displacement_at_time_and_depth(
             interp_times,
@@ -177,16 +177,17 @@ def interpolate_motion_on_traces(
             # ax.set_title(f"bin_ind {bin_ind} - {bin_time}s - {spatial_interpolation_method}")
             # plt.show()

+        # quick search logic to find frames corresponding to this interpolation bin in the recording
         # quickly find the end of this bin, which is also the start of the next
         next_start_index = current_start_index + np.searchsorted(
-            bin_inds[current_start_index:], bin_ind + 1, side="left"
+            interpolation_bin_inds[current_start_index:], interp_bin_ind + 1, side="left"
         )
-        in_bin = slice(current_start_index, next_start_index)
+        frames_in_bin = slice(current_start_index, next_start_index)

         # here we use a simple np.matmul even if drift_kernel can be super sparse.
         # because the speed for a sparse matmul is not so good when we disable multithreading (due to multiprocessing
         # in ChunkRecordingExecutor)
-        np.matmul(traces[in_bin], drift_kernel, out=traces_corrected[in_bin])
+        np.matmul(traces[frames_in_bin], drift_kernel, out=traces_corrected[frames_in_bin])
         current_start_index = next_start_index

     return traces_corrected

From 91fb7320eafbcdaa8fe728e36cbc7fa686f32ba8 Mon Sep 17 00:00:00 2001
From: Charlie Windolf
Date: Thu, 21 Nov 2024 20:59:13 +0000
Subject: [PATCH 23/45] Note on clipping behavior

---
 .../sortingcomponents/motion/motion_interpolation.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py
index 2bd3493650..8aed8085bf 100644
--- a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py
+++ b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py
@@ -64,7 +64,11 @@ def interpolate_motion_on_traces(
     """
     Apply inverse motion with spatial interpolation on traces.

-    Traces can be full traces, but also waveforms snippets.
+    Traces can be full traces, but also waveform snippets. Times used for looking up
+    displacements are controlled by interpolation_time_bin_edges_s or
+    interpolation_time_bin_centers_s, or fall back to the Motion object's time bins
+    by default; times in the recording outside these time bins use the closest edge
+    bin's displacement value during interpolation.
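The clipping behavior described in the docstring above can be sketched the same
way: times that fall before the first bin or after the last one simply reuse
the nearest edge bin. Again, the numbers are hypothetical::

    import numpy as np

    bin_edges_s = np.array([1.0, 2.0, 3.0])  # two bins: [1, 2) and [2, 3)
    times = np.array([0.2, 1.5, 2.5, 4.0])   # first and last fall outside the bins

    bin_inds = np.searchsorted(bin_edges_s, times, side="right") - 1  # [-1  0  1  2]
    n_bins = bin_edges_s.shape[0] - 1
    np.clip(bin_inds, 0, n_bins - 1, out=bin_inds)
    print(bin_inds)  # -> [0 0 1 1]: out-of-range times use the closest edge bin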
Parameters ---------- From b80bad71e2c1ee316048beb79a9169dd8f68c6ff Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 21 Nov 2024 21:01:47 +0000 Subject: [PATCH 24/45] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../sortingcomponents/motion/motion_interpolation.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py index 8aed8085bf..14471f77fc 100644 --- a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py +++ b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py @@ -65,7 +65,7 @@ def interpolate_motion_on_traces( Apply inverse motion with spatial interpolation on traces. Traces can be full traces, but also waveforms snippets. Times used for looking up - displacements are controlled by interpolation_time_bin_edges_s or + displacements are controlled by interpolation_time_bin_edges_s or interpolation_time_bin_centers_s, or fall back to the Motion object's time bins by default; times in the recording outside these time bins use the closest edge bin's displacement value during interpolation. @@ -133,7 +133,9 @@ def interpolate_motion_on_traces( interpolation_time_bin_centers_s = motion.temporal_bin_centers_s[segment_index] interpolation_time_bin_edges_s = motion.temporal_bin_edges_s[segment_index] else: - interpolation_time_bin_centers_s, interpolation_time_bin_edges_s = ensure_time_bins(interpolation_time_bin_centers_s, interpolation_time_bin_edges_s) + interpolation_time_bin_centers_s, interpolation_time_bin_edges_s = ensure_time_bins( + interpolation_time_bin_centers_s, interpolation_time_bin_edges_s + ) # bin the frame times according to the interpolation time bins. 
# searchsorted(b, t, side="right") == i means that b[i-1] <= t < b[i] From c89060314e233b89e4e9112e74c7643545806d22 Mon Sep 17 00:00:00 2001 From: Charlie Windolf Date: Thu, 21 Nov 2024 16:09:45 -0500 Subject: [PATCH 25/45] Fix variable typo; add docstring --- .../motion/motion_interpolation.py | 5 +++-- .../sortingcomponents/motion/motion_utils.py | 16 ++++++++++++++++ 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py index 14471f77fc..e87f83751c 100644 --- a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py +++ b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py @@ -3,7 +3,8 @@ import numpy as np from spikeinterface.core.core_tools import define_function_from_class from spikeinterface.preprocessing import get_spatial_interpolation_kernel -from spikeinterface.preprocessing.basepreprocessor import BasePreprocessor, BasePreprocessorSegment +from spikeinterface.preprocessing.basepreprocessor import ( + BasePreprocessor, BasePreprocessorSegment) from spikeinterface.preprocessing.filter import fix_dtype from .motion_utils import ensure_time_bin_edges, ensure_time_bins @@ -155,7 +156,7 @@ def interpolate_motion_on_traces( interp_times = np.empty(total_num_chans) current_start_index = 0 for interp_bin_ind in interpolation_bins_here: - bin_time = bin_centers_s[interp_bin_ind] + bin_time = interpolation_time_bin_centers_s[interp_bin_ind] interp_times.fill(bin_time) channel_motions = motion.get_displacement_at_time_and_depth( interp_times, diff --git a/src/spikeinterface/sortingcomponents/motion/motion_utils.py b/src/spikeinterface/sortingcomponents/motion/motion_utils.py index ec0a55a8f8..680d75f221 100644 --- a/src/spikeinterface/sortingcomponents/motion/motion_utils.py +++ b/src/spikeinterface/sortingcomponents/motion/motion_utils.py @@ -580,6 +580,22 @@ def make_3d_motion_histograms( def ensure_time_bins(time_bin_centers_s=None, time_bin_edges_s=None): + """Ensure that both bin edges and bin centers are present + + If either of the inputs are None but not both, the missing is reconstructed + from the present. Going from edges to centers is done by taking midpoints. + Going from centers to edges is done by taking midpoints and padding with the + left and rightmost centers. 
+ + Parameters + ---------- + time_bin_centers_s : None or np.array + time_bin_edges_s : None or np.array + + Returns + ------- + time_bin_centers_s, time_bin_edges_s + """ if time_bin_centers_s is None and time_bin_edges_s is None: raise ValueError("Need at least one of time_bin_centers_s or time_bin_edges_s.") From 6d2e47911a5e64737f2c98f9cdc199e9f6d306fc Mon Sep 17 00:00:00 2001 From: Charlie Windolf Date: Thu, 21 Nov 2024 16:14:00 -0500 Subject: [PATCH 26/45] Variable names in tests --- .../motion/tests/test_motion_interpolation.py | 45 +++++++++---------- 1 file changed, 21 insertions(+), 24 deletions(-) diff --git a/src/spikeinterface/sortingcomponents/motion/tests/test_motion_interpolation.py b/src/spikeinterface/sortingcomponents/motion/tests/test_motion_interpolation.py index 8542b62524..c97c8324ba 100644 --- a/src/spikeinterface/sortingcomponents/motion/tests/test_motion_interpolation.py +++ b/src/spikeinterface/sortingcomponents/motion/tests/test_motion_interpolation.py @@ -4,11 +4,8 @@ import spikeinterface.core as sc from spikeinterface.sortingcomponents.motion import Motion from spikeinterface.sortingcomponents.motion.motion_interpolation import ( - InterpolateMotionRecording, - correct_motion_on_peaks, - interpolate_motion, - interpolate_motion_on_traces, -) + InterpolateMotionRecording, correct_motion_on_peaks, interpolate_motion, + interpolate_motion_on_traces) from spikeinterface.sortingcomponents.tests.common import make_dataset @@ -84,26 +81,26 @@ def test_interpolate_motion_on_traces(): def test_interpolation_simple(): # a recording where a 1 moves at 1 chan per second. 30 chans 10 frames. # there will be 9 chans of drift, so we add 9 chans of padding to the bottom - nt = nc0 = 10 # these need to be the same for this test - nc1 = nc0 + nc0 - 1 - traces = np.zeros((nt, nc1), dtype="float32") - traces[:, :nc0] = np.eye(nc0) + n_samples = num_chans_orig = 10 # these need to be the same for this test + num_chans_drifted = num_chans_orig + num_chans_orig - 1 + traces = np.zeros((n_samples, num_chans_drifted), dtype="float32") + traces[:, :num_chans_orig] = np.eye(num_chans_orig) rec = sc.NumpyRecording(traces, sampling_frequency=1) - rec.set_dummy_probe_from_locations(np.c_[np.zeros(nc1), np.arange(nc1)]) + rec.set_dummy_probe_from_locations(np.c_[np.zeros(num_chans_drifted), np.arange(num_chans_drifted)]) - true_motion = Motion(np.arange(nt)[:, None], 0.5 + np.arange(nt), np.zeros(1)) + true_motion = Motion(np.arange(n_samples)[:, None], 0.5 + np.arange(n_samples), np.zeros(1)) rec_corrected = interpolate_motion(rec, true_motion, spatial_interpolation_method="nearest") traces_corrected = rec_corrected.get_traces() - assert traces_corrected.shape == (nc0, nc0) - assert np.array_equal(traces_corrected[:, 0], np.ones(nt)) - assert np.array_equal(traces_corrected[:, 1:], np.zeros((nt, nc0 - 1))) + assert traces_corrected.shape == (num_chans_orig, num_chans_orig) + assert np.array_equal(traces_corrected[:, 0], np.ones(n_samples)) + assert np.array_equal(traces_corrected[:, 1:], np.zeros((n_samples, num_chans_orig - 1))) # let's try a new version where we interpolate too slowly rec_corrected = interpolate_motion( rec, true_motion, spatial_interpolation_method="nearest", num_closest=2, interpolation_time_bin_size_s=2 ) traces_corrected = rec_corrected.get_traces() - assert traces_corrected.shape == (nc0, nc0) + assert traces_corrected.shape == (num_chans_orig, num_chans_orig) # what happens with nearest here? # well... 
due to rounding towards the nearest even number, the motion (which at # these time bin centers is 0.5, 2.5, 4.5, ...) flips the signal's nearest @@ -131,8 +128,8 @@ def test_cross_band_interpolation(): fs_ap = 300.0 t_start = 10.0 total_duration = 5.0 - nt_lfp = int(fs_lfp * total_duration) - nt_ap = int(fs_ap * total_duration) + num_samples_lfp = int(fs_lfp * total_duration) + num_samples_ap = int(fs_ap * total_duration) t_switch = 3 # because interpolation uses bin centers logic, there will be a half @@ -140,18 +137,18 @@ def test_cross_band_interpolation(): halfbin_ap_lfp = int(0.5 * (fs_ap / fs_lfp)) # channel geometry - nc = 10 - geom = np.c_[np.zeros(nc), np.arange(nc)] + num_chans = 10 + geom = np.c_[np.zeros(num_chans), np.arange(num_chans)] # make an LFP recording which drifts a bit - traces_lfp = np.zeros((nt_lfp, nc)) + traces_lfp = np.zeros((num_samples_lfp, num_chans)) traces_lfp[: int(t_switch * fs_lfp), 5] = 1.0 traces_lfp[int(t_switch * fs_lfp) :, 6] = 1.0 rec_lfp = sc.NumpyRecording(traces_lfp, sampling_frequency=fs_lfp) rec_lfp.set_dummy_probe_from_locations(geom) # same for AP - traces_ap = np.zeros((nt_ap, nc)) + traces_ap = np.zeros((num_samples_ap, num_chans)) traces_ap[: int(t_switch * fs_ap) - halfbin_ap_lfp, 5] = 1.0 traces_ap[int(t_switch * fs_ap) - halfbin_ap_lfp :, 6] = 1.0 rec_ap = sc.NumpyRecording(traces_ap, sampling_frequency=fs_ap) @@ -160,8 +157,8 @@ def test_cross_band_interpolation(): # set times for both, and silence the warning with warnings.catch_warnings(): warnings.simplefilter("ignore", category=UserWarning) - rec_lfp.set_times(t_start + np.arange(nt_lfp) / fs_lfp) - rec_ap.set_times(t_start + np.arange(nt_ap) / fs_ap) + rec_lfp.set_times(t_start + np.arange(num_samples_lfp) / fs_lfp) + rec_ap.set_times(t_start + np.arange(num_samples_ap) / fs_ap) # estimate motion motion = estimate_motion(rec_lfp, method="dredge_lfp", rigid=True) @@ -169,7 +166,7 @@ def test_cross_band_interpolation(): # nearest to keep it simple rec_corrected = interpolate_motion(rec_ap, motion, spatial_interpolation_method="nearest", num_closest=2) traces_corrected = rec_corrected.get_traces() - target = np.zeros((nt_ap, nc - 2)) + target = np.zeros((num_samples_ap, num_chans - 2)) target[:, 4] = 1 ii, jj = np.nonzero(traces_corrected) assert np.array_equal(traces_corrected, target) From b4c91a0d941e97be68908b11974d18f802ec74a5 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 21 Nov 2024 21:15:30 +0000 Subject: [PATCH 27/45] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../sortingcomponents/motion/motion_interpolation.py | 3 +-- .../motion/tests/test_motion_interpolation.py | 7 +++++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py index e87f83751c..b3a4c9a207 100644 --- a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py +++ b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py @@ -3,8 +3,7 @@ import numpy as np from spikeinterface.core.core_tools import define_function_from_class from spikeinterface.preprocessing import get_spatial_interpolation_kernel -from spikeinterface.preprocessing.basepreprocessor import ( - BasePreprocessor, BasePreprocessorSegment) +from spikeinterface.preprocessing.basepreprocessor import BasePreprocessor, BasePreprocessorSegment from 
spikeinterface.preprocessing.filter import fix_dtype from .motion_utils import ensure_time_bin_edges, ensure_time_bins diff --git a/src/spikeinterface/sortingcomponents/motion/tests/test_motion_interpolation.py b/src/spikeinterface/sortingcomponents/motion/tests/test_motion_interpolation.py index c97c8324ba..e4ba870325 100644 --- a/src/spikeinterface/sortingcomponents/motion/tests/test_motion_interpolation.py +++ b/src/spikeinterface/sortingcomponents/motion/tests/test_motion_interpolation.py @@ -4,8 +4,11 @@ import spikeinterface.core as sc from spikeinterface.sortingcomponents.motion import Motion from spikeinterface.sortingcomponents.motion.motion_interpolation import ( - InterpolateMotionRecording, correct_motion_on_peaks, interpolate_motion, - interpolate_motion_on_traces) + InterpolateMotionRecording, + correct_motion_on_peaks, + interpolate_motion, + interpolate_motion_on_traces, +) from spikeinterface.sortingcomponents.tests.common import make_dataset From 38e0adac18c2c862f9193e5a5d643b394a6d3d52 Mon Sep 17 00:00:00 2001 From: Charlie Windolf Date: Fri, 22 Nov 2024 10:28:34 -0500 Subject: [PATCH 28/45] Typo --- .../sortingcomponents/motion/motion_interpolation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py index b3a4c9a207..fc8ccb788b 100644 --- a/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py +++ b/src/spikeinterface/sortingcomponents/motion/motion_interpolation.py @@ -130,7 +130,7 @@ def interpolate_motion_on_traces( # -- determine the blocks of frames that will land in the same interpolation time bin if interpolation_time_bin_centers_s is None and interpolation_time_bin_edges_s is None: - interpolation_time_bin_centers_s = motion.temporal_bin_centers_s[segment_index] + interpolation_time_bin_centers_s = motion.temporal_bins_s[segment_index] interpolation_time_bin_edges_s = motion.temporal_bin_edges_s[segment_index] else: interpolation_time_bin_centers_s, interpolation_time_bin_edges_s = ensure_time_bins( From b0e7b1c60086fd818759b8df0f617d5441f4ff40 Mon Sep 17 00:00:00 2001 From: Pierre Yger Date: Fri, 29 Nov 2024 06:04:48 +0100 Subject: [PATCH 29/45] Fix kwargs in silence periods --- src/spikeinterface/preprocessing/silence_periods.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/spikeinterface/preprocessing/silence_periods.py b/src/spikeinterface/preprocessing/silence_periods.py index 85169011d8..a188f5d8db 100644 --- a/src/spikeinterface/preprocessing/silence_periods.py +++ b/src/spikeinterface/preprocessing/silence_periods.py @@ -97,7 +97,12 @@ def __init__(self, recording, list_periods, mode="zeros", noise_levels=None, see rec_segment = SilencedPeriodsRecordingSegment(parent_segment, periods, mode, noise_generator, seg_index) self.add_recording_segment(rec_segment) - self._kwargs = dict(recording=recording, list_periods=list_periods, mode=mode, noise_generator=noise_generator) + self._kwargs = dict(recording=recording, + list_periods=list_periods, + mode=mode, + noise_levels=noise_levels, + seed=seed, + random_chunk_kwargs=random_chunk_kwargs) class SilencedPeriodsRecordingSegment(BasePreprocessorSegment): From 60d7ad53b59fac5b47bd976905d64efc62b3daeb Mon Sep 17 00:00:00 2001 From: Pierre Yger Date: Fri, 29 Nov 2024 06:10:02 +0100 Subject: [PATCH 30/45] Fix --- src/spikeinterface/preprocessing/silence_periods.py | 1 - 1 file changed, 1 deletion(-) diff --git 
a/src/spikeinterface/preprocessing/silence_periods.py b/src/spikeinterface/preprocessing/silence_periods.py
index a188f5d8db..5e410d51d5 100644
--- a/src/spikeinterface/preprocessing/silence_periods.py
+++ b/src/spikeinterface/preprocessing/silence_periods.py
@@ -100,7 +100,6 @@ def __init__(self, recording, list_periods, mode="zeros", noise_levels=None, see
         self._kwargs = dict(recording=recording,
                             list_periods=list_periods,
                             mode=mode,
-                            noise_levels=noise_levels,
                             seed=seed,
                             random_chunk_kwargs=random_chunk_kwargs)

From 01ae85cbb3ebcfd6f9e20eb191d92a278bbcb5e4 Mon Sep 17 00:00:00 2001
From: Pierre Yger
Date: Fri, 29 Nov 2024 06:14:16 +0100
Subject: [PATCH 31/45] WIP

---
 src/spikeinterface/preprocessing/silence_periods.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/spikeinterface/preprocessing/silence_periods.py b/src/spikeinterface/preprocessing/silence_periods.py
index 5e410d51d5..7c518d02a0 100644
--- a/src/spikeinterface/preprocessing/silence_periods.py
+++ b/src/spikeinterface/preprocessing/silence_periods.py
@@ -100,8 +100,8 @@ def __init__(self, recording, list_periods, mode="zeros", noise_levels=None, see
         self._kwargs = dict(recording=recording,
                             list_periods=list_periods,
                             mode=mode,
-                            seed=seed,
-                            random_chunk_kwargs=random_chunk_kwargs)
+                            seed=seed)
+        self._kwargs.update(random_chunk_kwargs)

 class SilencedPeriodsRecordingSegment(BasePreprocessorSegment):

From db2b4d5130a095500637c06f9d74aa2f03d41b73 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Fri, 29 Nov 2024 05:15:54 +0000
Subject: [PATCH 32/45] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 src/spikeinterface/preprocessing/silence_periods.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/src/spikeinterface/preprocessing/silence_periods.py b/src/spikeinterface/preprocessing/silence_periods.py
index 7c518d02a0..00d9a1a407 100644
--- a/src/spikeinterface/preprocessing/silence_periods.py
+++ b/src/spikeinterface/preprocessing/silence_periods.py
@@ -97,10 +97,7 @@ def __init__(self, recording, list_periods, mode="zeros", noise_levels=None, see
         rec_segment = SilencedPeriodsRecordingSegment(parent_segment, periods, mode, noise_generator, seg_index)
         self.add_recording_segment(rec_segment)

-        self._kwargs = dict(recording=recording,
-                            list_periods=list_periods,
-                            mode=mode,
-                            seed=seed)
+        self._kwargs = dict(recording=recording, list_periods=list_periods, mode=mode, seed=seed)
         self._kwargs.update(random_chunk_kwargs)


From 5af4f858268c18421c8d1e7a3cbaca3c9957491e Mon Sep 17 00:00:00 2001
From: chrishalcrow <57948917+chrishalcrow@users.noreply.github.com>
Date: Mon, 2 Dec 2024 09:19:15 +0000
Subject: [PATCH 33/45] Hard code synchrony_size for users, but leave flexible
 code underneath

---
 doc/get_started/quickstart.rst                  |  2 +-
 doc/modules/qualitymetrics/synchrony.rst        |  4 +--
 .../qualitymetrics/misc_metrics.py              | 27 ++++++-------
 .../tests/test_metrics_functions.py             | 39 ++++++++-----------
 4 files changed, 30 insertions(+), 42 deletions(-)

diff --git a/doc/get_started/quickstart.rst b/doc/get_started/quickstart.rst
index 3d45606a78..1349802ce5 100644
--- a/doc/get_started/quickstart.rst
+++ b/doc/get_started/quickstart.rst
@@ -673,7 +673,7 @@ compute quality metrics (some quality metrics require certain extensions
            'min_spikes': 0, 'window_size_s': 1},
     'snr': {'peak_mode': 'extremum', 'peak_sign': 'neg'},
-    'synchrony': {'synchrony_sizes': (2, 4, 8)}}
+    'synchrony': {}
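After this change, the user-facing call takes no sizes argument; a minimal
sketch, assuming an existing `sorting_analyzer`::

    import spikeinterface.qualitymetrics as sqm

    # synchrony sizes 2, 4 and 8 are now built in -- no keyword to pass
    synchrony = sqm.compute_synchrony_metrics(sorting_analyzer=sorting_analyzer)
    print(synchrony.sync_spike_2)  # per-unit dict; sync_spike_4 / sync_spike_8 likewise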
Since the recording is very short, let’s change some parameters to diff --git a/doc/modules/qualitymetrics/synchrony.rst b/doc/modules/qualitymetrics/synchrony.rst index d244fd0c0f..696dacbd3c 100644 --- a/doc/modules/qualitymetrics/synchrony.rst +++ b/doc/modules/qualitymetrics/synchrony.rst @@ -12,7 +12,7 @@ trains. This way synchronous events can be found both in multi-unit and single-u Complexity is calculated by counting the number of spikes (i.e. non-empty bins) that occur at the same sample index, within and across spike trains. -Synchrony metrics can be computed for different synchrony sizes (>1), defining the number of simultaneous spikes to count. +Synchrony metrics are computed for 2, 4 and 8 synchronous spikes. @@ -29,7 +29,7 @@ Example code import spikeinterface.qualitymetrics as sqm # Combine a sorting and recording into a sorting_analyzer - synchrony = sqm.compute_synchrony_metrics(sorting_analyzer=sorting_analyzer synchrony_sizes=(2, 4, 8)) + synchrony = sqm.compute_synchrony_metrics(sorting_analyzer=sorting_analyzer) # synchrony is a tuple of dicts with the synchrony metrics for each unit diff --git a/src/spikeinterface/qualitymetrics/misc_metrics.py b/src/spikeinterface/qualitymetrics/misc_metrics.py index 8dfd41cf88..b0e0a0ad19 100644 --- a/src/spikeinterface/qualitymetrics/misc_metrics.py +++ b/src/spikeinterface/qualitymetrics/misc_metrics.py @@ -520,7 +520,7 @@ def compute_sliding_rp_violations( ) -def get_synchrony_counts(spikes, synchrony_sizes, all_unit_ids): +def _get_synchrony_counts(spikes, all_unit_ids, synchrony_sizes=np.array([2, 4, 8])): """ Compute synchrony counts, the number of simultaneous spikes with sizes `synchrony_sizes`. @@ -528,10 +528,10 @@ def get_synchrony_counts(spikes, synchrony_sizes, all_unit_ids): ---------- spikes : np.array Structured numpy array with fields ("sample_index", "unit_index", "segment_index"). - synchrony_sizes : numpy array - The synchrony sizes to compute. Should be pre-sorted. all_unit_ids : list or None, default: None List of unit ids to compute the synchrony metrics. Expecting all units. + synchrony_sizes : numpy array + The synchrony sizes to compute. Should be pre-sorted. Returns ------- @@ -565,17 +565,15 @@ def get_synchrony_counts(spikes, synchrony_sizes, all_unit_ids): return synchrony_counts -def compute_synchrony_metrics(sorting_analyzer, synchrony_sizes=(2, 4, 8), unit_ids=None): +def compute_synchrony_metrics(sorting_analyzer, unit_ids=None): """ Compute synchrony metrics. Synchrony metrics represent the rate of occurrences of - "synchrony_size" spikes at the exact same sample index. + spikes at the exact same sample index, with synchrony sizes 2, 4 and 8. Parameters ---------- sorting_analyzer : SortingAnalyzer A SortingAnalyzer object. - synchrony_sizes : list or tuple, default: (2, 4, 8) - The synchrony sizes to compute. unit_ids : list or None, default: None List of unit ids to compute the synchrony metrics. If None, all units are used. @@ -583,19 +581,16 @@ def compute_synchrony_metrics(sorting_analyzer, synchrony_sizes=(2, 4, 8), unit_ ------- sync_spike_{X} : dict The synchrony metric for synchrony size X. - Returns are as many as synchrony_sizes. 
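The counting idea behind these metrics can be illustrated with a toy NumPy
sketch (a sketch of the concept, not the library's implementation)::

    import numpy as np

    # hypothetical spike vector: which sample each spike lands on, and its unit
    sample_index = np.array([10, 10, 10, 10, 25, 40, 40])
    unit_index = np.array([0, 1, 2, 3, 0, 1, 2])

    # complexity = how many spikes share a given sample index
    _, inverse, counts = np.unique(sample_index, return_inverse=True, return_counts=True)
    complexity = counts[inverse]  # per spike: [4 4 4 4 1 2 2]

    # a unit's count for synchrony size k tallies its spikes with complexity >= k
    for k in (2, 4, 8):
        print(k, np.sum((complexity >= k) & (unit_index == 0)))  # -> 1, 1, 0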
References ---------- Based on concepts described in [Grün]_ This code was adapted from `Elephant - Electrophysiology Analysis Toolkit `_ """ - assert min(synchrony_sizes) > 1, "Synchrony sizes must be greater than 1" - # Sort the synchrony times so we can slice numpy arrays, instead of using dicts - synchrony_sizes_np = np.array(synchrony_sizes, dtype=np.int16) - synchrony_sizes_np.sort() - res = namedtuple("synchrony_metrics", [f"sync_spike_{size}" for size in synchrony_sizes_np]) + synchrony_sizes = np.array([2, 4, 8]) + + res = namedtuple("synchrony_metrics", [f"sync_spike_{size}" for size in synchrony_sizes]) sorting = sorting_analyzer.sorting @@ -606,10 +601,10 @@ def compute_synchrony_metrics(sorting_analyzer, synchrony_sizes=(2, 4, 8), unit_ spikes = sorting.to_spike_vector() all_unit_ids = sorting.unit_ids - synchrony_counts = get_synchrony_counts(spikes, synchrony_sizes_np, all_unit_ids) + synchrony_counts = _get_synchrony_counts(spikes, all_unit_ids, synchrony_sizes=synchrony_sizes) synchrony_metrics_dict = {} - for sync_idx, synchrony_size in enumerate(synchrony_sizes_np): + for sync_idx, synchrony_size in enumerate(synchrony_sizes): sync_id_metrics_dict = {} for i, unit_id in enumerate(all_unit_ids): if unit_id not in unit_ids: @@ -623,7 +618,7 @@ def compute_synchrony_metrics(sorting_analyzer, synchrony_sizes=(2, 4, 8), unit_ return res(**synchrony_metrics_dict) -_default_params["synchrony"] = dict(synchrony_sizes=(2, 4, 8)) +_default_params["synchrony"] = dict() def compute_firing_ranges(sorting_analyzer, bin_size_s=5, percentiles=(5, 95), unit_ids=None): diff --git a/src/spikeinterface/qualitymetrics/tests/test_metrics_functions.py b/src/spikeinterface/qualitymetrics/tests/test_metrics_functions.py index 4c0890b62b..f51dc3e884 100644 --- a/src/spikeinterface/qualitymetrics/tests/test_metrics_functions.py +++ b/src/spikeinterface/qualitymetrics/tests/test_metrics_functions.py @@ -39,7 +39,7 @@ compute_firing_ranges, compute_amplitude_cv_metrics, compute_sd_ratio, - get_synchrony_counts, + _get_synchrony_counts, compute_quality_metrics, ) @@ -352,7 +352,7 @@ def test_synchrony_counts_no_sync(): one_spike["sample_index"] = spike_times one_spike["unit_index"] = spike_units - sync_count = get_synchrony_counts(one_spike, np.array((2)), [0]) + sync_count = _get_synchrony_counts(one_spike, [0]) assert np.all(sync_count[0] == np.array([0])) @@ -372,7 +372,7 @@ def test_synchrony_counts_one_sync(): two_spikes["sample_index"] = np.concatenate((spike_indices, added_spikes_indices)) two_spikes["unit_index"] = np.concatenate((spike_labels, added_spikes_labels)) - sync_count = get_synchrony_counts(two_spikes, np.array((2)), [0, 1]) + sync_count = _get_synchrony_counts(two_spikes, [0, 1]) assert np.all(sync_count[0] == np.array([1, 1])) @@ -392,7 +392,7 @@ def test_synchrony_counts_one_quad_sync(): four_spikes["sample_index"] = np.concatenate((spike_indices, added_spikes_indices)) four_spikes["unit_index"] = np.concatenate((spike_labels, added_spikes_labels)) - sync_count = get_synchrony_counts(four_spikes, np.array((2, 4)), [0, 1, 2, 3]) + sync_count = _get_synchrony_counts(four_spikes, [0, 1, 2, 3]) assert np.all(sync_count[0] == np.array([1, 1, 1, 1])) assert np.all(sync_count[1] == np.array([1, 1, 1, 1])) @@ -409,7 +409,7 @@ def test_synchrony_counts_not_all_units(): three_spikes["sample_index"] = np.concatenate((spike_indices, added_spikes_indices)) three_spikes["unit_index"] = np.concatenate((spike_labels, added_spikes_labels)) - sync_count = get_synchrony_counts(three_spikes, 
np.array((2)), [0, 1, 2]) + sync_count = _get_synchrony_counts(three_spikes, [0, 1, 2]) assert np.all(sync_count[0] == np.array([0, 1, 1])) @@ -610,9 +610,9 @@ def test_calculate_rp_violations(sorting_analyzer_violations): def test_synchrony_metrics(sorting_analyzer_simple): sorting_analyzer = sorting_analyzer_simple sorting = sorting_analyzer.sorting - synchrony_sizes = (2, 3, 4) - synchrony_metrics = compute_synchrony_metrics(sorting_analyzer, synchrony_sizes=synchrony_sizes) - print(synchrony_metrics) + synchrony_metrics = compute_synchrony_metrics(sorting_analyzer) + + synchrony_sizes = np.array([2, 4, 8]) # check returns for size in synchrony_sizes: @@ -625,10 +625,8 @@ def test_synchrony_metrics(sorting_analyzer_simple): sorting_sync = add_synchrony_to_sorting(sorting, sync_event_ratio=sync_level) sorting_analyzer_sync = create_sorting_analyzer(sorting_sync, sorting_analyzer.recording, format="memory") - previous_synchrony_metrics = compute_synchrony_metrics( - previous_sorting_analyzer, synchrony_sizes=synchrony_sizes - ) - current_synchrony_metrics = compute_synchrony_metrics(sorting_analyzer_sync, synchrony_sizes=synchrony_sizes) + previous_synchrony_metrics = compute_synchrony_metrics(previous_sorting_analyzer) + current_synchrony_metrics = compute_synchrony_metrics(sorting_analyzer_sync) print(current_synchrony_metrics) # check that all values increased for i, col in enumerate(previous_synchrony_metrics._fields): @@ -647,22 +645,17 @@ def test_synchrony_metrics_unit_id_subset(sorting_analyzer_simple): unit_ids_subset = [3, 7] - synchrony_sizes = (2,) - (synchrony_metrics,) = compute_synchrony_metrics( - sorting_analyzer_simple, synchrony_sizes=synchrony_sizes, unit_ids=unit_ids_subset - ) + synchrony_metrics = compute_synchrony_metrics(sorting_analyzer_simple, unit_ids=unit_ids_subset) - assert list(synchrony_metrics.keys()) == [3, 7] + assert list(synchrony_metrics.sync_spike_2.keys()) == [3, 7] + assert list(synchrony_metrics.sync_spike_4.keys()) == [3, 7] + assert list(synchrony_metrics.sync_spike_8.keys()) == [3, 7] def test_synchrony_metrics_no_unit_ids(sorting_analyzer_simple): - # all_unit_ids = sorting_analyzer_simple.sorting.unit_ids - - synchrony_sizes = (2,) - (synchrony_metrics,) = compute_synchrony_metrics(sorting_analyzer_simple, synchrony_sizes=synchrony_sizes) - - assert np.all(list(synchrony_metrics.keys()) == sorting_analyzer_simple.unit_ids) + synchrony_metrics = compute_synchrony_metrics(sorting_analyzer_simple) + assert np.all(list(synchrony_metrics.sync_spike_2.keys()) == sorting_analyzer_simple.unit_ids) @pytest.mark.sortingcomponents From 45eb5b74e58061ee04dcb2a4bba10dbcf2a2c892 Mon Sep 17 00:00:00 2001 From: chrishalcrow <57948917+chrishalcrow@users.noreply.github.com> Date: Mon, 2 Dec 2024 09:31:23 +0000 Subject: [PATCH 34/45] Add warning and ability to pass synchrony_sizes --- src/spikeinterface/qualitymetrics/__init__.py | 2 +- src/spikeinterface/qualitymetrics/misc_metrics.py | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/src/spikeinterface/qualitymetrics/__init__.py b/src/spikeinterface/qualitymetrics/__init__.py index 9d604f6ae2..754c82d8e3 100644 --- a/src/spikeinterface/qualitymetrics/__init__.py +++ b/src/spikeinterface/qualitymetrics/__init__.py @@ -6,4 +6,4 @@ get_default_qm_params, ) from .pca_metrics import get_quality_pca_metric_list -from .misc_metrics import get_synchrony_counts +from .misc_metrics import _get_synchrony_counts diff --git a/src/spikeinterface/qualitymetrics/misc_metrics.py 
b/src/spikeinterface/qualitymetrics/misc_metrics.py index b0e0a0ad19..2f178c46f3 100644 --- a/src/spikeinterface/qualitymetrics/misc_metrics.py +++ b/src/spikeinterface/qualitymetrics/misc_metrics.py @@ -565,7 +565,7 @@ def _get_synchrony_counts(spikes, all_unit_ids, synchrony_sizes=np.array([2, 4, return synchrony_counts -def compute_synchrony_metrics(sorting_analyzer, unit_ids=None): +def compute_synchrony_metrics(sorting_analyzer, unit_ids=None, synchrony_sizes=None): """ Compute synchrony metrics. Synchrony metrics represent the rate of occurrences of spikes at the exact same sample index, with synchrony sizes 2, 4 and 8. @@ -588,6 +588,10 @@ def compute_synchrony_metrics(sorting_analyzer, unit_ids=None): This code was adapted from `Elephant - Electrophysiology Analysis Toolkit `_ """ + if synchrony_sizes is not None: + warning_message = "Custom `synchrony_sizes` is deprecated; the `synchrony_metrics` will be computed using `synchrony_sizes = [2,4,8]`" + warnings.warn(warning_message) + synchrony_sizes = np.array([2, 4, 8]) res = namedtuple("synchrony_metrics", [f"sync_spike_{size}" for size in synchrony_sizes]) From 039b408a59ce965b91908de82d0bc55114f8655e Mon Sep 17 00:00:00 2001 From: chrishalcrow <57948917+chrishalcrow@users.noreply.github.com> Date: Mon, 2 Dec 2024 13:43:39 +0000 Subject: [PATCH 35/45] move backwards compat to `_handle_backward_compatibility_on_load` --- .../postprocessing/template_metrics.py | 25 ++++++++++--------- .../quality_metric_calculator.py | 14 +++++------ 2 files changed, 20 insertions(+), 19 deletions(-) diff --git a/src/spikeinterface/postprocessing/template_metrics.py b/src/spikeinterface/postprocessing/template_metrics.py index 477ad04440..7de6e8766a 100644 --- a/src/spikeinterface/postprocessing/template_metrics.py +++ b/src/spikeinterface/postprocessing/template_metrics.py @@ -88,9 +88,22 @@ class ComputeTemplateMetrics(AnalyzerExtension): need_recording = False use_nodepipeline = False need_job_kwargs = False + need_backward_compatibility_on_load = True min_channels_for_multi_channel_warning = 10 + def _handle_backward_compatibility_on_load(self): + + # For backwards compatibility - this reformats metrics_kwargs as metric_params + if (metrics_kwargs := self.params.get("metrics_kwargs")) is not None: + + metric_params = {} + for metric_name in self.params["metric_names"]: + metric_params[metric_name] = deepcopy(metrics_kwargs) + self.params["metric_params"] = metric_params + + del self.params["metrics_kwargs"] + def _set_params( self, metric_names=None, @@ -344,18 +357,6 @@ def _run(self, verbose=False): def _get_data(self): return self.data["metrics"] - def load_params(self): - AnalyzerExtension.load_params(self) - # For backwards compatibility - this reformats metrics_kwargs as metric_params - if (metrics_kwargs := self.params.get("metrics_kwargs")) is not None: - - metric_params = {} - for metric_name in self.params["metric_names"]: - metric_params[metric_name] = deepcopy(metrics_kwargs) - self.params["metric_params"] = metric_params - - del self.params["metrics_kwargs"] - register_result_extension(ComputeTemplateMetrics) compute_template_metrics = ComputeTemplateMetrics.function_factory() diff --git a/src/spikeinterface/qualitymetrics/quality_metric_calculator.py b/src/spikeinterface/qualitymetrics/quality_metric_calculator.py index e7e7c244ea..d71450853f 100644 --- a/src/spikeinterface/qualitymetrics/quality_metric_calculator.py +++ b/src/spikeinterface/qualitymetrics/quality_metric_calculator.py @@ -55,6 +55,13 @@ class 
ComputeQualityMetrics(AnalyzerExtension): need_recording = False use_nodepipeline = False need_job_kwargs = True + need_backward_compatibility_on_load = True + + def _handle_backward_compatibility_on_load(self): + # For backwards compatibility - this renames qm_params as metric_params + if (qm_params := self.params.get("qm_params")) is not None: + self.params["metric_params"] = qm_params + del self.params["qm_params"] def _set_params( self, @@ -262,13 +269,6 @@ def _run(self, verbose=False, **job_kwargs): def _get_data(self): return self.data["metrics"] - def load_params(self): - AnalyzerExtension.load_params(self) - # For backwards compatibility - this renames qm_params as metric_params - if (qm_params := self.params.get("qm_params")) is not None: - self.params["metric_params"] = qm_params - del self.params["qm_params"] - register_result_extension(ComputeQualityMetrics) compute_quality_metrics = ComputeQualityMetrics.function_factory() From 771de98c6ddd07e064cb28d6e2450e599d54d2a6 Mon Sep 17 00:00:00 2001 From: chrishalcrow <57948917+chrishalcrow@users.noreply.github.com> Date: Tue, 3 Dec 2024 09:51:28 +0000 Subject: [PATCH 36/45] Respond to z-man --- src/spikeinterface/postprocessing/template_metrics.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/spikeinterface/postprocessing/template_metrics.py b/src/spikeinterface/postprocessing/template_metrics.py index 7de6e8766a..da917e673c 100644 --- a/src/spikeinterface/postprocessing/template_metrics.py +++ b/src/spikeinterface/postprocessing/template_metrics.py @@ -64,8 +64,7 @@ class ComputeTemplateMetrics(AnalyzerExtension): Whether to compute multi-channel metrics delete_existing_metrics : bool, default: False If True, any template metrics attached to the `sorting_analyzer` are deleted. If False, any metrics which were previously calculated but are not included in `metric_names` are kept, provided the `metric_params` are unchanged. - metric_params : dict of dicts - metric_params : dict of dicts or None + metric_params : dict of dicts or None, default: None Dictionary with parameters for template metrics calculation. Default parameters can be obtained with: `si.postprocessing.template_metrics.get_default_tm_params()` @@ -138,7 +137,7 @@ def _set_params( if metrics_kwargs is not None and metric_params is None: deprecation_msg = "`metrics_kwargs` is deprecated and will be removed in version 0.104.0. Please use metric_params instead" - warnings.warn(deprecation_msg, category=DeprecationWarning) + deprecation_msg = "`metrics_kwargs` is deprecated and will be removed in version 0.104.0. Please use `metric_params` instead" metric_params = {} for metric_name in metric_names: From de7210a43135c1164ee2f214e117543441935375 Mon Sep 17 00:00:00 2001 From: chrishalcrow <57948917+chrishalcrow@users.noreply.github.com> Date: Tue, 3 Dec 2024 09:52:16 +0000 Subject: [PATCH 37/45] oups --- src/spikeinterface/postprocessing/template_metrics.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/spikeinterface/postprocessing/template_metrics.py b/src/spikeinterface/postprocessing/template_metrics.py index da917e673c..1969480503 100644 --- a/src/spikeinterface/postprocessing/template_metrics.py +++ b/src/spikeinterface/postprocessing/template_metrics.py @@ -136,7 +136,7 @@ def _set_params( metric_names += get_multi_channel_template_metric_names() if metrics_kwargs is not None and metric_params is None: - deprecation_msg = "`metrics_kwargs` is deprecated and will be removed in version 0.104.0. 
Please use metric_params instead" + deprecation_msg = "`metrics_kwargs` is deprecated and will be removed in version 0.104.0. Please use `metric_params` instead" deprecation_msg = "`metrics_kwargs` is deprecated and will be removed in version 0.104.0. Please use `metric_params` instead" metric_params = {} From 2081916e33d467145223a7c3099aca556f6e3864 Mon Sep 17 00:00:00 2001 From: chrishalcrow <57948917+chrishalcrow@users.noreply.github.com> Date: Tue, 3 Dec 2024 14:18:15 +0000 Subject: [PATCH 38/45] respond to review --- src/spikeinterface/qualitymetrics/misc_metrics.py | 10 ++++++---- .../qualitymetrics/tests/test_metrics_functions.py | 8 ++++---- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/src/spikeinterface/qualitymetrics/misc_metrics.py b/src/spikeinterface/qualitymetrics/misc_metrics.py index 2f178c46f3..6007de379c 100644 --- a/src/spikeinterface/qualitymetrics/misc_metrics.py +++ b/src/spikeinterface/qualitymetrics/misc_metrics.py @@ -520,7 +520,7 @@ def compute_sliding_rp_violations( ) -def _get_synchrony_counts(spikes, all_unit_ids, synchrony_sizes=np.array([2, 4, 8])): +def _get_synchrony_counts(spikes, synchrony_sizes, all_unit_ids): """ Compute synchrony counts, the number of simultaneous spikes with sizes `synchrony_sizes`. @@ -530,7 +530,7 @@ def _get_synchrony_counts(spikes, all_unit_ids, synchrony_sizes=np.array([2, 4, Structured numpy array with fields ("sample_index", "unit_index", "segment_index"). all_unit_ids : list or None, default: None List of unit ids to compute the synchrony metrics. Expecting all units. - synchrony_sizes : numpy array + synchrony_sizes : None or np.array, default: None The synchrony sizes to compute. Should be pre-sorted. Returns @@ -576,6 +576,8 @@ def compute_synchrony_metrics(sorting_analyzer, unit_ids=None, synchrony_sizes=N A SortingAnalyzer object. unit_ids : list or None, default: None List of unit ids to compute the synchrony metrics. If None, all units are used. + synchrony_sizes: None, default: None + Deprecated argument. Please use private `_get_synchrony_counts` if you need finer control over number of synchronous spikes. 
Returns ------- @@ -590,7 +592,7 @@ def compute_synchrony_metrics(sorting_analyzer, unit_ids=None, synchrony_sizes=N if synchrony_sizes is not None: warning_message = "Custom `synchrony_sizes` is deprecated; the `synchrony_metrics` will be computed using `synchrony_sizes = [2,4,8]`" - warnings.warn(warning_message) + warnings.warn(warning_message, DeprecationWarning, stacklevel=2) synchrony_sizes = np.array([2, 4, 8]) @@ -605,7 +607,7 @@ def compute_synchrony_metrics(sorting_analyzer, unit_ids=None, synchrony_sizes=N spikes = sorting.to_spike_vector() all_unit_ids = sorting.unit_ids - synchrony_counts = _get_synchrony_counts(spikes, all_unit_ids, synchrony_sizes=synchrony_sizes) + synchrony_counts = _get_synchrony_counts(spikes, synchrony_sizes, all_unit_ids) synchrony_metrics_dict = {} for sync_idx, synchrony_size in enumerate(synchrony_sizes): diff --git a/src/spikeinterface/qualitymetrics/tests/test_metrics_functions.py b/src/spikeinterface/qualitymetrics/tests/test_metrics_functions.py index f51dc3e884..ae4c7ab62d 100644 --- a/src/spikeinterface/qualitymetrics/tests/test_metrics_functions.py +++ b/src/spikeinterface/qualitymetrics/tests/test_metrics_functions.py @@ -352,7 +352,7 @@ def test_synchrony_counts_no_sync(): one_spike["sample_index"] = spike_times one_spike["unit_index"] = spike_units - sync_count = _get_synchrony_counts(one_spike, [0]) + sync_count = _get_synchrony_counts(one_spike, np.array([2, 4, 8]), [0]) assert np.all(sync_count[0] == np.array([0])) @@ -372,7 +372,7 @@ def test_synchrony_counts_one_sync(): two_spikes["sample_index"] = np.concatenate((spike_indices, added_spikes_indices)) two_spikes["unit_index"] = np.concatenate((spike_labels, added_spikes_labels)) - sync_count = _get_synchrony_counts(two_spikes, [0, 1]) + sync_count = _get_synchrony_counts(two_spikes, np.array([2, 4, 8]), [0, 1]) assert np.all(sync_count[0] == np.array([1, 1])) @@ -392,7 +392,7 @@ def test_synchrony_counts_one_quad_sync(): four_spikes["sample_index"] = np.concatenate((spike_indices, added_spikes_indices)) four_spikes["unit_index"] = np.concatenate((spike_labels, added_spikes_labels)) - sync_count = _get_synchrony_counts(four_spikes, [0, 1, 2, 3]) + sync_count = _get_synchrony_counts(four_spikes, np.array([2, 4, 8]), [0, 1, 2, 3]) assert np.all(sync_count[0] == np.array([1, 1, 1, 1])) assert np.all(sync_count[1] == np.array([1, 1, 1, 1])) @@ -409,7 +409,7 @@ def test_synchrony_counts_not_all_units(): three_spikes["sample_index"] = np.concatenate((spike_indices, added_spikes_indices)) three_spikes["unit_index"] = np.concatenate((spike_labels, added_spikes_labels)) - sync_count = _get_synchrony_counts(three_spikes, [0, 1, 2]) + sync_count = _get_synchrony_counts(three_spikes, np.array([2, 4, 8]), [0, 1, 2]) assert np.all(sync_count[0] == np.array([0, 1, 1])) From 09ff624817b53d30d35f1e4f9060edabab45a308 Mon Sep 17 00:00:00 2001 From: Alessio Buccino Date: Wed, 4 Dec 2024 10:33:40 +0100 Subject: [PATCH 39/45] Remove venv in full-tests-with-codecov --- .../actions/build-test-environment/action.yml | 36 +++++++------------ .github/workflows/all-tests.yml | 2 +- 2 files changed, 14 insertions(+), 24 deletions(-) diff --git a/.github/actions/build-test-environment/action.yml b/.github/actions/build-test-environment/action.yml index 723e8a702f..a212bd64d5 100644 --- a/.github/actions/build-test-environment/action.yml +++ b/.github/actions/build-test-environment/action.yml @@ -1,41 +1,20 @@ name: Install packages description: This action installs the package and its dependencies for testing 
-inputs:
-  python-version:
-    description: 'Python version to set up'
-    required: false
-  os:
-    description: 'Operating system to set up'
-    required: false
-
 runs:
   using: "composite"
   steps:
     - name: Install dependencies
      run: |
-        sudo apt install git
         git config --global user.email "CI@example.com"
         git config --global user.name "CI Almighty"
-        python -m venv ${{ github.workspace }}/test_env # Environment used in the caching step
-        python -m pip install -U pip # Official recommended way
-        source ${{ github.workspace }}/test_env/bin/activate
         pip install tabulate # This produces summaries at the end
         pip install -e .[test,extractors,streaming_extractors,test_extractors,full]
       shell: bash
-    - name: Force installation of latest dev from key-packages when running dev (not release)
-      run: |
-        source ${{ github.workspace }}/test_env/bin/activate
-        spikeinterface_is_dev_version=$(python -c "import spikeinterface; print(spikeinterface.DEV_MODE)")
-        if [ $spikeinterface_is_dev_version = "True" ]; then
-          echo "Running spikeinterface dev version"
-          pip install --no-cache-dir git+https://github.com/NeuralEnsemble/python-neo
-          pip install --no-cache-dir git+https://github.com/SpikeInterface/probeinterface
-        fi
-        echo "Running tests for release, using pyproject.toml versions of neo and probeinterface"
+    - name: Install git-annex
       shell: bash
-    - name: git-annex install
       run: |
+        pip install datalad-installer
         wget https://downloads.kitenet.net/git-annex/linux/current/git-annex-standalone-amd64.tar.gz
         mkdir /home/runner/work/installation
         mv git-annex-standalone-amd64.tar.gz /home/runner/work/installation/
         tar xvzf git-annex-standalone-amd64.tar.gz
         echo "$(pwd)/git-annex.linux" >> $GITHUB_PATH
         cd $workdir
+        git config --global filter.annex.process "git-annex filter-process" # recommended for efficiency
+    - name: Force installation of latest dev from key-packages when running dev (not release)
+      run: |
+        source ${{ github.workspace }}/test_env/bin/activate
+        spikeinterface_is_dev_version=$(python -c "import spikeinterface; print(spikeinterface.DEV_MODE)")
+        if [ $spikeinterface_is_dev_version = "True" ]; then
+          echo "Running spikeinterface dev version"
+          pip install --no-cache-dir git+https://github.com/NeuralEnsemble/python-neo
+          pip install --no-cache-dir git+https://github.com/SpikeInterface/probeinterface
+        fi
+        echo "Running tests for release, using pyproject.toml versions of neo and probeinterface"
       shell: bash
diff --git a/.github/workflows/all-tests.yml b/.github/workflows/all-tests.yml
index dcaec8b272..a9c840d5d5 100644
--- a/.github/workflows/all-tests.yml
+++ b/.github/workflows/all-tests.yml
@@ -47,7 +47,7 @@ jobs:
           echo "$file was changed"
         done

-      - name: Set testing environment # This decides which tests are run and whether to install especial dependencies
+      - name: Set testing environment # This decides which tests are run and whether to install special dependencies
         shell: bash
         run: |
           changed_files="${{ steps.changed-files.outputs.all_changed_files }}"

From 8500b9d0f4488794dcc6d6b71afec2ebf4697b1d Mon Sep 17 00:00:00 2001
From: Alessio Buccino
Date: Wed, 4 Dec 2024 10:48:11 +0100
Subject: [PATCH 40/45] Oups

---
 .github/actions/build-test-environment/action.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/actions/build-test-environment/action.yml b/.github/actions/build-test-environment/action.yml
index a212bd64d5..c2524d2c16 100644
--- a/.github/actions/build-test-environment/action.yml
+++ b/.github/actions/build-test-environment/action.yml
@@ -26,7 +26,6 @@ runs:
         git config --global filter.annex.process "git-annex filter-process" # recommended for efficiency
     - name: Force installation of latest dev from key-packages when running dev (not release)
       run: |
-        source ${{ github.workspace }}/test_env/bin/activate
         spikeinterface_is_dev_version=$(python -c "import spikeinterface; print(spikeinterface.DEV_MODE)")
         if [ $spikeinterface_is_dev_version = "True" ]; then
           echo "Running spikeinterface dev version"

From 922606b6d4d279da103b7e7edde3ecb79a76e3c8 Mon Sep 17 00:00:00 2001
From: Alessio Buccino
Date: Wed, 4 Dec 2024 11:16:01 +0100
Subject: [PATCH 41/45] Oups 2

---
 .github/workflows/full-test-with-codecov.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/workflows/full-test-with-codecov.yml b/.github/workflows/full-test-with-codecov.yml
index 407c614ebf..f8ed2aa7a9 100644
--- a/.github/workflows/full-test-with-codecov.yml
+++ b/.github/workflows/full-test-with-codecov.yml
@@ -45,7 +45,6 @@ jobs:
         env:
           HDF5_PLUGIN_PATH: ${{ github.workspace }}/hdf5_plugin_path_maxwell
         run: |
-          source ${{ github.workspace }}/test_env/bin/activate
           pytest -m "not sorters_external" --cov=./ --cov-report xml:./coverage.xml -vv -ra --durations=0 | tee report_full.txt; test ${PIPESTATUS[0]} -eq 0 || exit 1
           echo "# Timing profile of full tests" >> $GITHUB_STEP_SUMMARY
           python ./.github/scripts/build_job_summary.py report_full.txt >> $GITHUB_STEP_SUMMARY

From 986a74a30c94a49ed2a2dd6183e8ddc078105b85 Mon Sep 17 00:00:00 2001
From: Alessio Buccino
Date: Thu, 5 Dec 2024 09:02:31 +0100
Subject: [PATCH 42/45] Pin ONE-API version

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index fc09ad9198..22fbdc7f22 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -73,7 +73,7 @@ extractors = [
 ]

 streaming_extractors = [
-    "ONE-api>=2.7.0", # alf sorter and streaming IBL
+    "ONE-api>=2.7.0,<2.10.0", # alf sorter and streaming IBL
     "ibllib>=2.36.0", # streaming IBL
     # Following dependencies are for streaming with nwb files
     "pynwb>=2.6.0",

From 96da22f7ac509bfc83a2a90eed06d58e3f71f990 Mon Sep 17 00:00:00 2001
From: chrishalcrow <57948917+chrishalcrow@users.noreply.github.com>
Date: Thu, 5 Dec 2024 10:00:30 +0000
Subject: [PATCH 43/45] Correct method default in docstring

---
 src/spikeinterface/postprocessing/unit_locations.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/spikeinterface/postprocessing/unit_locations.py b/src/spikeinterface/postprocessing/unit_locations.py
index 3f6dd47eec..bea06fd8f5 100644
--- a/src/spikeinterface/postprocessing/unit_locations.py
+++ b/src/spikeinterface/postprocessing/unit_locations.py
@@ -26,7 +26,7 @@ class ComputeUnitLocations(AnalyzerExtension):
     ----------
     sorting_analyzer : SortingAnalyzer
         A SortingAnalyzer object
-    method : "center_of_mass" | "monopolar_triangulation" | "grid_convolution", default: "center_of_mass"
+    method : "monopolar_triangulation" or "center_of_mass" or "grid_convolution", default: "monopolar_triangulation"
         The method to use for localization
     **method_kwargs : dict, default: {}
         Kwargs which are passed to the method function. These can be found in the docstrings of `compute_center_of_mass`, `compute_grid_convolution` and `compute_monopolar_triangulation`.
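For context: the docstring corrected above (and retouched in the next patch) documents the
`unit_locations` analyzer extension, whose actual default method is "monopolar_triangulation".
A minimal usage sketch, ours rather than part of the patch series, assuming only the public
`SortingAnalyzer` API and spikeinterface's built-in toy-data generator (all sizes and
durations below are arbitrary):

    import spikeinterface.full as si

    # toy recording/sorting pair; any analyzer with templates computed would do
    recording, sorting = si.generate_ground_truth_recording(durations=[10.0], num_units=5)
    analyzer = si.create_sorting_analyzer(sorting, recording)
    analyzer.compute(["random_spikes", "templates"])

    # no `method` argument: the documented default, monopolar_triangulation, applies
    locations = analyzer.compute("unit_locations").get_data()

    # the other documented options are selected explicitly
    com_locations = analyzer.compute("unit_locations", method="center_of_mass").get_data()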
From 10d459f3d45315cee3079e3b14428222487ef9c6 Mon Sep 17 00:00:00 2001 From: chrishalcrow <57948917+chrishalcrow@users.noreply.github.com> Date: Thu, 5 Dec 2024 15:18:59 +0000 Subject: [PATCH 44/45] change or to | in docstring --- src/spikeinterface/postprocessing/unit_locations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/spikeinterface/postprocessing/unit_locations.py b/src/spikeinterface/postprocessing/unit_locations.py index bea06fd8f5..df19458316 100644 --- a/src/spikeinterface/postprocessing/unit_locations.py +++ b/src/spikeinterface/postprocessing/unit_locations.py @@ -26,7 +26,7 @@ class ComputeUnitLocations(AnalyzerExtension): ---------- sorting_analyzer : SortingAnalyzer A SortingAnalyzer object - method : "monopolar_triangulation" or "center_of_mass" or "grid_convolution", default: "monopolar_triangulation" + method : "monopolar_triangulation" | "center_of_mass" | "grid_convolution", default: "monopolar_triangulation" The method to use for localization **method_kwargs : dict, default: {} Kwargs which are passed to the method function. These can be found in the docstrings of `compute_center_of_mass`, `compute_grid_convolution` and `compute_monopolar_triangulation`. From 4c7b6a5be65af4aa4ce6461e84956455f970942f Mon Sep 17 00:00:00 2001 From: Pierre Yger Date: Wed, 11 Dec 2024 09:53:59 +0100 Subject: [PATCH 45/45] Patch --- src/spikeinterface/widgets/unit_waveforms.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/spikeinterface/widgets/unit_waveforms.py b/src/spikeinterface/widgets/unit_waveforms.py index c593836061..3b31eacee5 100644 --- a/src/spikeinterface/widgets/unit_waveforms.py +++ b/src/spikeinterface/widgets/unit_waveforms.py @@ -565,7 +565,7 @@ def _update_plot(self, change): channel_locations = self.sorting_analyzer.get_channel_locations() else: unit_indices = [list(self.templates.unit_ids).index(unit_id) for unit_id in unit_ids] - templates = self.templates.templates_array[unit_indices] + templates = self.templates.get_dense_templates()[unit_indices] templates_shadings = None channel_locations = self.templates.get_channel_locations()
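
For context: the final patch reads most naturally as a sparsity fix. On a sparse `Templates`
object, the raw `templates_array` stores only each unit's active channels, while
`get_dense_templates()` zero-pads back to the full probe and `get_channel_locations()` returns
one location per channel. The commit message does not say this, so treat it as our reading.
A plain-numpy illustration of the shape mismatch the widget would otherwise hit (all sizes
invented for the example):

    import numpy as np

    num_units, num_samples, num_channels = 3, 90, 16
    active_per_unit = 4  # a sparse Templates keeps only each unit's active channels

    sparse_array = np.zeros((num_units, num_samples, active_per_unit))  # like .templates_array when sparse
    dense_array = np.zeros((num_units, num_samples, num_channels))  # like .get_dense_templates()
    channel_locations = np.zeros((num_channels, 2))  # one (x, y) per probe channel

    # the widget pairs template channel columns with channel locations, so only
    # the dense array lines up channel-for-channel
    assert dense_array.shape[2] == channel_locations.shape[0]
    assert sparse_array.shape[2] != channel_locations.shape[0]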