From fea6cb4eb4d8c7c77a59bb48d3042c79ad615075 Mon Sep 17 00:00:00 2001 From: Samuel Garcia Date: Fri, 3 May 2024 11:36:08 +0200 Subject: [PATCH] Remove SharedMemory from SortingAnalyzer and use NumpySorting instead. --- src/spikeinterface/core/numpyextractors.py | 2 +- src/spikeinterface/core/sortinganalyzer.py | 17 ++++++++--------- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/src/spikeinterface/core/numpyextractors.py b/src/spikeinterface/core/numpyextractors.py index 7572c78adb..0e7aba3080 100644 --- a/src/spikeinterface/core/numpyextractors.py +++ b/src/spikeinterface/core/numpyextractors.py @@ -287,7 +287,7 @@ def from_sorting(source_sorting: BaseSorting, with_metadata=False, copy_spike_ve spike_vector = source_sorting.to_spike_vector() if copy_spike_vector: spike_vector = spike_vector.copy() - sorting = NumpySorting(spike_vector, source_sorting.get_sampling_frequency(), source_sorting.unit_ids) + sorting = NumpySorting(spike_vector, source_sorting.get_sampling_frequency(), source_sorting.unit_ids.copy()) if with_metadata: sorting.copy_metadata(source_sorting) return sorting diff --git a/src/spikeinterface/core/sortinganalyzer.py b/src/spikeinterface/core/sortinganalyzer.py index bbc428f2eb..0a3870c9aa 100644 --- a/src/spikeinterface/core/sortinganalyzer.py +++ b/src/spikeinterface/core/sortinganalyzer.py @@ -23,7 +23,7 @@ from .recording_tools import check_probe_do_not_overlap, get_rec_attributes from .core_tools import check_json, retrieve_importing_provenance from .job_tools import split_job_kwargs -from .numpyextractors import SharedMemorySorting +from .numpyextractors import NumpySorting from .sparsity import ChannelSparsity, estimate_sparsity from .sortingfolder import NumpyFolderSorting from .zarrextractors import get_default_zarr_compressor, ZarrSortingExtractor @@ -296,8 +296,9 @@ def create_memory(cls, sorting, recording, sparsity, return_scaled, rec_attribut # a copy is done to avoid shared dict between instances (which can block 
garbage collector) rec_attributes = rec_attributes.copy() - # a copy of sorting is created directly in shared memory format to avoid further duplication of spikes. - sorting_copy = SharedMemorySorting.from_sorting(sorting, with_metadata=True) + # the sorting is copied into memory for fast access + sorting_copy = NumpySorting.from_sorting(sorting, with_metadata=True, copy_spike_vector=True) + sorting_analyzer = SortingAnalyzer( sorting=sorting_copy, recording=recording, @@ -375,8 +376,8 @@ def load_from_binary_folder(cls, folder, recording=None): folder = Path(folder) assert folder.is_dir(), f"This folder does not exists {folder}" - # load internal sorting copy and make it sharedmem - sorting = SharedMemorySorting.from_sorting(NumpyFolderSorting(folder / "sorting"), with_metadata=True) + # load internal sorting copy in memory + sorting = NumpySorting.from_sorting(NumpyFolderSorting(folder / "sorting"), with_metadata=True, copy_spike_vector=True) # load recording if possible if recording is None: @@ -537,11 +538,9 @@ def load_from_zarr(cls, folder, recording=None): zarr_root = zarr.open(folder, mode="r") - # load internal sorting and make it sharedmem + # load internal sorting in memory # TODO propagate storage_options - sorting = SharedMemorySorting.from_sorting( - ZarrSortingExtractor(folder, zarr_group="sorting"), with_metadata=True - ) + sorting = NumpySorting.from_sorting(ZarrSortingExtractor(folder, zarr_group="sorting"), with_metadata=True, copy_spike_vector=True) # load recording if possible if recording is None: