Skip to content

Commit

Permalink
Merge branch 'main' of github.com:SpikeInterface/spikeinterface into …
Browse files Browse the repository at this point in the history
…refactor_GTStudy
  • Loading branch information
samuelgarcia committed Oct 7, 2024
2 parents 68a8691 + 5e13593 commit d1afea4
Show file tree
Hide file tree
Showing 33 changed files with 923 additions and 274 deletions.
8 changes: 6 additions & 2 deletions .github/scripts/determine_testing_environment.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
sortingcomponents_changed = False
generation_changed = False
stream_extractors_changed = False
github_actions_changed = False


for changed_file in changed_files_in_the_pull_request_paths:
Expand Down Expand Up @@ -78,9 +79,12 @@
sorters_internal_changed = True
else:
sorters_changed = True
elif ".github" in changed_file.parts:
if "workflows" in changed_file.parts:
github_actions_changed = True


run_everything = core_changed or pyproject_toml_changed or neobaseextractor_changed
run_everything = core_changed or pyproject_toml_changed or neobaseextractor_changed or github_actions_changed
run_generation_tests = run_everything or generation_changed
run_extractor_tests = run_everything or extractors_changed or plexon2_changed
run_preprocessing_tests = run_everything or preprocessing_changed
Expand All @@ -96,7 +100,7 @@
run_sorters_test = run_everything or sorters_changed
run_internal_sorters_test = run_everything or run_sortingcomponents_tests or sorters_internal_changed

run_streaming_extractors_test = stream_extractors_changed
run_streaming_extractors_test = stream_extractors_changed or github_actions_changed

install_plexon_dependencies = plexon2_changed

Expand Down
3 changes: 2 additions & 1 deletion .github/workflows/all-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ on:
env:
KACHERY_CLOUD_CLIENT_ID: ${{ secrets.KACHERY_CLOUD_CLIENT_ID }}
KACHERY_CLOUD_PRIVATE_KEY: ${{ secrets.KACHERY_CLOUD_PRIVATE_KEY }}
KACHERY_ZONE: ${{ secrets.KACHERY_ZONE }}

concurrency: # Cancel previous workflows on the same pull request
group: ${{ github.workflow }}-${{ github.ref }}
Expand All @@ -25,7 +26,7 @@ jobs:
fail-fast: false
matrix:
python-version: ["3.9", "3.12"] # Lower and higher versions we support
os: [macos-13, windows-latest, ubuntu-latest]
os: [macos-latest, windows-latest, ubuntu-latest]
steps:
- uses: actions/checkout@v4
- name: Setup Python ${{ matrix.python-version }}
Expand Down
1 change: 1 addition & 0 deletions .github/workflows/full-test-with-codecov.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ on:
env:
KACHERY_CLOUD_CLIENT_ID: ${{ secrets.KACHERY_CLOUD_CLIENT_ID }}
KACHERY_CLOUD_PRIVATE_KEY: ${{ secrets.KACHERY_CLOUD_PRIVATE_KEY }}
KACHERY_ZONE: ${{ secrets.KACHERY_ZONE }}

jobs:
full-tests-with-codecov:
Expand Down
1 change: 1 addition & 0 deletions doc/development/development.rst
Original file line number Diff line number Diff line change
Expand Up @@ -192,6 +192,7 @@ Miscellaneous Stylistic Conventions
#. Avoid using abbreviations in variable names (e.g. use :code:`recording` instead of :code:`rec`). It is especially important to avoid single letter variables.
#. Use index as singular and indices for plural following the NumPy convention. Avoid idx or indexes. Plus, id and ids are reserved for identifiers (i.e. channel_ids)
#. We use file_path and folder_path (instead of file_name and folder_name) for clarity.
#. For the titles of documentation pages, only capitalize the first letter of the first word and classes or software packages. For example, "How to use a SortingAnalyzer in SpikeInterface".
#. For creating headers to divide sections of code we use the following convention (see issue `#3019 <https://github.com/SpikeInterface/spikeinterface/issues/3019>`_):


Expand Down
2 changes: 1 addition & 1 deletion doc/how_to/combine_recordings.rst
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
Combine Recordings in SpikeInterface
Combine recordings in SpikeInterface
====================================

In this tutorial we will walk through combining multiple recording objects. Sometimes this occurs due to hardware
Expand Down
2 changes: 1 addition & 1 deletion doc/how_to/load_matlab_data.rst
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
Export MATLAB Data to Binary & Load in SpikeInterface
Export MATLAB data to binary & load in SpikeInterface
========================================================

In this tutorial, we will walk through the process of exporting data from MATLAB in a binary format and subsequently loading it using SpikeInterface in Python.
Expand Down
4 changes: 2 additions & 2 deletions doc/how_to/load_your_data_into_sorting.rst
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
Load Your Own Data into a Sorting
=================================
Load your own data into a Sorting object
========================================

Why make a :code:`Sorting`?

Expand Down
2 changes: 1 addition & 1 deletion doc/how_to/process_by_channel_group.rst
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
Process a Recording by Channel Group
Process a recording by channel group
====================================

In this tutorial, we will walk through how to preprocess and sort a recording
Expand Down
2 changes: 1 addition & 1 deletion doc/how_to/viewers.rst
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
Visualize Data
Visualize data
==============

There are several ways to plot signals (raw, preprocessed) and spikes.
Expand Down
66 changes: 66 additions & 0 deletions doc/releases/0.101.2.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
.. _release0.101.2:

SpikeInterface 0.101.2 release notes
------------------------------------

4th October 2024

Minor release with bug fixes

core:

* Fix `random_spikes_selection()` (#3456)
* Expose `backend_options` at the analyzer level to set `storage_options` and `saving_options` (#3446)
* Avoid warnings in `SortingAnalyzer` (#3455)
* Fix `reset_global_job_kwargs` (#3452)
* Allow saving a recordingless analyzer (#3443)
* Fix compute analyzer pipeline with tmp recording (#3433)
* Fix bug in saving zarr recordings (#3432)
* Set `run_info` to `None` for `load_waveforms` (#3430)
* Fix integer overflow in parallel computing (#3426)
* Refactor `pandas` save load and `convert_dtypes` (#3412)
* Add spike-train based lazy `SortingGenerator` (#2227)


extractors:

* Improve IBL recording extractors by PID (#3449)

sorters:

* Get default encoding for `Popen` (#3439)

postprocessing:

* Add `max_threads_per_process` and `mp_context` to pca by channel computation and PCA metrics (#3434)

widgets:

* Fix metrics widgets for convert_dtypes (#3417)
* Fix plot motion for multi-segment (#3414)

motion correction:

* Auto-cast recording to float prior to interpolation (#3415)

documentation:

* Add docstring for `generate_unit_locations` (#3418)
* Add `get_channel_locations` to the base recording API (#3403)

continuous integration:

* Enable testing arm64 Mac architecture in the CI (#3422)
* Add kachery_zone secret (#3416)

testing:

* Relax causal filter tests (#3445)

Contributors:

* @alejoe91
* @h-mayorquin
* @jiumao2
* @samuelgarcia
* @zm711
6 changes: 6 additions & 0 deletions doc/whatisnew.rst
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ Release notes
.. toctree::
:maxdepth: 1

releases/0.101.2.rst
releases/0.101.1.rst
releases/0.101.0.rst
releases/0.100.8.rst
Expand Down Expand Up @@ -44,6 +45,11 @@ Release notes
releases/0.9.1.rst


Version 0.101.2
===============

* Minor release with bug fixes

Version 0.101.1
===============

Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "spikeinterface"
version = "0.101.1"
version = "0.101.2"
authors = [
{ name="Alessio Buccino", email="[email protected]" },
{ name="Samuel Garcia", email="[email protected]" },
Expand Down
34 changes: 29 additions & 5 deletions src/spikeinterface/core/baserecording.py
Original file line number Diff line number Diff line change
Expand Up @@ -608,11 +608,11 @@ def _save(self, format="binary", verbose: bool = False, **save_kwargs):
probegroup = self.get_probegroup()
cached.set_probegroup(probegroup)

time_vectors = self._get_time_vectors()
if time_vectors is not None:
for segment_index, time_vector in enumerate(time_vectors):
if time_vector is not None:
cached.set_times(time_vector, segment_index=segment_index)
for segment_index in range(self.get_num_segments()):
if self.has_time_vector(segment_index):
# the use of get_times is preferred since timestamps are converted to array
time_vector = self.get_times(segment_index=segment_index)
cached.set_times(time_vector, segment_index=segment_index)

return cached

Expand Down Expand Up @@ -746,6 +746,30 @@ def _select_segments(self, segment_indices):

return SelectSegmentRecording(self, segment_indices=segment_indices)

def get_channel_locations(
self,
channel_ids: list | np.ndarray | tuple | None = None,
axes: "xy" | "yz" | "xz" | "xyz" = "xy",
) -> np.ndarray:
"""
Get the physical locations of specified channels.

Parameters
----------
channel_ids : array-like, optional
The IDs of the channels for which to retrieve locations. If None, retrieves locations
for all available channels. Default is None.
axes : "xy" | "yz" | "xz" | "xyz", default: "xy"
The spatial axes to return, specified as a string (e.g., "xy", "xyz"). Default is "xy".
Returns
-------
np.ndarray
A 2D or 3D array of shape (n_channels, n_dimensions) containing the locations of the channels.
The number of dimensions depends on the `axes` argument (e.g., 2 for "xy", 3 for "xyz").
"""
return super().get_channel_locations(channel_ids=channel_ids, axes=axes)

def is_binary_compatible(self) -> bool:
"""
Checks if the recording is "binary" compatible.
Expand Down
2 changes: 1 addition & 1 deletion src/spikeinterface/core/baserecordingsnippets.py
Original file line number Diff line number Diff line change
Expand Up @@ -349,7 +349,7 @@ def set_channel_locations(self, locations, channel_ids=None):
raise ValueError("set_channel_locations(..) destroys the probe description, prefer _set_probes(..)")
self.set_property("location", locations, ids=channel_ids)

def get_channel_locations(self, channel_ids=None, axes: str = "xy"):
def get_channel_locations(self, channel_ids=None, axes: str = "xy") -> np.ndarray:
if channel_ids is None:
channel_ids = self.get_channel_ids()
channel_indices = self.ids_to_indices(channel_ids)
Expand Down
Loading

0 comments on commit d1afea4

Please sign in to comment.