Skip to content

Commit 366ff1d

Browse files
authored
Merge branch 'main' into white_matter
2 parents 5937ab8 + 175df8b commit 366ff1d

7 files changed

Lines changed: 121 additions & 17 deletions

File tree

doc/development/development.rst

Lines changed: 49 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -397,3 +397,52 @@ After this you need to add a block in `Install Sorters <https://github.com/Spike
397397
to describe your sorter.
398398

399399
Finally, make a pull request so we can review the code and incorporate into the sorters module of SpikeInterface!
400+
401+
402+
403+
How to make a release
404+
---------------------
405+
406+
Checklist
407+
^^^^^^^^^
408+
* pyproject.toml: check that the version is ahead of current release. Also, comment out the @ (git dependencies)
409+
* In the top level ``__init__`` (located at ``src/spikeinterface/__init__.py``) set ``DEV_MODE`` to ``False`` (this is used for the docker installations)
410+
* Create a new release note for the appropriate version in ``doc/releases/``, named after the new version tag.
411+
412+
There can be large releases like:
413+
414+
``doc/releases/0.101.0.rst``
415+
416+
Which contain a section called "Main Changes" and minor releases which include only bug fixes like:
417+
418+
``doc/releases/0.101.2.rst``
419+
420+
To collect all the PRs and bug fixes we have a script in:
421+
``doc/scripts/``
422+
called ``auto-release-notes.sh``. Run it with ``bash auto-release-notes.sh`` and it will create the release notes for the module specific changes.
423+
424+
The first time you run the script, GitHub will guide you through an authorization process if you've not already done so.
425+
426+
The signature of the script is:
427+
428+
.. code-block:: bash
429+
430+
bash auto-release-notes.sh <start_date> <end_date>
431+
432+
Where the start date is the date of the last release and the end date is the current date. Dates are in YYYY-MM-DD format.
433+
434+
The date of the last release can be found on `PyPI <https://pypi.org/project/spikeinterface/>`_.
435+
436+
437+
As a specific example:
438+
.. code-block:: bash
439+
440+
bash auto-release-notes.sh 2025-02-19 2025-03-24
441+
442+
* Finish the release notes and merge
443+
* Locally tag the main branch with the newly merged release notes with the new version
444+
* Push the tag to the remote repository which will trigger the release action (.github/workflows/publish-to-pypi.yml)
445+
* Do an after-release `PR <https://github.com/SpikeInterface/spikeinterface/pull/3828/files>`_:
446+
- Uncomment the git installs in pyproject
447+
- Set ``DEV_MODE`` to ``True`` in the top level ``__init__`` (located at ``src/spikeinterface/__init__.py``)
448+
- Update the `pyproject.toml` version one patch ahead, or one minor version ahead if the next release will be a larger one.

pyproject.toml

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -151,8 +151,6 @@ test = [
151151
"pytest-cov",
152152
"psutil",
153153

154-
"huggingface_hub",
155-
156154
# preprocessing
157155
"ibllib", # for IBL
158156

@@ -195,8 +193,8 @@ docs = [
195193
"hdbscan>=0.8.33", # For sorters spykingcircus2 + tridesclous
196194
"numba", # For many postprocessing functions
197195
"networkx",
198-
"skops", # For auotmated curation
199-
"scikit-learn", # For auotmated curation
196+
"skops", # For automated curation
197+
"scikit-learn", # For automated curation
200198
# Download data
201199
"pooch>=1.8.2",
202200
"datalad>=1.0.2",

src/spikeinterface/core/analyzer_extension_core.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -709,7 +709,7 @@ class ComputeNoiseLevels(AnalyzerExtension):
709709
depend_on = []
710710
need_recording = True
711711
use_nodepipeline = False
712-
need_job_kwargs = False
712+
need_job_kwargs = True
713713
need_backward_compatibility_on_load = True
714714

715715
def __init__(self, sorting_analyzer):
@@ -729,9 +729,12 @@ def _merge_extension_data(
729729
# this does not depend on units
730730
return self.data.copy()
731731

732-
def _run(self, verbose=False):
732+
def _run(self, verbose=False, **job_kwargs):
733733
self.data["noise_levels"] = get_noise_levels(
734-
self.sorting_analyzer.recording, return_scaled=self.sorting_analyzer.return_scaled, **self.params
734+
self.sorting_analyzer.recording,
735+
return_scaled=self.sorting_analyzer.return_scaled,
736+
**self.params,
737+
**job_kwargs,
735738
)
736739

737740
def _get_data(self):

src/spikeinterface/core/binaryrecordingextractor.py

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -148,6 +148,18 @@ def get_binary_description(self):
148148
)
149149
return d
150150

151+
def __del__(self):
152+
"""
153+
Ensures that all segment resources are properly cleaned up when this recording extractor is deleted.
154+
Closes any open file handles in the recording segments.
155+
"""
156+
# Close all recording segments
157+
if hasattr(self, "_recording_segments"):
158+
for segment in self._recording_segments:
159+
# This will trigger the __del__ method of the BinaryRecordingSegment
160+
# which will close the file handle
161+
del segment
162+
151163

152164
BinaryRecordingExtractor.write_recording.__doc__ = BinaryRecordingExtractor.write_recording.__doc__.format(
153165
_shared_job_kwargs_doc
@@ -223,6 +235,15 @@ def get_traces(
223235

224236
return traces
225237

238+
def __del__(self):
239+
# Ensure that the file handle is closed when the segment is garbage-collected
240+
try:
241+
if hasattr(self, "file") and self.file and not self.file.closed:
242+
self.file.close()
243+
except Exception as e:
244+
warnings.warn(f"Error closing file handle in BinaryRecordingSegment: {e}")
245+
pass
246+
226247

227248
# For backward compatibility (old good time)
228249
BinDatRecordingExtractor = BinaryRecordingExtractor

src/spikeinterface/generation/tests/test_hybrid_tools.py

Lines changed: 10 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
import pytest
2+
13
import numpy as np
24

35
from spikeinterface.core import Templates
@@ -69,7 +71,8 @@ def test_generate_hybrid_from_templates():
6971
assert sorting_hybrid.get_num_units() == num_units
7072

7173

72-
def test_estimate_templates(create_cache_folder):
74+
@pytest.mark.skip("Spykingcircus2 is not stable enough for estimating templates from recording")
75+
def test_estimate_templates_from_recording(create_cache_folder):
7376
cache_folder = create_cache_folder
7477
rec, _ = generate_ground_truth_recording(num_units=10, sampling_frequency=20000, seed=0)
7578
templates = estimate_templates_from_recording(
@@ -79,7 +82,11 @@ def test_estimate_templates(create_cache_folder):
7982

8083

8184
if __name__ == "__main__":
85+
from pathlib import Path
86+
87+
cache_folder = Path(__file__).resolve().parents[4] / "cache_folder" / "generation"
88+
8289
# test_generate_hybrid_no_motion()
83-
test_generate_hybrid_motion()
84-
# test_estimate_templates()
90+
# test_generate_hybrid_motion()
91+
test_estimate_templates_from_recording(cache_folder)
8592
# test_generate_hybrid_with_sorting()

src/spikeinterface/preprocessing/preprocessinglist.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@
2525
CenterRecording,
2626
center,
2727
)
28+
2829
from .scale import scale_to_uV
2930

3031
from .whiten import WhitenRecording, whiten, compute_whitening_matrix

src/spikeinterface/sorters/external/kilosort4.py

Lines changed: 32 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -138,11 +138,7 @@ def check_sorter_version(cls):
138138

139139
@classmethod
140140
def _setup_recording(cls, recording, sorter_output_folder, params, verbose):
141-
from probeinterface import write_prb
142-
143-
pg = recording.get_probegroup()
144-
probe_filename = sorter_output_folder / "probe.prb"
145-
write_prb(probe_filename, pg)
141+
cls._setup_json_probe_map(recording, sorter_output_folder)
146142

147143
if params["use_binary_file"]:
148144
if not recording.binary_compatible_with(time_axis=0, file_paths_length=1):
@@ -189,7 +185,7 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose):
189185

190186
sorter_output_folder = sorter_output_folder.absolute()
191187

192-
probe_filename = sorter_output_folder / "probe.prb"
188+
probe_filename = sorter_output_folder / "chanMap.json"
193189

194190
torch_device = params["torch_device"]
195191
if torch_device == "auto":
@@ -358,7 +354,10 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose):
358354
)
359355
if version.parse(ks_version) >= version.parse("4.0.28"):
360356
cluster_spikes_kwargs.update(dict(verbose=verbose))
361-
clu, Wall = cluster_spikes(**cluster_spikes_kwargs)
357+
if version.parse(ks_version) <= version.parse("4.0.30"):
358+
clu, Wall = cluster_spikes(**cluster_spikes_kwargs)
359+
else:
360+
clu, Wall, st, tF = cluster_spikes(**cluster_spikes_kwargs)
362361

363362
if params["skip_kilosort_preprocessing"]:
364363
ops["preprocessing"] = dict(
@@ -386,3 +385,29 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose):
386385
@classmethod
387386
def _get_result_from_folder(cls, sorter_output_folder):
388387
return KilosortBase._get_result_from_folder(sorter_output_folder)
388+
389+
@classmethod
390+
def _setup_json_probe_map(cls, recording, sorter_output_folder):
391+
"""Create a JSON probe map file for Kilosort4."""
392+
from kilosort.io import save_probe
393+
import numpy as np
394+
395+
groups = recording.get_channel_groups()
396+
positions = np.array(recording.get_channel_locations())
397+
if positions.shape[1] != 2:
398+
raise RuntimeError("3D 'location' are not supported. Set 2D locations instead.")
399+
400+
n_chan = recording.get_num_channels()
401+
chanMap = np.arange(n_chan)
402+
xc = positions[:, 0]
403+
yc = positions[:, 1]
404+
kcoords = groups.astype(float)
405+
406+
probe = {
407+
"chanMap": chanMap,
408+
"xc": xc,
409+
"yc": yc,
410+
"kcoords": kcoords,
411+
"n_chan": n_chan,
412+
}
413+
save_probe(probe, str(sorter_output_folder / "chanMap.json"))

0 commit comments

Comments
 (0)