Skip to content

Commit fe732c9

Browse files
authored
Merge branch 'main' into metric_system_units_docs
2 parents b00d190 + cb83327 commit fe732c9

37 files changed

Lines changed: 547 additions & 101 deletions

doc/api.rst

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -336,6 +336,7 @@ spikeinterface.exporters
336336
.. automodule:: spikeinterface.exporters
337337

338338
.. autofunction:: export_to_phy
339+
.. autofunction:: export_to_ibl_gui
339340
.. autofunction:: export_report
340341

341342

doc/modules/exporters.rst

Lines changed: 35 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,6 @@ The input of the :py:func:`~spikeinterface.exporters.export_to_phy` is a :code:`
2525
.. code-block:: python
2626
2727
import spikeinterface as si # core module only
28-
from spikeinterface.postprocessing import compute_spike_amplitudes, compute_principal_components
2928
from spikeinterface.exporters import export_to_phy
3029
3130
# the waveforms are sparse so it is faster to export to phy
@@ -40,6 +39,41 @@ The input of the :py:func:`~spikeinterface.exporters.export_to_phy` is a :code:`
4039
export_to_phy(sorting_analyzer=sorting_analyzer, output_folder='path/to/phy_folder')
4140
4241
42+
Export to IBL GUI
43+
-----------------
44+
45+
The :py:func:`~spikeinterface.exporters.export_to_ibl_gui` function allows you to use the
46+
`IBL GUI <https://github.com/int-brain-lab/iblapps/wiki>`_ for probe alignment.
47+
48+
The IBL GUI can also be installed as a standalone app using `this fork <https://github.com/AllenNeuralDynamics/ibl-ephys-alignment-gui>`_ from the Allen Institute.
49+
50+
The input of the :py:func:`~spikeinterface.exporters.export_to_ibl_gui` is a :code:`SortingAnalyzer` object.
51+
52+
.. code-block:: python
53+
54+
import spikeinterface as si # core module only
55+
import spikeinterface.preprocessing as spre
56+
from spikeinterface.exporters import export_to_ibl_gui
57+
58+
sorting_analyzer = si.create_sorting_analyzer(sorting=sorting, recording=recording)
59+
60+
# we need to compute some required extensions
61+
sorting_analyzer.compute(['random_spikes', 'templates', 'spike_amplitudes', 'spike_locations', 'noise_levels', 'quality_metrics'])
62+
# note that spike_locations are optional, but recommended to compute accurate spike depths
63+
64+
# optionally, we can pass an LFP recording to compute RMS/PSD in the LFP band
65+
recording_lfp = spre.bandpass_filter(recording, freq_min=1, freq_max=300)
66+
# we can also decimate the LFP to speed up the process
67+
recording_lfp = spre.decimate(recording_lfp, 10)
68+
69+
# the export process is fast because everything is pre-computed
70+
export_to_ibl_gui(
71+
sorting_analyzer=sorting_analyzer,
72+
output_folder='path/to/ibl_folder',
73+
lfp_recording=recording_lfp,
74+
n_jobs=-1
75+
)
76+
4377
4478
Export a spike sorting report
4579
-----------------------------
@@ -68,8 +102,6 @@ with many units!
68102
.. code-block:: python
69103
70104
import spikeinterface as si # core module only
71-
from spikeinterface.postprocessing import compute_spike_amplitudes, compute_correlograms
72-
from spikeinterface.qualitymetrics import compute_quality_metrics
73105
from spikeinterface.exporters import export_report
74106
75107

examples/get_started/quickstart.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -78,7 +78,7 @@
7878
# https://gin.g-node.org/NeuralEnsemble/ephy_testing_data repo
7979
# We download the dataset using DataLad but it can also be downloaded directly.
8080

81-
# Then we can open it. Note that [MEArec](https://mearec.readthedocs.io>) simulated files
81+
# Then we can open it. Note that [MEArec](https://mearec.readthedocs.io) simulated files
8282
# contain both a "recording" and a "sorting" object.
8383

8484
local_path = si.download_dataset(remote_path="mearec/mearec_test_10s.h5")
@@ -288,15 +288,15 @@
288288

289289
# The calculations are saved in the `extensions` subfolder of the `SortingAnalyzer` folder.
290290
# Similar to the waveforms we can access them using `get_extension` and `get_data`. For example,
291-
# here we can make a historgram of spike amplitudes
291+
# here we can make a histogram of spike amplitudes
292292

293293
# +
294294
amplitudes = analyzer_TDC.get_extension("spike_amplitudes").get_data()
295295
plt.hist(amplitudes, bins=50)
296296
plt.show()
297297
# -
298298

299-
# You can check which extensions have been saved (in your local folder) and which have been loaded (in your enviroment)...
299+
# You can check which extensions have been saved (in your local folder) and which have been loaded (in your environment)...
300300

301301
# +
302302
print(analyzer_TDC.get_saved_extension_names())

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,7 @@ extractors = [
6464
"MEArec>=1.8",
6565
"pynwb>=2.6.0",
6666
"hdmf-zarr>=0.11.0",
67-
"pyedflib>=0.1.30,<0.1.39",
67+
"pyedflib>=0.1.30",
6868
"sonpy;python_version<'3.10'",
6969
"lxml", # lxml for neuroscope
7070
"scipy",
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,3 @@
11
from .to_phy import export_to_phy
22
from .report import export_report
3+
from .to_ibl import export_to_ibl_gui

src/spikeinterface/exporters/tests/common.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,10 @@ def make_sorting_analyzer(sparse=True, with_group=False):
4545
sorting_analyzer.compute("noise_levels")
4646
sorting_analyzer.compute("principal_components")
4747
sorting_analyzer.compute("template_similarity")
48-
sorting_analyzer.compute("quality_metrics", metric_names=["snr"])
48+
sorting_analyzer.compute(
49+
"quality_metrics", metric_names=["snr", "amplitude_median", "isi_violation", "amplitude_cutoff"]
50+
)
51+
sorting_analyzer.compute(["spike_amplitudes", "spike_locations"])
4952

5053
return sorting_analyzer
5154

Lines changed: 115 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,115 @@
1+
import pytest
2+
3+
from spikeinterface.preprocessing import bandpass_filter, decimate
4+
from spikeinterface.exporters import export_to_ibl_gui
5+
6+
from spikeinterface.exporters.tests.common import (
7+
make_sorting_analyzer,
8+
sorting_analyzer_sparse_for_export,
9+
)
10+
11+
required_output_files = [
12+
"spikes.times.npy",
13+
"spikes.clusters.npy",
14+
"spikes.depths.npy",
15+
"spikes.amps.npy",
16+
"clusters.waveforms.npy",
17+
"clusters.peakToTrough.npy",
18+
"clusters.channels.npy",
19+
"clusters.metrics.csv",
20+
"channels.localCoordinates.npy",
21+
"channels.rawInd.npy",
22+
]
23+
ap_output_files = ["_iblqc_ephysTimeRmsAP.rms.npy", "_iblqc_ephysTimeRmsAP.timestamps.npy"]
24+
lfp_output_files = [
25+
"_iblqc_ephysTimeRmsLF.rms.npy",
26+
"_iblqc_ephysTimeRmsLF.timestamps.npy",
27+
"_iblqc_ephysSpectralDensityLF.power.npy",
28+
"_iblqc_ephysSpectralDensityLF.freqs.npy",
29+
]
30+
31+
good_units_query = "amplitude_median < -30"
32+
33+
34+
def test_export_ap_to_ibl(sorting_analyzer_sparse_for_export, create_cache_folder):
35+
cache_folder = create_cache_folder
36+
output_folder = cache_folder / "ibl_ap_output"
37+
38+
sorting_analyzer = sorting_analyzer_sparse_for_export
39+
# AP, but no LFP
40+
export_to_ibl_gui(
41+
sorting_analyzer,
42+
output_folder,
43+
# good_units_query=good_units_query,
44+
verbose=True,
45+
n_jobs=-1,
46+
)
47+
for f in required_output_files:
48+
assert (output_folder / f).exists(), f"Missing file: {f}"
49+
for f in ap_output_files:
50+
assert (output_folder / f).exists(), f"Missing file: {f}"
51+
for f in lfp_output_files:
52+
assert not (output_folder / f).exists(), f"Unexpected file: {f}"
53+
54+
55+
def test_export_recordingless_to_ibl(sorting_analyzer_sparse_for_export, create_cache_folder):
56+
cache_folder = create_cache_folder
57+
output_folder = cache_folder / "ibl_recordingless_output"
58+
59+
sorting_analyzer = sorting_analyzer_sparse_for_export
60+
recording = sorting_analyzer.recording
61+
sorting_analyzer._recording = None
62+
63+
# AP, but no LFP
64+
export_to_ibl_gui(sorting_analyzer_sparse_for_export, output_folder, good_units_query=good_units_query, n_jobs=-1)
65+
for f in required_output_files:
66+
assert (output_folder / f).exists(), f"Missing file: {f}"
67+
for f in ap_output_files:
68+
assert not (output_folder / f).exists(), f"Unexpected file: {f}"
69+
for f in lfp_output_files:
70+
assert not (output_folder / f).exists(), f"Unexpected file: {f}"
71+
72+
sorting_analyzer._recording = recording
73+
74+
75+
def test_export_lfp_to_ibl(sorting_analyzer_sparse_for_export, create_cache_folder):
76+
cache_folder = create_cache_folder
77+
output_folder = cache_folder / "ibl_lfp_output"
78+
79+
sorting_analyzer = sorting_analyzer_sparse_for_export
80+
recording = sorting_analyzer.recording
81+
recording_lfp = bandpass_filter(recording, freq_min=0.5, freq_max=300)
82+
recording_lfp = decimate(recording_lfp, 10)
83+
# LFP, but no AP
84+
export_to_ibl_gui(
85+
sorting_analyzer, output_folder, lfp_recording=recording_lfp, good_units_query=good_units_query, n_jobs=-1
86+
)
87+
for f in required_output_files:
88+
assert (output_folder / f).exists(), f"Missing file: {f}"
89+
for f in ap_output_files:
90+
assert (output_folder / f).exists(), f"Missing file: {f}"
91+
for f in lfp_output_files:
92+
assert (output_folder / f).exists(), f"Missing file: {f}"
93+
94+
95+
def test_missing_info(sorting_analyzer_sparse_for_export, create_cache_folder):
96+
cache_folder = create_cache_folder
97+
output_folder = cache_folder / "ibl_missing_info_output"
98+
99+
sorting_analyzer = sorting_analyzer_sparse_for_export
100+
101+
# missing metrics
102+
good_units_query = "rp_violations < 0.2"
103+
104+
with pytest.raises(ValueError, match="Missing required quality metrics"):
105+
export_to_ibl_gui(sorting_analyzer, output_folder, good_units_query=good_units_query, n_jobs=-1)
106+
107+
sorting_analyzer.delete_extension("spike_amplitudes")
108+
109+
with pytest.raises(ValueError, match="Missing required extension"):
110+
export_to_ibl_gui(sorting_analyzer, output_folder, n_jobs=-1)
111+
112+
113+
if __name__ == "__main__":
114+
sorting_analyzer = make_sorting_analyzer(sparse=True)
115+
test_export_ap_to_ibl(sorting_analyzer)

0 commit comments

Comments
 (0)