-
Notifications
You must be signed in to change notification settings - Fork 271
Expand file tree
/
Copy pathmaxwellrawio.py
More file actions
295 lines (244 loc) · 11.4 KB
/
maxwellrawio.py
File metadata and controls
295 lines (244 loc) · 11.4 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
"""
Class for reading data from maxwell biosystem device:
* MaxOne
* MaxTwo
https://www.mxwbio.com/resources/mea/
The implementation is a mix between:
* the implementation in spikeextractors
https://github.com/SpikeInterface/spikeextractors/blob/master/spikeextractors/extractors/maxwellextractors/maxwellextractors.py
* the implementation in spyking-circus
https://github.com/spyking-circus/spyking-circus/blob/master/circus/files/maxwell.py
This implementation does not handle spikes at the moment.
For maxtwo device, each well will be a different signal stream.
Author : Samuel Garcia, Alessio Buccino, Pierre Yger
"""
import os
from pathlib import Path
import platform
import warnings
from urllib.request import urlopen
import numpy as np
from .baserawio import (
BaseRawWithBufferApiIO,
_signal_channel_dtype,
_signal_stream_dtype,
_signal_buffer_dtype,
_spike_channel_dtype,
_event_channel_dtype,
)
from neo.core import NeoReadWriteError
class MaxwellRawIO(BaseRawWithBufferApiIO):
    """
    Class for reading MaxOne or MaxTwo files.

    Parameters
    ----------
    filename: str, default: ''
        The .h5 file to be loaded
    rec_name: str | None, default: None
        If the file has multiple recordings, specify the one to read.
        For 24-well plates, the rec_name needs to be specified since different well
        rows generate different recording ids.
        E.g., rec0001, rec0002, etc.
    """

    extensions = ["h5"]
    rawmode = "one-file"

    def __init__(self, filename="", rec_name=None):
        BaseRawWithBufferApiIO.__init__(self)
        self.filename = filename
        self.rec_name = rec_name

    def _source_name(self):
        return self.filename

    @staticmethod
    def _get_gain_uV(settings):
        """Return the channel gain in uV per raw (uint16) unit.

        Prefers the explicit 'lsb' entry (least-significant-bit size, in
        volts). Otherwise derives the gain from the amplifier 'gain'
        setting assuming a 3.3 V range over a 10-bit (1024-step) ADC,
        falling back to gain=512 when neither entry is present.
        """
        if "lsb" in settings:
            return settings["lsb"][0] * 1e6
        if "gain" not in settings:
            # fixed typo: original message read "'gain' amd 'lsb'"
            print("'gain' and 'lsb' not found in settings. Setting gain to 512 (default)")
            gain = 512
        else:
            gain = settings["gain"][0]
        return 3.3 / (1024 * gain) * 1e6

    def _get_ids_and_electrodes(self, version, stream_id, h5file, mapping):
        """Return (channel_ids, electrode_ids) for one stream (well).

        For the legacy 20160704 format both come straight from the mapping
        table. For newer formats the routed channel list is read from the
        'groups/routed/channels' dataset. Electrode ids are aligned to the
        first occurrence of each routed channel in the mapping.
        """
        if int(version) == 20160704:
            channel_ids = np.array(mapping["channel"])
            electrode_ids = np.array(mapping["electrode"])
        else:
            channel_ids = np.array(h5file["wells"][stream_id][self.rec_name]["groups"]["routed"]["channels"])
            electrode_ids = np.array(mapping["electrode"])
        # negative entries mark unrouted/invalid channels
        channel_ids = channel_ids[channel_ids >= 0]
        routed_channel_ids_mask = np.isin(np.array(mapping["channel"]), channel_ids)
        # np.unique(..., return_index=True)[1] gives the index of the first
        # occurrence of each routed channel in the mapping
        unique_channel_ids_mask = np.unique(mapping["channel"][routed_channel_ids_mask],
                                            return_index=True)[1]
        return channel_ids, electrode_ids[unique_channel_ids_mask]

    def _parse_header(self):
        """Read the HDF5 file and populate self.header (streams/buffers/channels)."""
        import h5py

        h5file = h5py.File(self.filename, mode="r")
        self.h5_file = h5file
        version = h5file["version"][0].decode()

        # create signal streams: one stream per well
        signal_streams = []

        if int(version) == 20160704:
            # legacy single-well layout
            self._old_format = True
            signal_streams.append(("well000", "well000", "well000"))
        elif int(version) > 20160704:
            # multi-well layout (one well is one stream)
            self._old_format = False
            well_ids = list(h5file["wells"].keys())
            unique_rec_names = []
            for well_name in well_ids:
                rec_names = list(h5file["wells"][well_name].keys())
                for rec_name in rec_names:
                    unique_rec_names.append(rec_name)
            # check consistency of rec_names across wells
            unique_rec_names = np.unique(unique_rec_names)
            if len(unique_rec_names) > 1:
                if self.rec_name is None:
                    raise ValueError(
                        f"Detected multiple recording IDs across wells. "
                        f"Please select a single recording using the `rec_name` parameter. "
                        f"Possible rec_names: {unique_rec_names}"
                    )
                else:
                    if self.rec_name not in unique_rec_names:
                        raise NeoReadWriteError(f"rec_name {self.rec_name} not found")
            else:
                self.rec_name = unique_rec_names[0]
            # add streams that contain the selected rec_name
            for well_name in well_ids:
                rec_names = list(h5file["wells"][well_name].keys())
                if self.rec_name in rec_names:
                    signal_streams.append((well_name, well_name, well_name))
        else:
            raise NotImplementedError(f"This version {version} is not supported")

        signal_streams = np.array(signal_streams, dtype=_signal_stream_dtype)

        # one buffer per stream
        signal_buffers = np.zeros(signal_streams.size, dtype=_signal_buffer_dtype)
        signal_buffers["id"] = signal_streams["id"]
        signal_buffers["name"] = signal_streams["name"]

        # create signal channels
        max_sig_length = 0
        self._buffer_descriptions = {0: {0: {}}}
        self._stream_buffer_slice = {}
        sig_channels = []
        well_indices_to_remove = []
        for stream_index, stream_id in enumerate(signal_streams["id"]):
            if int(version) == 20160704:
                # legacy format: fixed sampling rate, flat 'sig' dataset
                sr = 20000.0
                settings = h5file["settings"]
                gain_uV = self._get_gain_uV(settings)
                hdf5_path = "sig"
                mapping = h5file["mapping"]
                ids = np.array(mapping["channel"])
                ids = ids[ids >= 0]
                self._stream_buffer_slice[stream_id] = ids
            elif int(version) > 20160704:
                settings = h5file["wells"][stream_id][self.rec_name]["settings"]
                sr = settings["sampling"][0]
                gain_uV = self._get_gain_uV(settings)
                mapping = settings["mapping"]
                if "routed" in h5file["wells"][stream_id][self.rec_name]["groups"]:
                    hdf5_path = f"/wells/{stream_id}/{self.rec_name}/groups/routed/raw"
                else:
                    # wells without a 'routed' group carry no raw signal; drop them
                    warnings.warn(f"No 'routed' group found for well {stream_id}")
                    well_indices_to_remove.append(stream_index)
                    continue
                self._stream_buffer_slice[stream_id] = None

            buffer_id = stream_id
            shape = h5file[hdf5_path].shape
            self._buffer_descriptions[0][0][buffer_id] = {
                "type": "hdf5",
                "file_path": str(self.filename),
                "hdf5_path": hdf5_path,
                "shape": shape,
                "time_axis": 1,
            }
            # NOTE(review): this unconditionally overwrites the per-format value
            # assigned above (ids for the old format, None for the new one) —
            # confirm the overwrite is intended
            self._stream_buffer_slice[stream_id] = slice(None)

            channel_ids, electrode_ids = self._get_ids_and_electrodes(version, stream_id, h5file, mapping)
            for i, chan_id in enumerate(channel_ids):
                elec_id = electrode_ids[i]
                ch_name = f"ch{chan_id} elec{elec_id}"
                offset_uV = 0
                buffer_id = stream_id
                sig_channels.append(
                    (ch_name, str(chan_id), sr, "uint16", "uV", gain_uV, offset_uV, stream_id, buffer_id)
                )
            # time axis is axis 1 of the raw dataset
            max_sig_length = max(max_sig_length, shape[1])

        self._t_stop = max_sig_length / sr

        if len(well_indices_to_remove) > 0:
            signal_streams = np.delete(signal_streams, np.array(well_indices_to_remove))

        sig_channels = np.array(sig_channels, dtype=_signal_channel_dtype)

        # spikes are not handled by this implementation
        spike_channels = []
        spike_channels = np.array(spike_channels, dtype=_spike_channel_dtype)
        event_channels = []
        event_channels = np.array(event_channels, dtype=_event_channel_dtype)

        self.header = {}
        self.header["nb_block"] = 1
        self.header["nb_segment"] = [1]
        self.header["signal_buffers"] = signal_buffers
        self.header["signal_streams"] = signal_streams
        self.header["signal_channels"] = sig_channels
        self.header["spike_channels"] = spike_channels
        self.header["event_channels"] = event_channels

        self._generate_minimal_annotations()
        bl_ann = self.raw_annotations["blocks"][0]
        bl_ann["maxwell_version"] = version

    def _segment_t_start(self, block_index, seg_index):
        # recordings always start at t=0
        return 0.0

    def _segment_t_stop(self, block_index, seg_index):
        return self._t_stop

    def _get_analogsignal_buffer_description(self, block_index, seg_index, buffer_id):
        return self._buffer_descriptions[block_index][seg_index][buffer_id]

    def _get_signal_t_start(self, block_index, seg_index, stream_index):
        return 0.0

    def _get_analogsignal_chunk(self, block_index, seg_index, i_start, i_stop, stream_index, channel_indexes):
        """Delegate to the buffer API; on OSError print install instructions
        for the Maxwell HDF5 compression plugin before re-raising."""
        try:
            return super()._get_analogsignal_chunk(
                block_index, seg_index, i_start, i_stop, stream_index, channel_indexes
            )
        except OSError:
            # typically caused by the missing custom HDF5 decompression plugin
            print("*" * 10)
            print(_hdf_maxwell_error)
            print("*" * 10)
            # bare raise preserves the original traceback
            raise
_hdf_maxwell_error = """Maxwell file format is based on HDF5.
The internal compression requires a custom plugin!!!
This is a big pain for the end user.
You, as a end user, should ask Maxwell company to change this.
Please visit this page and install the missing decompression libraries:
https://share.mxwbio.com/d/4742248b2e674a85be97/
Then, link the decompression library by setting the `HDF5_PLUGIN_PATH` to your
installation location, e.g. via
os.environ['HDF5_PLUGIN_PATH'] = '/path/to/custom/hdf5/plugin/'
Alternatively, you can use the auto_install_maxwell_hdf5_compression_plugin() below
function that do it automagically.
"""
def auto_install_maxwell_hdf5_compression_plugin(hdf5_plugin_path=None, force_download=True):
    """Download the Maxwell HDF5 compression plugin for the current OS.

    Parameters
    ----------
    hdf5_plugin_path: str | Path | None, default: None
        Directory to install the library into. When None, the
        HDF5_PLUGIN_PATH environment variable is used; when that is unset
        too, ~/hdf5_plugin_path_maxwell is used and HDF5_PLUGIN_PATH is
        set accordingly for this process.
    force_download: bool, default: True
        When False and the library file already exists, skip the download.

    Raises
    ------
    NotImplementedError
        If the current platform is not Linux, macOS, or Windows.
    """
    if hdf5_plugin_path is None:
        hdf5_plugin_path = os.getenv("HDF5_PLUGIN_PATH", None)
        if hdf5_plugin_path is None:
            hdf5_plugin_path = Path.home() / "hdf5_plugin_path_maxwell"
            # make the plugin discoverable by h5py in this process
            os.environ["HDF5_PLUGIN_PATH"] = str(hdf5_plugin_path)
    hdf5_plugin_path = Path(hdf5_plugin_path)
    hdf5_plugin_path.mkdir(exist_ok=True)

    system = platform.system()
    if system == "Linux":
        remote_lib = "https://share.mxwbio.com/d/4742248b2e674a85be97/files/?p=%2FLinux%2Flibcompression.so&dl=1"
        local_lib = hdf5_plugin_path / "libcompression.so"
    elif system == "Darwin":
        remote_lib = "https://share.mxwbio.com/d/4742248b2e674a85be97/files/?p=%2FMacOS%2Flibcompression.dylib&dl=1"
        local_lib = hdf5_plugin_path / "libcompression.dylib"
    elif system == "Windows":
        remote_lib = "https://share.mxwbio.com/d/4742248b2e674a85be97/files/?p=%2FWindows%2Fcompression.dll&dl=1"
        local_lib = hdf5_plugin_path / "compression.dll"
    else:
        # original code raised a confusing NameError here (remote_lib unbound)
        raise NotImplementedError(f"Unsupported platform for the Maxwell compression plugin: {system}")

    if not force_download and local_lib.is_file():
        print(f"The h5 compression library for Maxwell is already located in {local_lib}!")
        return

    # context manager ensures the HTTP response is closed
    with urlopen(remote_lib) as dist:
        local_lib.write_bytes(dist.read())