Commit fc51e66

Merge pull request #1687 from NeuralEnsemble/black-formatting
Black formatting
2 parents 6696426 + c44bf49 commit fc51e66

File tree

5 files changed: 41 additions & 40 deletions

neo/io/biocamio.py

Lines changed: 1 addition & 2 deletions
@@ -7,6 +7,5 @@ class BiocamIO(BiocamRawIO, BaseFromRaw):
     mode = "file"

     def __init__(self, filename, fill_gaps_strategy=None):
-        BiocamRawIO.__init__(self, filename=filename,
-                             fill_gaps_strategy=fill_gaps_strategy)
+        BiocamRawIO.__init__(self, filename=filename, fill_gaps_strategy=fill_gaps_strategy)
         BaseFromRaw.__init__(self, filename)
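
For orientation, a minimal usage sketch of the reformatted constructor (the file path is hypothetical; the accepted `fill_gaps_strategy` values come from the raw-IO changes below):

from neo.io import BiocamIO

# Hypothetical recording path. For event-based (sparse) BRW 4.x files the reader
# expects an explicit gap-filling strategy: 'zeros' or 'synthetic_noise'.
reader = BiocamIO(filename="recording.brw", fill_gaps_strategy="zeros")
block = reader.read_block()  # standard BaseFromRaw entry point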

neo/rawio/biocamrawio.py

Lines changed: 25 additions & 22 deletions
@@ -145,20 +145,21 @@ def _get_analogsignal_chunk(self, block_index, seg_index, i_start, i_stop, strea
         # read functions are different based on the version of biocam
         if self._read_function is readHDF5t_brw4_sparse:
             if self._fill_gaps_strategy is None:
-                raise ValueError(
-                    "Please set `fill_gaps_strategy` to 'zeros' or 'synthetic_noise'."
-                )
+                raise ValueError("Please set `fill_gaps_strategy` to 'zeros' or 'synthetic_noise'.")
             if self._fill_gaps_strategy == "synthetic_noise":
-                warnings.warn("Event-based compression : gaps will be filled with synthetic noise. "
-                              "Set `fill_gaps_strategy` to 'zeros' to fill gaps with 0s.")
+                warnings.warn(
+                    "Event-based compression : gaps will be filled with synthetic noise. "
+                    "Set `fill_gaps_strategy` to 'zeros' to fill gaps with 0s."
+                )
                 use_synthetic_noise = True
             elif self._fill_gaps_strategy == "zeros":
                 use_synthetic_noise = False
             else:
                 raise ValueError("`fill_gaps_strategy` must be 'zeros' or 'synthetic_noise'")

-            data = self._read_function(self._filehandle, i_start, i_stop, self._num_channels,
-                                       use_synthetic_noise=use_synthetic_noise)
+            data = self._read_function(
+                self._filehandle, i_start, i_stop, self._num_channels, use_synthetic_noise=use_synthetic_noise
+            )
         else:
             data = self._read_function(self._filehandle, i_start, i_stop, self._num_channels)

@@ -273,7 +274,7 @@ def open_biocam_file_header(filename) -> dict:
         min_digital = experiment_settings["ValueConverter"]["MinDigitalValue"]
         scale_factor = experiment_settings["ValueConverter"]["ScaleFactor"]
         sampling_rate = experiment_settings["TimeConverter"]["FrameRate"]
-        num_frames = rf['TOC'][-1,-1]
+        num_frames = rf["TOC"][-1, -1]

         num_channels = None
         well_ID = None
@@ -282,7 +283,9 @@ def open_biocam_file_header(filename) -> dict:
                 num_channels = len(rf[well_ID]["StoredChIdxs"])
                 if "Raw" in rf[well_ID]:
                     if len(rf[well_ID]["Raw"]) % num_channels:
-                        raise NeoReadWriteError(f"Length of raw data array is not multiple of channel number in {well_ID}")
+                        raise NeoReadWriteError(
+                            f"Length of raw data array is not multiple of channel number in {well_ID}"
+                        )
                     num_frames = len(rf[well_ID]["Raw"]) // num_channels
                     break
                 elif "EventsBasedSparseRaw" in rf[well_ID]:
@@ -360,7 +363,7 @@ def readHDF5t_brw4_sparse(rf, t0, t1, nch, use_synthetic_noise=False):
         data.fill(2048)
     else:
         # fill the data collection with Gaussian noise if requested
-        data = generate_synthetic_noise(rf, data, well_ID, start_frame, num_frames) #, std=noise_std)
+        data = generate_synthetic_noise(rf, data, well_ID, start_frame, num_frames)  # , std=noise_std)
     # fill the data collection with the decoded event based sparse raw data
     data = decode_event_based_raw_data(rf, data, well_ID, start_frame, num_frames)


@@ -376,38 +379,38 @@ def decode_event_based_raw_data(rf, data, well_ID, start_frame, num_frames):
     # from the given start position and duration in frames, localize the corresponding event positions
     # using the TOC
     toc_start_idx = np.searchsorted(toc[:, 1], start_frame)
-    toc_end_idx = min(
-        np.searchsorted(toc[:, 1], start_frame + num_frames, side="right") + 1,
-        len(toc) - 1)
+    toc_end_idx = min(np.searchsorted(toc[:, 1], start_frame + num_frames, side="right") + 1, len(toc) - 1)
     events_start_pos = events_toc[toc_start_idx]
     events_end_pos = events_toc[toc_end_idx]
     # decode all data for the given well ID and time interval
     binary_data = rf[well_ID]["EventsBasedSparseRaw"][events_start_pos:events_end_pos]
     binary_data_length = len(binary_data)
     pos = 0
     while pos < binary_data_length:
-        ch_idx = int.from_bytes(binary_data[pos:pos + 4], byteorder="little")
+        ch_idx = int.from_bytes(binary_data[pos : pos + 4], byteorder="little")
         pos += 4
-        ch_data_length = int.from_bytes(binary_data[pos:pos + 4], byteorder="little")
+        ch_data_length = int.from_bytes(binary_data[pos : pos + 4], byteorder="little")
         pos += 4
         ch_data_pos = pos
         while pos < ch_data_pos + ch_data_length:
-            from_inclusive = int.from_bytes(binary_data[pos:pos + 8], byteorder="little")
+            from_inclusive = int.from_bytes(binary_data[pos : pos + 8], byteorder="little")
             pos += 8
-            to_exclusive = int.from_bytes(binary_data[pos:pos + 8], byteorder="little")
+            to_exclusive = int.from_bytes(binary_data[pos : pos + 8], byteorder="little")
             pos += 8
             range_data_pos = pos
             for j in range(from_inclusive, to_exclusive):
                 if j >= start_frame + num_frames:
                     break
                 if j >= start_frame:
                     data[ch_idx][j - start_frame] = int.from_bytes(
-                        binary_data[range_data_pos:range_data_pos + 2], byteorder="little")
+                        binary_data[range_data_pos : range_data_pos + 2], byteorder="little"
+                    )
                 range_data_pos += 2
             pos += (to_exclusive - from_inclusive) * 2

     return data

+
 def generate_synthetic_noise(rf, data, well_ID, start_frame, num_frames):
     # Source: Documentation by 3Brain
     # https://gin.g-node.org/NeuralEnsemble/ephy_testing_data/src/master/biocam/documentation_brw_4.x_bxr_3.x_bcmp_1.x_in_brainwave_5.x_v1.1.3.pdf
@@ -451,10 +454,10 @@ def generate_synthetic_noise(rf, data, well_ID, start_frame, num_frames):
     # fill with Gaussian noise
     for ch_idx in range(len(data)):
         if ch_idx in noise_info:
-            data[ch_idx] = np.array(np.random.normal(noise_info[ch_idx][0], noise_info[ch_idx][1],
-                                                     num_frames), dtype=np.uint16)
+            data[ch_idx] = np.array(
+                np.random.normal(noise_info[ch_idx][0], noise_info[ch_idx][1], num_frames), dtype=np.uint16
+            )
         else:
-            data[ch_idx] = np.array(np.random.normal(median_mean, median_std, num_frames),
-                                    dtype=np.uint16)
+            data[ch_idx] = np.array(np.random.normal(median_mean, median_std, num_frames), dtype=np.uint16)

     return data
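
The `pos : pos + 4` reflow above is just Black's slice spacing; the byte framing itself is unchanged. As a rough, self-contained illustration of that framing (a toy decoder over an in-memory buffer, not the reader's actual HDF5 access), one might write:

import numpy as np


def decode_sparse_events(binary_data: bytes, num_channels: int, num_frames: int) -> np.ndarray:
    # Toy mirror of decode_event_based_raw_data: channel index and payload length
    # are 4-byte little-endian integers, frame ranges are 8-byte integers, and each
    # sample inside a range is a 2-byte little-endian value.
    data = np.zeros((num_channels, num_frames), dtype=np.uint16)
    pos = 0
    while pos < len(binary_data):
        ch_idx = int.from_bytes(binary_data[pos : pos + 4], byteorder="little")
        pos += 4
        payload_length = int.from_bytes(binary_data[pos : pos + 4], byteorder="little")
        pos += 4
        payload_end = pos + payload_length
        while pos < payload_end:
            first_frame = int.from_bytes(binary_data[pos : pos + 8], byteorder="little")
            pos += 8
            last_frame = int.from_bytes(binary_data[pos : pos + 8], byteorder="little")
            pos += 8
            for j in range(first_frame, last_frame):
                if j < num_frames:
                    data[ch_idx, j] = int.from_bytes(binary_data[pos : pos + 2], byteorder="little")
                pos += 2
    return data

Because only event ranges are stored, everything outside them is a gap, which is why the reader now requires an explicit `fill_gaps_strategy` for this format.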

neo/rawio/blackrockrawio.py

Lines changed: 0 additions & 2 deletions
@@ -133,7 +133,6 @@ class BlackrockRawIO(BaseRawIO):
     # We need to document the origin of this value
     main_sampling_rate = 30000.0

-
     def __init__(
         self, filename=None, nsx_override=None, nev_override=None, nsx_to_load=None, load_nev=True, verbose=False
     ):
@@ -254,7 +253,6 @@ def __init__(

     def _parse_header(self):

-
         event_channels = []
         spike_channels = []
         signal_buffers = []

neo/rawio/openephysbinaryrawio.py

Lines changed: 10 additions & 9 deletions
@@ -76,7 +76,8 @@ def __init__(self, dirname="", load_sync_channel=False, experiment_names=None):
             warn(
                 "The load_sync_channel=True option is deprecated and will be removed in version 0.15. "
                 "Use load_sync_channel=False instead, which will add sync channels as separate streams.",
-                DeprecationWarning, stacklevel=2
+                DeprecationWarning,
+                stacklevel=2,
             )
         self.folder_structure = None
         self._use_direct_evt_timestamps = None
@@ -152,7 +153,7 @@ def _parse_header(self):
                 # Every stream sync channel is added as its own stream
                 sync_stream_id = f"{stream_name}SYNC"
                 sync_stream_id_to_buffer_id[sync_stream_id] = buffer_id
-
+
                 # We save this mapping for the buffer description protocol
                 normal_stream_id_to_sync_stream_id[stream_id] = sync_stream_id
                 # We then set the stream_id to the sync stream id
@@ -188,19 +189,19 @@ def _parse_header(self):
         signal_buffers = []

         unique_streams_ids = np.unique(signal_channels["stream_id"])
-
+
         # This is getting too complicated, we probably should just have a table which would be easier to read
         # And for users to understand
         for stream_id in unique_streams_ids:
-
+
             # Handle sync channel on a special way
             if "SYNC" in stream_id:
                 # This is a sync channel and should not be added to the signal streams
                 buffer_id = sync_stream_id_to_buffer_id[stream_id]
                 stream_name = stream_id
                 signal_streams.append((stream_name, stream_id, buffer_id))
                 continue
-
+
             # Neural signal
             stream_index = int(stream_id)
             if stream_index < self._num_of_signal_streams:
@@ -279,9 +280,9 @@ def _parse_header(self):
             if has_sync_trace and not self.load_sync_channel:
                 # Exclude the sync channel from the main stream
                 self._stream_buffer_slice[stream_id] = slice(None, -1)
-
+
                 # Add a buffer slice for the sync channel
-                sync_stream_id = normal_stream_id_to_sync_stream_id[stream_id]
+                sync_stream_id = normal_stream_id_to_sync_stream_id[stream_id]
                 self._stream_buffer_slice[sync_stream_id] = slice(-1, None)
             else:
                 self._stream_buffer_slice[stream_id] = None
@@ -294,9 +295,9 @@ def _parse_header(self):
             if has_sync_trace and not self.load_sync_channel:
                 # Exclude the sync channel from the non-neural stream
                 self._stream_buffer_slice[stream_id_non_neural] = slice(num_neural_channels, -1)
-
+
                 # Add a buffer slice for the sync channel
-                sync_stream_id = normal_stream_id_to_sync_stream_id[stream_id]
+                sync_stream_id = normal_stream_id_to_sync_stream_id[stream_id]
                 self._stream_buffer_slice[sync_stream_id] = slice(-1, None)
             else:
                 self._stream_buffer_slice[stream_id_non_neural] = slice(num_neural_channels, None)
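
The slice bookkeeping above carves one on-disk buffer into a neural stream plus a one-channel SYNC stream. A minimal sketch of that convention on a fake array (shapes and stream ids are made up; the real reader works on memory-mapped files):

import numpy as np

# Fake interleaved buffer: 100 frames x (384 neural channels + 1 trailing sync trace).
buffer = np.zeros((100, 385), dtype=np.int16)

stream_buffer_slice = {
    "0": slice(None, -1),      # main stream: every channel except the trailing sync trace
    "0SYNC": slice(-1, None),  # sync stream: only the last channel
}

neural = buffer[:, stream_buffer_slice["0"]]
sync = buffer[:, stream_buffer_slice["0SYNC"]]
assert neural.shape == (100, 384) and sync.shape == (100, 1)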

neo/test/rawiotest/test_openephysbinaryrawio.py

Lines changed: 5 additions & 5 deletions
@@ -43,26 +43,26 @@ def test_sync(self):
             block_index=0, seg_index=0, i_start=0, i_stop=100, stream_index=stream_index
         )
         assert chunk.shape[1] == 384
-
+
     def test_sync_channel_access(self):
         """Test that sync channels can be accessed as separate streams when load_sync_channel=False."""
         rawio = OpenEphysBinaryRawIO(
             self.get_local_path("openephysbinary/v0.6.x_neuropixels_with_sync"), load_sync_channel=False
         )
         rawio.parse_header()
-
+
         # Find sync channel streams
         sync_stream_names = [s_name for s_name in rawio.header["signal_streams"]["name"] if "SYNC" in s_name]
         assert len(sync_stream_names) > 0, "No sync channel streams found"
-
+
         # Get the stream index for the first sync channel
         sync_stream_index = list(rawio.header["signal_streams"]["name"]).index(sync_stream_names[0])
-
+
         # Check that we can access the sync channel data
         chunk = rawio.get_analogsignal_chunk(
             block_index=0, seg_index=0, i_start=0, i_stop=100, stream_index=sync_stream_index
         )
-
+
         # Sync channel should have only one channel
         assert chunk.shape[1] == 1, f"Expected sync channel to have 1 channel, got {chunk.shape[1]}"
6868
