ibl-neuropixel 1.9.0__py3-none-any.whl → 1.9.2__py3-none-any.whl

This diff shows the content of publicly available package versions as released to a supported public registry, and reflects the changes between those two versions as published.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ibl-neuropixel
- Version: 1.9.0
+ Version: 1.9.2
  Summary: Collection of tools for Neuropixel 1.0 and 2.0 probes data
  Home-page: https://github.com/int-brain-lab/ibl-neuropixel
  Author: The International Brain Laboratory
@@ -41,6 +41,53 @@ Minimum Python version supported is 3.10

  ## Destriping
  ### Getting started
+
+ #### Compress a binary file losslessly using `mtscomp`
+
+ The `mtscomp` utility implements fast chunked lossless compression for neurophysiology data in a single shard.
+ The package repository is [here](https://github.com/int-brain-lab/mtscomp).
+
+
+ ```python
+ from pathlib import Path
+ import spikeglx
+ file_spikeglx = Path('/datadisk/neuropixel/file.imec0.ap.bin')
+ sr = spikeglx.Reader(file_spikeglx)
+ sr.compress_file()
+ # note: you can use sr.compress_file(keep_original=False) to also remove the original bin file
+ ```
+
+ #### Reading a raw spikeglx file and manipulating arrays
+
+ The `spikeglx.Reader` class exposes the raw binary file as an array: snippets can be sliced out in memory and processed, for example destriped with `ibldsp.voltage`.
+
+ ```python
+ from pathlib import Path
+ import spikeglx
+
+ import ibldsp.voltage
+
+ file_spikeglx = Path('/datadisk/Data/neuropixel/human/Pt01.imec0.ap.bin')
+ sr = spikeglx.Reader(file_spikeglx)
+
+ # reads in 300ms of data
+ raw = sr[10_300_000:10_310_000, :sr.nc - sr.nsync].T
+ destripe = ibldsp.voltage.destripe(raw, fs=sr.fs, neuropixel_version=1)
+
+ # display with matplotlib backend
+ import ibldsp.plots
+ ibldsp.plots.voltageshow(raw, fs=sr.fs, title='raw')
+ ibldsp.plots.voltageshow(destripe, fs=sr.fs, title='destripe')
+
+ # display with QT backend
+ from viewephys.gui import viewephys
+ eqc = {}
+ eqc['raw'] = viewephys(raw, fs=sr.fs, title='raw')
+ eqc['destripe'] = viewephys(destripe, fs=sr.fs, title='destripe')
+ ```
+
+ #### Destripe a binary file
  This relies on an external fast Fourier transform library: `pip install pyfftw`.

  Minimal working example to destripe a neuropixel binary file.
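The hunk stops here, before the README's own minimal example. Below is a hedged sketch of one way to destripe a binary file chunk by chunk, reusing only calls that appear elsewhere in this diff (`spikeglx.Reader`, `ibldsp.voltage.destripe`); the path, chunk length and output format are illustrative, and the package's `decompress_destripe_cbin` (see the `ibldsp/voltage.py` section below) is the fully featured route.

```python
from pathlib import Path
import numpy as np
import spikeglx
import ibldsp.voltage

file_spikeglx = Path('/datadisk/neuropixel/file.imec0.ap.bin')  # illustrative path
sr = spikeglx.Reader(file_spikeglx)

# destripe the recording chunk by chunk and dump float32 traces to a flat binary
# (no chunk-overlap handling here; decompress_destripe_cbin handles edges properly)
chunk_len = int(sr.fs)  # one-second chunks
out_file = file_spikeglx.parent / (file_spikeglx.stem + '.destriped.bin')
with open(out_file, 'wb') as fid:
    for first in range(0, sr.ns, chunk_len):
        last = min(first + chunk_len, sr.ns)
        raw = sr[first:last, :sr.nc - sr.nsync].T  # (nc, ns) chunk, sync channels excluded
        clean = ibldsp.voltage.destripe(raw, fs=sr.fs, neuropixel_version=1)
        clean.astype(np.float32).T.tofile(fid)
```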
@@ -1,8 +1,8 @@
  neuropixel.py,sha256=P7sIBAtGIqKReK7OqMBqdwPaTeHjhHMyfyBRL_AvuQY,37987
- spikeglx.py,sha256=LjTPcEnml23-NYbO_157QVIBheswJs6hS_MKIxZm8Ng,41035
- ibl_neuropixel-1.9.0.dist-info/licenses/LICENSE,sha256=JJCjBeS78UPiX7TZpE-FnMjNNpCyrFb4s8VDGG2wD10,1087
+ spikeglx.py,sha256=4TPXnFGhJahClxr4fA9HwTeiiHBQS9ZEfkWl6t20q2s,41068
+ ibl_neuropixel-1.9.2.dist-info/licenses/LICENSE,sha256=JJCjBeS78UPiX7TZpE-FnMjNNpCyrFb4s8VDGG2wD10,1087
  ibldsp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ibldsp/cadzow.py,sha256=pAtxDxBwoNhoxFNc2R5WLwUrmKsq4rQuaglRNgW2Lj8,7251
+ ibldsp/cadzow.py,sha256=YVQdF1HAZEhdanGKSBt3FWUSd66_VCxN8y2eQy8q9Fc,7307
  ibldsp/cuda_tools.py,sha256=6LpVhYOCuOXEEg8kJ3aOCE4hzA1Yq1dojsbbBQmQCF4,2387
  ibldsp/destripe_gpu.py,sha256=I5jzFocpsYw36kMMd533YThbrQaZix5e1sHqsUjHvO4,2824
  ibldsp/filter_gpu.py,sha256=DPrPBLRXeCh_6BcJWJnPFaxS9Q6kX4nPENZg-c2q5rc,5789
@@ -12,24 +12,24 @@ ibldsp/plots.py,sha256=XmYC4yca_seZYNEmC5hE5wBiJAl_fi_KU00DbNcM6jI,4577
  ibldsp/raw_metrics.py,sha256=Ie4b7unuFc-XiFc9-tpTsUkph29G-20NvM7iJ25jAPI,5198
  ibldsp/smooth.py,sha256=m_mByXHG_JyFErnYsZ27gXjcqpfwCEuWa6eOb9eFuyg,8033
  ibldsp/spiketrains.py,sha256=lYP1PD4l6T-4KhFu8ZXlbnUUnEQLOriGxN1szacolPY,6878
- ibldsp/utils.py,sha256=7Mp3UrSz_dZKXH806YkPKI3UBLJ91kLr3NtSVTIg2kI,17795
- ibldsp/voltage.py,sha256=brxkaONTdNzwZnzhuSHTkt5KxOcPln5Axj780mNjZTg,45339
+ ibldsp/utils.py,sha256=Ku1pdymbiCyQU5iZX8Akfq47YXM4xegW6G3_aomh6WA,18580
+ ibldsp/voltage.py,sha256=Pb1ZYlr8av2XDUspZaSUtF2wOoTy92fnrsxnW9sTIFA,47708
  ibldsp/waveform_extraction.py,sha256=yKrldgHqpwQ_Dq6xdoSCceKkfrL9FUXnpwKJUM3R41M,26570
- ibldsp/waveforms.py,sha256=5OBLYuM902WS_9WGDDmiTh4BpYWGe7-bQYTMxc2mYII,35166
+ ibldsp/waveforms.py,sha256=XKWO0sSEhZR1mBsXCdGpVU3ga96HX3CViXIgpl3bml8,35280
  neurowaveforms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  neurowaveforms/model.py,sha256=YOPWMMNNS_Op5TyK4Br1i9_Ni41jLSqHie5r1vb5VjY,6729
  tests/integration/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  tests/integration/csd_experiments.py,sha256=bddMl2SCzeEM_QnBrZGypUYMKxFVDc6qderyUyX-iew,3158
- tests/integration/test_destripe.py,sha256=6OwqWWz3hJSPGAeEGDcJJkG4bZMnNeaU80AlH7vyrno,6170
+ tests/integration/test_destripe.py,sha256=ZV7gasuFib2AbVb63WczgQvc13PbIX9t4pQgamBMgRY,6161
  tests/unit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  tests/unit/test_ephys_np2.py,sha256=1wsgS_C5W8tUO_qDyORBRUKGsrB0Gq3wMLAjJcjrNZ4,15599
  tests/unit/test_neuropixel.py,sha256=ZFKrvTYaYgK5WgOfoHa5x9BNUpRomACPiIm6Kr-A3gw,2511
  tests/unit/test_plots.py,sha256=PhCxrEN1Zd1jTgmiwd16_dEghcI7kwmHT3AQmAPpzkA,850
- tests/unit/test_spikeglx.py,sha256=4Bv9nGm7o1LorPPIeHAqfRdsxI56xIsyYotzTSRlt4Q,33296
- tests/unit/test_utils.py,sha256=lraYbOkpQ6us43cpEFq8mlrcKnea9uSUrBKtxoRKE3g,21861
- tests/unit/test_voltage.py,sha256=4vpOCBCrXlvXAG5MRPMTIjKSoYT0kapQxyN1s_7t7Ns,5198
+ tests/unit/test_spikeglx.py,sha256=9PrSOPGrYAAQEeJPAOmqc3Rhgia6ftv-zihVWXglhqw,34388
+ tests/unit/test_utils.py,sha256=37XQDUqcABYrrsdX17kX54H4e5jld7GOn1ISxtgoa5U,21859
+ tests/unit/test_voltage.py,sha256=Nr6KqNGn2yOGPJYnvVzxdM5IiEHvK2FicDR_7fzvTHQ,6228
  tests/unit/test_waveforms.py,sha256=VnFvUi1pteROwwbC5Ebp2lqSxF3a8a7eXHpD8OUeuTg,16237
- ibl_neuropixel-1.9.0.dist-info/METADATA,sha256=KV0iSckmgwW_bMqNvgT1he4E6pEfZsSktYlxWWgJ-3A,2295
- ibl_neuropixel-1.9.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- ibl_neuropixel-1.9.0.dist-info/top_level.txt,sha256=WtVcEUptnwU6BT72cgGmrWYFGM9d9qCEqe3LwR9FIw4,48
- ibl_neuropixel-1.9.0.dist-info/RECORD,,
+ ibl_neuropixel-1.9.2.dist-info/METADATA,sha256=4RuLzmWxYJjEyYJKA_1Szi0T0Dn5-VRjD7o4BaY-mHQ,3746
+ ibl_neuropixel-1.9.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ ibl_neuropixel-1.9.2.dist-info/top_level.txt,sha256=WtVcEUptnwU6BT72cgGmrWYFGM9d9qCEqe3LwR9FIw4,48
+ ibl_neuropixel-1.9.2.dist-info/RECORD,,
ibldsp/cadzow.py CHANGED
@@ -17,6 +17,8 @@ for N-spatial dimensions.
  }
  """

+ from functools import lru_cache
+
  import numpy as np
  import scipy.fft
  from iblutil.numerical import ismember2d
@@ -46,6 +48,7 @@ def traj_matrix_indices(n):
  return itraj


+ @lru_cache(maxsize=24)
  def trajectory(x, y, dtype=np.complex128):
  """
  Computes the 2 spatial dimensions block-Toeplitz indices from x and y trace coordinates.
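The new `@lru_cache(maxsize=24)` decorator memoizes `trajectory`, so the block-Toeplitz index arrays are only rebuilt when the arguments change. A minimal sketch of the same memoization pattern on a hypothetical index-building function (not the package's own):

```python
from functools import lru_cache

import numpy as np


@lru_cache(maxsize=24)
def toy_traj_indices(nx, ny):
    # hypothetical stand-in for an expensive index computation;
    # repeated calls with the same hashable arguments return the cached result
    return np.arange(nx * ny).reshape(nx, ny)


a = toy_traj_indices(8, 4)
b = toy_traj_indices(8, 4)
assert a is b  # second call is served from the cache
```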
ibldsp/utils.py CHANGED
@@ -89,7 +89,7 @@ def parabolic_max(x):
  # for 2D arrays, operate along the last dimension
  ns = x.shape[-1]
  axis = -1
- imax = np.argmax(x, axis=axis)
+ imax = np.nanargmax(x, axis=axis)

  if x.ndim == 1:
  v010 = x[np.maximum(np.minimum(imax + np.array([-1, 0, 1]), ns - 1), 0)]
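The switch from `np.argmax` to `np.nanargmax` means NaN samples no longer win the peak search (the updated expected values in `tests/unit/test_utils.py` further down reflect this). The sketch below only illustrates the underlying NumPy behaviour, not `parabolic_max` itself:

```python
import numpy as np

x = np.array([np.nan, 0.0, 1.0, 3.0, 1.0])
np.argmax(x)     # 0 -> the first NaN wins the comparison
np.nanargmax(x)  # 3 -> NaN entries are ignored when locating the maximum
```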
@@ -382,17 +382,36 @@ class WindowGenerator(object):
  amp[-self.overlap :] = 1 if last == self.ns else np.flipud(w)
  yield (first, last, amp)

+ @property
+ def first_last_valid_noedge(self):
+ """
+ Generator that yields a tuple of first, last, first_valid, last_valid index of windows
+ The valid indices span up to half of the overlap.
+ The first and last windows have respectively the beginning and end of the signal discarded.
+ :return:
+ """
+ return self._firstlast_valid(discard_edges=True)
+
  @property
  def firstlast_valid(self):
  """
  Generator that yields a tuple of first, last, first_valid, last_valid index of windows
- The valid indices span up to half of the overlap
+ The valid indices span up to half of the overlap.
+ The first and last windows have the full beginning and end of the signal respectively.
+ To discard the beginning and end edges, use firstlast_valid_noedge instead.
  :return:
  """
+ return self._firstlast_valid(self)
+
+ def _firstlast_valid(self, discard_edges=False):
  assert self.overlap % 2 == 0, "Overlap must be even"
  for first, last in self.firstlast:
- first_valid = 0 if first == 0 else first + self.overlap // 2
- last_valid = last if last == self.ns else last - self.overlap // 2
+ first_valid = (
+ 0 if first == 0 and discard_edges else first + self.overlap // 2
+ )
+ last_valid = (
+ last if last == self.ns and discard_edges else last - self.overlap // 2
+ )
  yield (first, last, first_valid, last_valid)

  @property
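`firstlast_valid` yields, for each overlapping window, the window bounds plus the "valid" sub-range to keep, while the new `first_last_valid_noedge` variant also trims the first and last windows. A sketch of the usual overlap-and-trim pattern; the constructor arguments `(ns, nswin, overlap)` are assumed from common usage and are not confirmed by this diff:

```python
import numpy as np
from ibldsp.utils import WindowGenerator

ns, nswin, overlap = 30_000, 4_000, 200  # overlap must be even
x = np.random.randn(ns)
y = np.zeros_like(x)

wg = WindowGenerator(ns, nswin, overlap)  # assumed order: total samples, window size, overlap
for first, last, first_valid, last_valid in wg.firstlast_valid:
    chunk = x[first:last]
    processed = chunk.copy()  # placeholder for a real per-window filter
    # stitch back only the valid, non-overlapping portion of each window
    y[first_valid:last_valid] = processed[first_valid - first:last_valid - first]
```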
ibldsp/voltage.py CHANGED
@@ -782,28 +782,67 @@ def decompress_destripe_cbin(
  np.save(
  output_qc_path.joinpath("_iblqc_ephysTimeRmsAP.timestamps.npy"), time_data
  )
- np.save(
- output_qc_path.joinpath("_iblqc_ephysSaturation.samples.npy"),
+ saturation_samples_to_intervals(
  saturation_data,
+ output_file=output_qc_path.joinpath("_iblqc_ephysSaturation.samples.pqt"),
  )
+ file_saturation.unlink()


  def detect_bad_channels(
- raw, fs, similarity_threshold=(-0.5, 1), psd_hf_threshold=None, display=False
+ raw,
+ fs,
+ similarity_threshold=(-0.5, 1),
+ psd_hf_threshold=None,
+ display=False,
+ outside_threshold=-0.75,
  ):
  """
- Bad channels detection for Neuropixel probes
- Labels channels
- 0: all clear
- 1: dead low coherence / amplitude
- 2: noisy
- 3: outside of the brain
- :param raw: [nc, ns]
- :param fs: sampling frequency
- :param similarity_threshold:
- :param psd_hf_threshold:
- :param display: optinal (False) will show a plot of features alongside a raw data snippet
- :return: labels (numpy vector [nc]), xfeats: dictionary of features [nc]
+ Detect bad channels in Neuropixel probe recordings based on signal quality metrics.
+
+ This function analyzes raw electrophysiology data to identify and label problematic channels
+ using multiple criteria including cross-correlation with neighboring channels, power spectral
+ density analysis, and spatial coherence patterns. Channels are classified into four categories:
+ good (0), dead (1), noisy (2), or outside the brain (3).
+
+ Parameters
+ ----------
+ raw : numpy.ndarray
+ Raw voltage traces array with shape (nc, ns), where nc is the number of channels
+ and ns is the number of samples.
+ fs : float
+ Sampling frequency in Hz.
+ similarity_threshold : tuple of float, optional
+ Two-element tuple (lower, upper) defining the acceptable range for high-frequency
+ cross-correlation values. Channels outside this range are flagged as dead (below lower)
+ or noisy (above upper). Defaults to (-0.5, 1).
+ psd_hf_threshold : float, optional
+ Threshold for high-frequency power spectral density to identify noisy channels.
+ If None, defaults to 0.02 for AP band (fs > 2600 Hz) or 1.4 for LF band (fs <= 2600 Hz).
+ Units are µV²/Hz.
+ display : bool, optional
+ If True, displays a diagnostic plot showing channel features and a raw data snippet.
+ Defaults to False.
+ outside_threshold : float or str, optional
+ Threshold for low-frequency cross-correlation to identify channels outside the brain.
+ Can be a float value (default -0.75) or 'adaptive' for automatic threshold detection
+ based on signal gradient analysis.
+
+ Returns
+ -------
+ ichannels : numpy.ndarray
+ Integer array of shape (nc,) containing channel labels:
+ - 0: good channel
+ - 1: dead channel (low coherence/amplitude)
+ - 2: noisy channel (high noise or excessive correlation)
+ - 3: outside of the brain
+ xfeats : dict
+ Dictionary containing computed features for each channel:
+ - 'ind': channel indices
+ - 'rms_raw': RMS amplitude of raw signal
+ - 'xcor_hf': detrended high-frequency cross-correlation
+ - 'xcor_lf': low-frequency cross-correlation component
+ - 'psd_hf': mean power spectral density in high-frequency band
  """

  def rneighbours(raw, n=1): # noqa
@@ -896,21 +935,28 @@ def detect_bad_channels(
  )[0]
  # the channels outside of the brains are the contiguous channels below the threshold on the trend coherency

- signal_noisy = xfeats["xcor_lf"]
- # Filter signal
- window_size = 25 # Choose based on desired smoothing (e.g., 25 samples)
- kernel = np.ones(window_size) / window_size
- # Apply convolution
- signal_filtered = np.convolve(signal_noisy, kernel, mode="same")
-
- diff_x = np.diff(signal_filtered)
- indx = np.where(diff_x < -0.02)[0] # hardcoded threshold
- if indx.size > 0:
- indx_threshold = np.floor(np.median(indx)).astype(int)
- threshold = signal_noisy[indx_threshold]
- ioutside = np.where(signal_noisy < threshold)[0]
+ # deal with channels outside of the brain
+ if outside_threshold == "adaptive":
+ signal_noisy = xfeats["xcor_lf"]
+ # Filter signal
+ window_size = 25 # Choose based on desired smoothing (e.g., 25 samples)
+ kernel = np.ones(window_size) / window_size
+ # Apply convolution
+ signal_filtered = np.convolve(signal_noisy, kernel, mode="same")
+
+ diff_x = np.diff(signal_filtered)
+ indx = np.where(diff_x < -0.02)[0] # hardcoded threshold
+ if indx.size > 0:
+ indx_threshold = np.floor(np.median(indx)).astype(int)
+ threshold = signal_noisy[indx_threshold]
+ ioutside = np.where(signal_noisy < threshold)[0]
+ else:
+ ioutside = np.array([])
  else:
- ioutside = np.array([])
+ assert np.isreal(outside_threshold) and np.isscalar(outside_threshold), (
+ "outside_threshold must be a real number or 'adaptive' for adaptive threshold"
+ )
+ ioutside = np.where(xfeats["xcor_lf"] < outside_threshold)[0]

  if ioutside.size > 0 and ioutside[-1] == (nc - 1):
  a = np.cumsum(np.r_[0, np.diff(ioutside) - 1])
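`detect_bad_channels` now takes an `outside_threshold` argument: a fixed low-frequency correlation threshold (default -0.75), with `'adaptive'` restoring the previous gradient-based detection of out-of-brain channels. A short usage sketch on a raw snippet; the path and sample indices mirror the README example above and are illustrative:

```python
from pathlib import Path
import numpy as np
import spikeglx
import ibldsp.voltage

sr = spikeglx.Reader(Path('/datadisk/neuropixel/file.imec0.ap.bin'))  # illustrative path
raw = sr[10_300_000:10_310_000, :sr.nc - sr.nsync].T  # (nc, ns) snippet, as in the README

# new default: fixed threshold on the low-frequency cross-correlation feature
labels, xfeats = ibldsp.voltage.detect_bad_channels(raw, fs=sr.fs, outside_threshold=-0.75)

# the previous gradient-based behaviour remains available
labels_adaptive, _ = ibldsp.voltage.detect_bad_channels(raw, fs=sr.fs, outside_threshold='adaptive')

# 0: good, 1: dead, 2: noisy, 3: outside of the brain
print({label: int(np.sum(labels == label)) for label in range(4)})
```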
ibldsp/waveforms.py CHANGED
@@ -297,7 +297,7 @@ def plot_wiggle(
  return ax


- def double_wiggle(wav, fs=1, ax=None, colors=None, **kwargs):
+ def double_wiggle(wav, fs=1, ax=None, colors=None, title=None, **kwargs):
  """
  Double trouble: this wiggle colours both the negative and the postive values
  :param wav: (nchannels, nsamples)
@@ -308,6 +308,7 @@ def double_wiggle(wav, fs=1, ax=None, colors=None, **kwargs):
  :param fill_sign: -1 for negative (default for spikes), 1 for positive
  :param plot_kwargs: kwargs for the line plot
  :param fill_kwargs: kwargs for the fill
+ :param title: title for the axis (optional)
  :return:
  """
  if colors is None:
@@ -334,6 +335,8 @@ def double_wiggle(wav, fs=1, ax=None, colors=None, **kwargs):
  fill_kwargs={"color": colors[1]},
  **kwargs,
  )
+ if title is not None:
+ ax.set(title=title)
  return ax


spikeglx.py CHANGED
@@ -144,11 +144,7 @@ class Reader:
  sglx_file = str(self.file_bin)
  if self.is_mtscomp:
  self._raw = mtscomp.Reader()
- self.ch_file = (
- _get_companion_file(sglx_file, ".ch")
- if self.ch_file is None
- else self.ch_file
- )
+ self.ch_file = self._parse_ch_file()
  self._raw.open(self.file_bin, self.ch_file)
  if self._raw.shape != (self.ns, self.nc):
  ftsec = self._raw.shape[0] / self.fs
@@ -396,10 +392,8 @@ class Reader:
  if "out" not in kwargs:
  kwargs["out"] = self.file_bin.with_suffix(".bin")
  assert self.is_mtscomp
- if file_ch is None:
- file_ch = self.file_bin.with_suffix(".ch")
-
- r = mtscomp.decompress(self.file_bin, file_ch, **kwargs)
+ ch_file = self._parse_ch_file(file_ch)
+ r = mtscomp.decompress(self.file_bin, ch_file, **kwargs)
  r.close()
  if not keep_original:
  self.close()
@@ -420,9 +414,10 @@ class Reader:
  bin_file = Path(self.file_bin).with_suffix(".bin")
  else:
  scratch_dir.mkdir(exist_ok=True, parents=True)
- bin_file = scratch_dir / Path(self.file_bin).with_suffix(".bin").name
- file_meta_scratch = scratch_dir / file_meta.name
- shutil.copy(self.file_meta_data, file_meta_scratch)
+ bin_file = (
+ Path(scratch_dir).joinpath(self.file_bin.name).with_suffix(".bin")
+ )
+ shutil.copy(self.file_meta_data, bin_file.parent / self.file_meta_data.name)
  if not bin_file.exists():
  t0 = time.time()
  _logger.info("File is compressed, decompressing to a temporary file...")
@@ -464,6 +459,12 @@ class Reader:
  log_func(f"SHA1 computed: {sc}")
  return sm == sc

+ def _parse_ch_file(self, ch_file=None):
+ ch_file = (
+ _get_companion_file(self.file_bin, ".ch") if ch_file is None else ch_file
+ )
+ return ch_file
+

  class OnlineReader(Reader):
  @property
@@ -999,7 +1000,7 @@ def _mock_spikeglx_file(
  meta_file,
  ns,
  nc,
- sync_depth,
+ sync_depth=16,
  random=False,
  int2volts=0.6 / 32768,
  corrupt=False,
tests/integration/test_destripe.py CHANGED
@@ -4,6 +4,7 @@ import logging
  import shutil
  import unittest
  from pathlib import Path
+ import pandas as pd

  import neuropixel
  import spikeglx
@@ -84,11 +85,11 @@ class TestEphysSpikeSortingMultiProcess(unittest.TestCase):
  shutil.rmtree(self.file_path.parent)

  def _assert_qc(self):
- sr = spikeglx.Reader(self.file_path)
- saturated = np.load(
- self.file_path.parent.joinpath("_iblqc_ephysSaturation.samples.npy")
+ df_saturated = pd.read_parquet(
+ self.file_path.parent.joinpath("_iblqc_ephysSaturation.samples.pqt")
  )
- self.assertEqual(sr.ns, saturated.size)
+ self.assertTrue(df_saturated.shape[1] == 2)
+
  self.assertTrue(
  self.file_path.parent.joinpath("_iblqc_ephysTimeRmsAP.rms.npy").exists()
  )
tests/unit/test_spikeglx.py CHANGED
@@ -2,6 +2,7 @@ from pathlib import Path
  import shutil
  import tempfile
  import unittest
+ import uuid

  import numpy as np
  from iblutil.io import hashfile
@@ -671,9 +672,29 @@ class TestsBasicReader(unittest.TestCase):
  Tests the basic usage where there is a flat binary and no metadata associated


- def test_get_companion_file(self):
- import uuid
+ def test_integration_companion_files_cbin(self):
+ with tempfile.TemporaryDirectory() as td:
+ bin_file_orig = Path(td) / "toto.ap.bin"
+ meta_file = Path(td) / f"toto.ap.{str(uuid.uuid4())}.meta"
+ ch_file = Path(td) / f"toto.ap.{str(uuid.uuid4())}.ch"
+ spikeglx._mock_spikeglx_file(
+ bin_file_orig,
+ meta_file=Path(TEST_PATH).joinpath("sample3B_g0_t0.imec1.ap.meta"),
+ ns=90_000,
+ nc=385,
+ )
+ sr = spikeglx.Reader(bin_file_orig)
+ sr.compress_file(keep_original=False)
+ cbin_file = Path(td) / f"toto.ap.{str(uuid.uuid4())}.cbin"
+ shutil.move(bin_file_orig.with_suffix(".cbin"), cbin_file)
+ shutil.move(bin_file_orig.with_suffix(".ch"), ch_file)
+ shutil.move(bin_file_orig.with_suffix(".meta"), meta_file)
+ sr = spikeglx.Reader(cbin_file)
+ self.assertEqual(sr.file_bin, cbin_file)
+ self.assertEqual(sr.file_meta_data, meta_file)
+ self.assertEqual(sr.ch_file, ch_file)

+ def test_get_companion_file(self):
  with tempfile.TemporaryDirectory() as td:
  sglx_file = Path(td) / f"sample3A_g0_t0.imec.ap.{str(uuid.uuid4())}.bin"
  meta_file = Path(td) / f"sample3A_g0_t0.imec.ap.{str(uuid.uuid4())}.meta"
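The new `test_integration_companion_files_cbin` exercises companion-file resolution for compressed recordings: opening the `.cbin` is enough, and the matching `.meta` and `.ch` files are located automatically even when their names carry extra parts such as a UUID. A minimal sketch, with an illustrative path:

```python
from pathlib import Path
import spikeglx

cbin_file = Path('/datadisk/neuropixel/toto.ap.0f3f2c2a.cbin')  # illustrative path
sr = spikeglx.Reader(cbin_file)
print(sr.file_bin)        # the compressed binary itself
print(sr.file_meta_data)  # companion .meta resolved next to the .cbin
print(sr.ch_file)         # companion .ch resolved next to the .cbin
```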
tests/unit/test_utils.py CHANGED
@@ -69,8 +69,8 @@ class TestSyncTimestamps(unittest.TestCase):

  class TestParabolicMax(unittest.TestCase):
  # expected values
- maxi = np.array([np.nan, 0, 3.04166667, 3.04166667, 5, 5])
- ipeak = np.array([np.nan, 0, 5.166667, 2.166667, 0, 7])
+ maxi = np.array([0.0, 0.0, 3.04166667, 3.04166667, 5, 5])
+ ipeak = np.array([0.0, 0.0, 5.166667, 2.166667, 0, 7])
  # input
  x = np.array(
  [
@@ -131,6 +131,24 @@ class TestSaturation(unittest.TestCase):
  self.assertGreater(np.sum(saturated), 5)
  self.assertGreater(np.sum(mute == 0), np.sum(saturated))

+ def test_saturation_intervals_output(self):
+ saturation = np.zeros(50_000, dtype=bool)
+ # we test empty files, make sure we can read/write from empty parquet
+ with tempfile.TemporaryDirectory() as temp_dir:
+ # Create a file path within the temporary directory
+ temp_file = Path(temp_dir).joinpath("saturation.pqt")
+ df_nothing = ibldsp.voltage.saturation_samples_to_intervals(
+ saturation, output_file=Path(temp_dir).joinpath("saturation.pqt")
+ )
+ df_nothing2 = pd.read_parquet(temp_file)
+ self.assertEqual(df_nothing.shape[0], 0)
+ self.assertEqual(df_nothing2.shape[0], 0)
+ # for the case with saturation intervals, we simply test the number of rows correspond to the events
+ saturation[3441:3509] = True
+ saturation[45852:45865] = True
+ df_sat = ibldsp.voltage.saturation_samples_to_intervals(saturation)
+ self.assertEqual(81, np.sum(df_sat["stop_sample"] - df_sat["start_sample"]))
+

  class TestCadzow(unittest.TestCase):
  def test_trajectory_matrixes(self):
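The saturation QC is now written as a parquet table of intervals rather than a per-sample numpy array. A short sketch of reading it back; the column names come from the test above, while the directory path is illustrative:

```python
from pathlib import Path
import pandas as pd

qc_path = Path('/path/to/qc_output')  # illustrative directory
df_sat = pd.read_parquet(qc_path / '_iblqc_ephysSaturation.samples.pqt')
# one row per saturation event, bounded in samples
n_saturated = int((df_sat['stop_sample'] - df_sat['start_sample']).sum())
print(f'{len(df_sat)} saturation events, {n_saturated} saturated samples')
```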