ibl-neuropixel 1.9.3__py3-none-any.whl → 1.10.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,29 +1,63 @@
 import random
 import shutil
 import unittest
+import tempfile
 from pathlib import Path
 
 import numpy as np
 from neuropixel import NP2Converter, NP2Reconstructor
 import spikeglx
 
+FIXTURE_PATH = Path(__file__).parents[1].joinpath("fixtures", "np2split")
+
 
 class BaseEphysNP2(unittest.TestCase):
-    data_path = Path(__file__).parents[1].joinpath("fixtures", "np2split")
+    nc = None
+    folder_test_case = None
 
     @classmethod
     def setUpClass(cls):
-        dat = np.tile(np.arange(385)[np.newaxis, :] + 10000, [30000, 1]).astype(
+        # create a temporary directory for the test
+        cls._temp_dir_obj = tempfile.TemporaryDirectory(prefix="test_np2_")
+        cls.data_path = Path(cls._temp_dir_obj.name)
+        # we create a small dummy spikeglx file for testing
+        cls.orig_file = cls.data_path.joinpath("_spikeglx_ephysData_g0_t0.imec0.ap.bin")
+        # the metadata file is copied from fixtures
+        cls.orig_meta_file = cls.orig_file.with_suffix(".meta")
+        shutil.copy(
+            FIXTURE_PATH.joinpath(
+                cls.folder_test_case, "_spikeglx_ephysData_g0_t0.imec0.ap.meta"
+            ),
+            cls.orig_meta_file,
+        )
+        # generation of a dummy binary file for testing
+        dat = np.tile(np.arange(cls.nc)[np.newaxis, :] + 10000, [30000, 1]).astype(
             np.int16
         )
-        with open(
-            cls.data_path.joinpath("_spikeglx_ephysData_g0_t0.imec0.ap.bin"), "bw+"
-        ) as fid:
+        with open(cls.orig_file, "bw+") as fid:
             dat.tofile(fid)
 
     @classmethod
     def tearDownClass(cls):
-        cls.data_path.joinpath("_spikeglx_ephysData_g0_t0.imec0.ap.bin").unlink()
+        cls._temp_dir_obj.cleanup()
+
+    def setUp(self):
+        """
+        :param folder_test_case: NP1_meta | NP21_meta | NP24_meta
+        :return:
+        """
+        self._temp_dir_obj_case = tempfile.TemporaryDirectory(prefix="test_np2_case_")
+        current_case_path = Path(self._temp_dir_obj_case.name).joinpath("probe00")
+        current_case_path.mkdir(parents=True, exist_ok=True)
+        self.file_path = current_case_path.joinpath(self.orig_file.name)
+        self.meta_file = self.file_path.with_suffix(".meta")
+        shutil.copy(self.orig_file, self.file_path)
+        shutil.copy(self.orig_meta_file, self.meta_file)
+        self.sglx_instances = []
+
+    def tearDown(self):
+        self._temp_dir_obj_case.cleanup()
+        _ = [sglx.close() for sglx in self.sglx_instances]
 
 
 class TestNeuropixel2ConverterNP24(BaseEphysNP2):
@@ -31,30 +65,8 @@ class TestNeuropixel2ConverterNP24(BaseEphysNP2):
     Check NP2 converter with NP2.4 type probes
     """
 
-    def setUp(self) -> None:
-        file_path = self.data_path.joinpath("_spikeglx_ephysData_g0_t0.imec0.ap.bin")
-        self.file_path = file_path.parent.parent.joinpath(
-            "probe00_temp", file_path.name
-        )
-        self.file_path.parent.mkdir(exist_ok=True, parents=True)
-        meta_file = file_path.parent.joinpath(
-            "NP24_meta", "_spikeglx_ephysData_g0_t0.imec0.ap.meta"
-        )
-        self.meta_file = self.file_path.parent.joinpath(
-            "_spikeglx_ephysData_g0_t0.imec0.ap.meta"
-        )
-        shutil.copy(file_path, self.file_path)
-        shutil.copy(meta_file, self.meta_file)
-        self.sglx_instances = []
-
-    def tearDown(self):
-        _ = [sglx.close() for sglx in self.sglx_instances]
-        # here should look for any directories with test in it and delete
-        test_dir = list(self.file_path.parent.parent.glob("*test*"))
-        _ = [shutil.rmtree(test) for test in test_dir]
-        # For case where we have deleted already as part of test
-        if self.file_path.parent.exists():
-            shutil.rmtree(self.file_path.parent)
+    nc = 385
+    folder_test_case = "NP24_meta"
 
     def testDecimate(self):
         """
@@ -62,7 +74,6 @@ class TestNeuropixel2ConverterNP24(BaseEphysNP2):
         window lengths for iterating through data
         :return:
         """
-
         FS = 30000
         np_a = NP2Converter(self.file_path, post_check=False, compress=False)
         np_a.init_params(nwindow=0.3 * FS, extra="_0_5s_test", nshank=[0])
@@ -189,7 +200,6 @@ class TestNeuropixel2ConverterNP24(BaseEphysNP2):
         Check that if the data has been incorrectly split we get a warning error
         :return:
         """
-
         np_conv = NP2Converter(self.file_path, compress=False)
         np_conv.init_params(extra="_test")
         status = np_conv.process()
@@ -204,10 +214,7 @@ class TestNeuropixel2ConverterNP24(BaseEphysNP2):
         # Now that we have changed the file we expect an assertion error when we do the check
         with self.assertRaises(AssertionError) as context:
             np_conv.check_NP24()
-        self.assertTrue(
-            "data in original file and split files do no match"
-            in str(context.exception)
-        )
+        self.assertTrue("Arrays are not equal" in str(context.exception))
 
     def testFromCompressed(self):
         """
@@ -232,26 +239,8 @@ class TestNeuropixel2ConverterNP21(BaseEphysNP2):
     Check NP2 converter with NP2.1 type probes
     """
 
-    def setUp(self) -> None:
-        file_path = self.data_path.joinpath("_spikeglx_ephysData_g0_t0.imec0.ap.bin")
-        self.file_path = file_path.parent.parent.joinpath(
-            "probe00_temp", file_path.name
-        )
-        self.file_path.parent.mkdir(exist_ok=True, parents=True)
-        meta_file = file_path.parent.joinpath(
-            "NP21_meta", "_spikeglx_ephysData_g0_t0.imec0.ap.meta"
-        )
-        self.meta_file = self.file_path.parent.joinpath(
-            "_spikeglx_ephysData_g0_t0.imec0.ap.meta"
-        )
-        shutil.copy(file_path, self.file_path)
-        shutil.copy(meta_file, self.meta_file)
-        self.sglx_instances = []
-
-    def tearDown(self):
-        _ = [sglx.close() for sglx in self.sglx_instances]
-        # here should look for anything with test in it and delete
-        shutil.rmtree(self.file_path.parent)
+    nc = 385
+    folder_test_case = "NP21_meta"
 
     def testProcessNP21(self):
         """
@@ -293,29 +282,13 @@ class TestNeuropixel2ConverterNP21(BaseEphysNP2):
         np_conv.sr.close()
 
 
-class TestNeuropixel2ConverterNP1(NP2Converter):
+class TestNeuropixel2ConverterNP1(BaseEphysNP2):
     """
     Check NP2 converter with NP1 type probes
     """
 
-    def setUp(self) -> None:
-        self.file_path = self.data_path.joinpath(
-            "_spikeglx_ephysData_g0_t0.imec0.ap.bin"
-        )
-        meta_file = self.file_path.parent.joinpath(
-            "NP1_meta", "_spikeglx_ephysData_g0_t0.imec0.ap.meta"
-        )
-        self.meta_file = self.file_path.with_suffix(".meta")
-        # Back up current meta file
-        shutil.move(self.meta_file, self.meta_file.with_suffix(".meta.bk"))
-        # Copy the neuropixels v1 meta file into the probe00 folder
-        shutil.copy(meta_file, self.meta_file)
-        self.sglx_instances = []
-        self.temp_directories = []
-
-    def tearDown(self):
-        # replace meta file with backup
-        shutil.move(self.meta_file.with_suffix(".meta.bk"), self.meta_file)
+    nc = 385
+    folder_test_case = "NP1_meta"
 
     def testProcessNP1(self):
         """
@@ -327,38 +300,15 @@ class TestNeuropixel2ConverterNP1(NP2Converter):
 
 
 class TestNeuropixelReconstructor(BaseEphysNP2):
-    def setUp(self) -> None:
-        self.orig_file = self.data_path.joinpath(
-            "_spikeglx_ephysData_g0_t0.imec0.ap.bin"
-        )
-        self.file_path = self.orig_file.parent.parent.joinpath(
-            "probe00_temp", self.orig_file.name
-        )
-        self.file_path.parent.mkdir(exist_ok=True, parents=True)
-        self.orig_meta_file = self.orig_file.parent.joinpath(
-            "NP24_meta", "_spikeglx_ephysData_g0_t0.imec0.ap.meta"
-        )
-        self.meta_file = self.file_path.parent.joinpath(
-            "_spikeglx_ephysData_g0_t0.imec0.ap.meta"
-        )
-        shutil.copy(self.orig_file, self.file_path)
-        shutil.copy(self.orig_meta_file, self.meta_file)
-        self.sglx_instances = []
-
-    def tearDown(self):
-        _ = [sglx.close() for sglx in self.sglx_instances]
-        # here should look for any directories with test in it and delete
-        test_dir = list(self.file_path.parent.parent.glob("*test*"))
-        _ = [shutil.rmtree(test) for test in test_dir]
-        # For case where we have deleted already as part of test
-        if self.file_path.parent.exists():
-            shutil.rmtree(self.file_path.parent)
+    nc = 385
+    folder_test_case = "NP24_meta"
 
     def test_reconstruction(self):
         # First split the probes
         np_conv = NP2Converter(self.file_path)
         np_conv.init_params(extra="_test")
-        _ = np_conv.process()
+        status = np_conv.process()
+        self.assertTrue(status)
        np_conv.sr.close()
 
         # Delete the original file
@@ -367,7 +317,7 @@ class TestNeuropixelReconstructor(BaseEphysNP2):
 
         # Now reconstruct
         np_recon = NP2Reconstructor(
-            self.file_path.parent.parent, pname="probe00_temp", compress=True
+            self.file_path.parents[1], pname="probe00", compress=True
         )
         status = np_recon.process()
 
@@ -388,7 +338,27 @@ class TestNeuropixelReconstructor(BaseEphysNP2):
         self.assertEqual(orig_meta, recon_meta)
 
 
-if __name__ == "__main__":
-    import unittest
+class TestNeuropixel2ConverterNP2QB(BaseEphysNP2):
+    nc = 385 * 4
+    folder_test_case = "NP2QB_meta"
+
+    def testProcessNP2QB(self):
+        # Make sure normal workflow runs without problems
+        np_conv = NP2Converter(self.file_path)
+        np_conv.init_params(extra="_test")
+        status = np_conv.process()
+        self.assertFalse(np_conv.already_exists)
+        self.assertTrue(status)
 
-    unittest.main(exit=False)
+        # Test a random ap metadata file and make sure it all makes sense
+        shank_n = random.randint(0, 3)
+        sr_ap = spikeglx.Reader(
+            np_conv.shank_info[f"shank{shank_n}"]["ap_file"], sort=False
+        )
+        np.testing.assert_array_equal(sr_ap.meta["acqApLfSy"], [384, 0, 1])
+        np.testing.assert_array_equal(sr_ap.meta["snsApLfSy"], [384, 0, 1])
+        self.assertEqual(sr_ap.meta["nSavedChans"], 385)
+        self.assertEqual(sr_ap.meta["snsSaveChanSubset"], "0:384")
+        self.assertEqual(sr_ap.meta["NP2.4_shank"], shank_n)
+        self.assertEqual(sr_ap.meta["original_meta"], "False")
+        sr_ap.close()
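
Note on the hunks above: the per-class setUp/tearDown fixtures are replaced by a single parametrised BaseEphysNP2, which writes a throwaway binary into a tempfile.TemporaryDirectory and copies the probe-specific .meta fixture; a concrete test class now only declares the channel count nc and the fixture folder folder_test_case. A minimal sketch of how an additional probe case would plug into this pattern, assuming it lives in the same test module (the NP99_meta folder and class name are hypothetical, purely illustrative):

    class TestNeuropixel2ConverterNP99(BaseEphysNP2):
        # hypothetical probe flavour reusing the shared temp-dir fixtures
        nc = 385                        # channels written to the dummy .bin by setUpClass
        folder_test_case = "NP99_meta"  # fixture folder containing the .meta to copy

        def testProcess(self):
            np_conv = NP2Converter(self.file_path, compress=False)
            np_conv.init_params(extra="_test")
            self.assertTrue(np_conv.process())
            np_conv.sr.close()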
@@ -1,5 +1,6 @@
 import neuropixel
 import numpy as np
+import re
 
 
 def test_sites_coordinates_deprecated():
@@ -82,3 +83,39 @@ def test_geom_npultra():
     h = neuropixel.trace_header("NPultra")
     for k, v in gt.items():
         np.testing.assert_equal(v, h[k])
+
+
+def test_geom_quad():
+    assert "NP2QB" in neuropixel.CHANNEL_GRID.keys()
+    neuropixel.adc_shifts(version="NP2QB")
+    pass
+
+
+def test_get_probe_table():
+    df_tables, probe_table = neuropixel.load_spike_glx_probe_table()
+    df_tables = df_tables.loc[df_tables["is_commercial"] == "Y", :]
+    df_tables["mux_table_format_type"].unique()
+
+    # Get the the mux tables from the probe tables
+    def get_mux_table(mux_string):
+        groups = re.findall(r"\(([0-9\s]+)\)", mux_string)
+        mux = np.array([[int(x) for x in g.split()] for g in groups], dtype=np.int32)
+        adc_vector = np.zeros(mux.size)
+        for i in range(mux.shape[1]):
+            adc_vector[mux[:, i]] = i
+        return adc_vector
+
+    # Check neuropixel 1
+    th1 = neuropixel.trace_header(version=1)
+    mux1 = get_mux_table(probe_table["z_mux_tables"]["mux_np1000"])
+    np.testing.assert_allclose(np.diff(np.c_[th1["adc"], mux1], axis=1), 0)
+
+    # Check Neuropixel 2
+    th2 = neuropixel.trace_header(version=2)
+    mux2 = get_mux_table(probe_table["z_mux_tables"]["mux_np2000"])
+    np.testing.assert_allclose(np.diff(np.c_[th2["adc"], mux2], axis=1), 0)
+
+    # Check Neuropixel 2 Quadbase
+    thqb = neuropixel.trace_header(version="NP2QB")
+    muxqb = get_mux_table(probe_table["z_mux_tables"]["mux_np2020"])
+    np.testing.assert_allclose(np.diff(np.c_[thqb["adc"], muxqb], axis=1), 0)
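
The new test_get_probe_table check relies on the mux tables shipped with the SpikeGLX probe table being serialised as parenthesised groups of whitespace-separated channel indices; get_mux_table inverts that grouping so each channel is mapped to the column it occupies within its group, which the test then compares against trace_header()["adc"]. A toy illustration of just that parsing step (the input string is made up, not a real probe-table entry):

    import re
    import numpy as np

    def get_mux_table(mux_string):
        # each "(...)" group lists channels sharing one multiplexing line
        groups = re.findall(r"\(([0-9\s]+)\)", mux_string)
        mux = np.array([[int(x) for x in g.split()] for g in groups], dtype=np.int32)
        # invert the table: channel -> column index within its group
        adc_vector = np.zeros(mux.size)
        for i in range(mux.shape[1]):
            adc_vector[mux[:, i]] = i
        return adc_vector

    print(get_mux_table("(0 1)(2 3)(4 5)"))  # -> [0. 1. 0. 1. 0. 1.]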
@@ -382,11 +382,11 @@ class TestsSpikeGLX_Meta(unittest.TestCase):
             )
             self.assert_read_glx(bin_3b)
 
-    def test_read_NP20_prototype(self):
+    def test_read_NHP_prototype(self):
         with tempfile.TemporaryDirectory(prefix="glx_test") as tdir:
             bin_3b = spikeglx._mock_spikeglx_file(
-                Path(tdir).joinpath("sampleNP2.1_prototype.ap.bin"),
-                self.workdir / "sampleNP2.1_prototype.ap.meta",
+                Path(tdir).joinpath("sampleNHPlong_prototype.ap.bin"),
+                self.workdir / "sampleNHPlong_prototype.ap.meta",
                 ns=32,
                 nc=385,
                 sync_depth=16,
@@ -516,7 +516,7 @@ class TestsSpikeGLX_Meta(unittest.TestCase):
             )
             for k in h_expected.keys():
                 np.testing.assert_equal(
-                    h_expected[k][sr.raw_channel_order[: -sr.nsync]], th[k]
+                    th[k], h_expected[k][sr.raw_channel_order[: -sr.nsync]]
                 )
 
     def testGetSerialNumber(self):
@@ -557,7 +557,16 @@ class TestsSpikeGLX_Meta(unittest.TestCase):
                    spikeglx._get_neuropixel_major_version_from_meta(md),
                )
                continue
-
+            elif "NP2QB" in meta_data_file.name:
+                self.assertEqual(
+                    "NP2QB", spikeglx._get_neuropixel_version_from_meta(md)
+                )
+                continue
+            elif "NHPlong" in meta_data_file.name:
+                self.assertEqual(
+                    "NHPlong", spikeglx._get_neuropixel_version_from_meta(md)
+                )
+                continue
             # for ap and lf look for version number
             # test getting revision
             revision = meta_data_file.name[6:8]
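
The added branches extend the version-detection test to the two probe flavours introduced in this release, NP2QB (Neuropixels 2.0 quad-base) and NHPlong. A minimal sketch of the code path being exercised, assuming a SpikeGLX .meta file is available locally (the fixture path below is hypothetical):

    from pathlib import Path
    import spikeglx

    meta_file = Path("fixtures/sampleNP2QB_g0_t0.imec0.ap.meta")  # hypothetical fixture
    md = spikeglx.read_meta_data(meta_file)
    # string tag, e.g. "NP2QB" or "NHPlong" for the new flavours
    version = spikeglx._get_neuropixel_version_from_meta(md)
    major = spikeglx._get_neuropixel_major_version_from_meta(md)
    print(meta_file.name, version, major)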
@@ -580,12 +589,16 @@ class TestsSpikeGLX_Meta(unittest.TestCase):
                continue
            md = spikeglx.read_meta_data(meta_data_file)
            cg = spikeglx._conversion_sample2v_from_meta(md)
+            if spikeglx._get_neuropixel_version_from_meta(md) == "NP2QB":
+                continue
            if "NP2" in spikeglx._get_neuropixel_version_from_meta(md):
+                gain = spikeglx._get_gain_from_meta(md)
                maxint = spikeglx._get_max_int_from_meta(md)
+                print(meta_data_file.name, maxint, gain)
                self.assertEqual(int(md.get("imMaxInt")), maxint)
                i2v = md.get("imAiRangeMax") / maxint
-                self.assertTrue(np.all(cg["lf"][0:-1] == i2v / 80))
-                self.assertTrue(np.all(cg["ap"][0:-1] == i2v / 80))
+                self.assertTrue(np.all(cg["lf"][0:-1] == i2v / gain))
+                self.assertTrue(np.all(cg["ap"][0:-1] == i2v / gain))
            else:
                maxint = spikeglx._get_max_int_from_meta(md)
                self.assertEqual(512, maxint)
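
The gain hunk above replaces the hard-coded NP2 amplifier gain of 80 with the value read from the metadata, so the expected scaling becomes imAiRangeMax / imMaxInt / gain; NP2QB files are skipped in this particular check. A sketch of that cross-check under the same assumptions, for a single NP2 .ap.meta file (the path is hypothetical):

    from pathlib import Path
    import numpy as np
    import spikeglx

    md = spikeglx.read_meta_data(Path("sampleNP2_g0_t0.imec0.ap.meta"))  # hypothetical path
    cg = spikeglx._conversion_sample2v_from_meta(md)   # per-channel sample -> volts factors
    maxint = spikeglx._get_max_int_from_meta(md)       # equals int(md.get("imMaxInt"))
    gain = spikeglx._get_gain_from_meta(md)            # previously assumed to be 80
    i2v = md.get("imAiRangeMax") / maxint
    assert np.all(cg["ap"][0:-1] == i2v / gain)        # last (sync) channel excluded
    assert np.all(cg["lf"][0:-1] == i2v / gain)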
@@ -0,0 +1,104 @@
+import datetime
+import unittest
+import tempfile
+from pathlib import Path
+
+import numpy as np
+import pandas as pd
+
+import spikeglx
+import ibldsp.sync
+
+TEST_PATH = Path(__file__).parents[1].joinpath("fixtures")
+
+
+class TestSyncTimestamps(unittest.TestCase):
+    def test_deprecation(self):
+        if datetime.datetime.now() > datetime.datetime(2026, 10, 12):
+            raise NotImplementedError(
+                "Time to deprecate ibldsp.utils.sync_timestamps()"
+            )
+
+    def test_sync_timestamps_linear(self):
+        ta = np.cumsum(np.abs(np.random.randn(100))) * 10
+        tb = ta * 1.0001 + 100
+        fcn, drif, ia, ib = ibldsp.sync.sync_timestamps(
+            ta, tb, return_indices=True, linear=True
+        )
+        np.testing.assert_almost_equal(drif, 100)
+        np.testing.assert_almost_equal(tb, fcn(ta))
+
+    def test_timestamps(self):
+        np.random.seed(4132)
+        n = 50
+        drift = 17.14
+        offset = 34.323
+        tsa = np.cumsum(np.random.random(n) * 10)
+        tsb = tsa * (1 + drift / 1e6) + offset
+
+        # test linear drift
+        _fcn, _drift = ibldsp.sync.sync_timestamps(tsa, tsb)
+        assert np.all(np.isclose(_fcn(tsa), tsb))
+        assert np.isclose(drift, _drift)
+
+        # test missing indices on a
+        imiss = np.setxor1d(np.arange(n), [1, 2, 34, 35])
+        _fcn, _drift, _ia, _ib = ibldsp.sync.sync_timestamps(
+            tsa[imiss], tsb, return_indices=True
+        )
+        assert np.all(np.isclose(_fcn(tsa[imiss[_ia]]), tsb[_ib]))
+
+        # test missing indices on b
+        _fcn, _drift, _ia, _ib = ibldsp.sync.sync_timestamps(
+            tsa, tsb[imiss], return_indices=True
+        )
+        assert np.all(np.isclose(_fcn(tsa[_ia]), tsb[imiss[_ib]]))
+
+        # test missing indices on both
+        imiss2 = np.setxor1d(np.arange(n), [14, 17])
+        _fcn, _drift, _ia, _ib = ibldsp.sync.sync_timestamps(
+            tsa[imiss], tsb[imiss2], return_indices=True
+        )
+        assert np.all(np.isclose(_fcn(tsa[imiss[_ia]]), tsb[imiss2[_ib]]))
+
+        # test timestamps with huge offset (previously caused ArrayMemoryError)
+        # tsb -= 1e15
+        # _fcn, _drift = utils.sync_timestamps(tsa, tsb)
+        # assert np.all(np.isclose(_fcn(tsa), tsb))
+
+
+class TestSyncSpikeGlx:
+    # def setUp(self):
+    #     self.workdir = Path(__file__).parents[1] / 'fixtures' / 'sync_ephys_fpga'
+    #     self.meta_files = list(Path.glob(self.workdir, '*.meta'))
+
+    def test_sync_nidq(self):
+        self.sync_gen(fn="sample3B_g0_t0.nidq.meta", ns=32, nc=2, sync_depth=8)
+
+    def test_sync_NP1(self):
+        self.sync_gen(fn="sample3B_g0_t0.imec1.ap.meta", ns=32, nc=385, sync_depth=16)
+
+    def sync_gen(self, fn, ns, nc, sync_depth):
+        # nidq has 1 analog and 1 digital sync channels
+        with tempfile.TemporaryDirectory() as tdir:
+            ses_path = Path(tdir).joinpath("raw_ephys_data")
+            ses_path.mkdir(parents=True, exist_ok=True)
+            meta_file = ses_path.joinpath(fn)
+            bin_file = meta_file.with_suffix(".bin")
+            import shutil
+
+            shutil.copy(TEST_PATH.joinpath(fn), meta_file)
+            _ = spikeglx._mock_spikeglx_file(
+                bin_file,
+                meta_file=TEST_PATH.joinpath(fn),
+                ns=ns,
+                nc=nc,
+                sync_depth=sync_depth,
+            )
+            sr = spikeglx.Reader(bin_file)
+            # for a nidq file, there can be additional analog sync channels shown in the sync
+            csel = spikeglx._get_analog_sync_trace_indices_from_meta(sr.meta)
+            df_sync = pd.DataFrame(ibldsp.sync.extract_spikeglx_sync(bin_file))
+            np.testing.assert_equal(
+                len(df_sync["channels"].unique()), sync_depth + len(csel)
+            )
tests/unit/test_utils.py CHANGED
@@ -18,55 +18,6 @@ import shutil
 FIXTURE_PATH = Path(__file__).parents[1].joinpath("fixtures")
 
 
-class TestSyncTimestamps(unittest.TestCase):
-    def test_sync_timestamps_linear(self):
-        ta = np.cumsum(np.abs(np.random.randn(100))) * 10
-        tb = ta * 1.0001 + 100
-        fcn, drif, ia, ib = utils.sync_timestamps(
-            ta, tb, return_indices=True, linear=True
-        )
-        np.testing.assert_almost_equal(drif, 100)
-        np.testing.assert_almost_equal(tb, fcn(ta))
-
-    def test_timestamps(self):
-        np.random.seed(4132)
-        n = 50
-        drift = 17.14
-        offset = 34.323
-        tsa = np.cumsum(np.random.random(n) * 10)
-        tsb = tsa * (1 + drift / 1e6) + offset
-
-        # test linear drift
-        _fcn, _drift = utils.sync_timestamps(tsa, tsb)
-        assert np.all(np.isclose(_fcn(tsa), tsb))
-        assert np.isclose(drift, _drift)
-
-        # test missing indices on a
-        imiss = np.setxor1d(np.arange(n), [1, 2, 34, 35])
-        _fcn, _drift, _ia, _ib = utils.sync_timestamps(
-            tsa[imiss], tsb, return_indices=True
-        )
-        assert np.all(np.isclose(_fcn(tsa[imiss[_ia]]), tsb[_ib]))
-
-        # test missing indices on b
-        _fcn, _drift, _ia, _ib = utils.sync_timestamps(
-            tsa, tsb[imiss], return_indices=True
-        )
-        assert np.all(np.isclose(_fcn(tsa[_ia]), tsb[imiss[_ib]]))
-
-        # test missing indices on both
-        imiss2 = np.setxor1d(np.arange(n), [14, 17])
-        _fcn, _drift, _ia, _ib = utils.sync_timestamps(
-            tsa[imiss], tsb[imiss2], return_indices=True
-        )
-        assert np.all(np.isclose(_fcn(tsa[imiss[_ia]]), tsb[imiss2[_ib]]))
-
-        # test timestamps with huge offset (previously caused ArrayMemoryError)
-        # tsb -= 1e15
-        # _fcn, _drift = utils.sync_timestamps(tsa, tsb)
-        # assert np.all(np.isclose(_fcn(tsa), tsb))
-
-
 class TestParabolicMax(unittest.TestCase):
     # expected values
     maxi = np.array([0.0, 0.0, 3.04166667, 3.04166667, 5, 5])