myokit 1.35.0__py3-none-any.whl → 1.35.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47) hide show
  1. myokit/__init__.py +11 -14
  2. myokit/__main__.py +0 -3
  3. myokit/_config.py +1 -3
  4. myokit/_datablock.py +914 -12
  5. myokit/_model_api.py +1 -3
  6. myokit/_myokit_version.py +1 -1
  7. myokit/_protocol.py +14 -28
  8. myokit/_sim/cable.c +1 -1
  9. myokit/_sim/cable.py +3 -2
  10. myokit/_sim/cmodel.h +1 -0
  11. myokit/_sim/cvodessim.c +79 -42
  12. myokit/_sim/cvodessim.py +20 -8
  13. myokit/_sim/fiber_tissue.c +1 -1
  14. myokit/_sim/fiber_tissue.py +3 -2
  15. myokit/_sim/openclsim.c +1 -1
  16. myokit/_sim/openclsim.py +8 -11
  17. myokit/_sim/pacing.h +121 -106
  18. myokit/_unit.py +1 -1
  19. myokit/formats/__init__.py +178 -0
  20. myokit/formats/axon/_abf.py +911 -841
  21. myokit/formats/axon/_atf.py +62 -59
  22. myokit/formats/axon/_importer.py +2 -2
  23. myokit/formats/heka/__init__.py +38 -0
  24. myokit/formats/heka/_importer.py +39 -0
  25. myokit/formats/heka/_patchmaster.py +2512 -0
  26. myokit/formats/wcp/_wcp.py +318 -133
  27. myokit/gui/datablock_viewer.py +144 -77
  28. myokit/gui/datalog_viewer.py +212 -231
  29. myokit/tests/ansic_event_based_pacing.py +3 -3
  30. myokit/tests/{ansic_fixed_form_pacing.py → ansic_time_series_pacing.py} +6 -6
  31. myokit/tests/data/formats/abf-v2.abf +0 -0
  32. myokit/tests/test_datablock.py +84 -0
  33. myokit/tests/test_datalog.py +2 -1
  34. myokit/tests/test_formats_axon.py +589 -136
  35. myokit/tests/test_formats_wcp.py +191 -22
  36. myokit/tests/test_pacing_system_c.py +51 -23
  37. myokit/tests/test_pacing_system_py.py +18 -0
  38. myokit/tests/test_simulation_1d.py +62 -22
  39. myokit/tests/test_simulation_cvodes.py +52 -3
  40. myokit/tests/test_simulation_fiber_tissue.py +35 -4
  41. myokit/tests/test_simulation_opencl.py +28 -4
  42. {myokit-1.35.0.dist-info → myokit-1.35.2.dist-info}/LICENSE.txt +1 -1
  43. {myokit-1.35.0.dist-info → myokit-1.35.2.dist-info}/METADATA +1 -1
  44. {myokit-1.35.0.dist-info → myokit-1.35.2.dist-info}/RECORD +47 -44
  45. {myokit-1.35.0.dist-info → myokit-1.35.2.dist-info}/WHEEL +0 -0
  46. {myokit-1.35.0.dist-info → myokit-1.35.2.dist-info}/entry_points.txt +0 -0
  47. {myokit-1.35.0.dist-info → myokit-1.35.2.dist-info}/top_level.txt +0 -0
@@ -1,8 +1,8 @@
1
1
  #
2
2
  # This module reads files in Axon Binary File format v1 or v2 used by Axon
3
3
  # Technologies and Molecular Devices.
4
- # The v1 format was used until Clampex version 9. Clampex 10 and onwards use
5
- # the v2 format.
4
+ # The v1 format was used until Clampex version 9.
5
+ # Clampex 10 and onwards use the v2 format.
6
6
  #
7
7
  # WARNING: This file hasn't been extensively tested.
8
8
  #
@@ -57,13 +57,6 @@
57
57
  # an 'epoch'. The protocol section of a file defines one or more stimuli, each
58
58
  # containing a list of epochs.
59
59
  #
60
- # Conversion to myokit formats
61
- # ----------------------------
62
- # There is no problem-free mapping of ABF data onto myokit structures, such as
63
- # the simulation log. A fundamental difference is that "sampling" during a
64
- # simulation happens at the same time for every signal. Channels in an ABF file
65
- # each have their own sampling rate.
66
- #
67
60
  #--------------------------------- license ----------------------------------
68
61
  #
69
62
  # This file is part of Myokit.
@@ -71,12 +64,18 @@
71
64
  #
72
65
  #--------------------------------- credits ----------------------------------
73
66
  #
74
- # This module is derived in part from code found in the Neo package for
75
- # representing electrophysiology data, specifically from a python module
76
- # authored by sgarcia and jnowacki.
67
+ # A lot of this code uses the (sadly somewhat outdated) information made public
68
+ # by Axon, e.g. at https://support.moleculardevices.com/s/article/
69
+ # Axon-pCLAMP-ABF-File-Support-Pack-Download-Page
70
+ # This information comes without a specific license, but states that
71
+ # "Permission is granted to freely use, modify and copy the code in this file."
72
+ #
73
+ # In addition, this module was in part derived from an early version of the
74
+ # Neo package for representing electrophysiology data, specifically from a
75
+ # Python module authored by sgarcia and jnowacki.
77
76
  # Neo can be found at: http://neuralensemble.org/trac/neo
78
77
  #
79
- # The Neo package is licensed using the following BSD License:
78
+ # The Neo package used was licensed using the following BSD License:
80
79
  #
81
80
  #---------------------------------- start -----------------------------------
82
81
  # Copyright (c) 2010-2012, Neo authors and contributors
@@ -140,21 +139,21 @@
140
139
  # was also used: http://neurodata.hg.sourceforge.net/hgweb/neurodata/neurodata/
141
140
  #------------------------------------------------------------------------------
142
141
  import datetime
143
- import logging
144
142
  import os
145
143
  import struct
146
- import traceback
144
+ import warnings
147
145
 
148
146
  import numpy as np
149
147
 
150
- from collections import OrderedDict
148
+ import myokit
149
+ import myokit.formats
151
150
 
152
151
 
153
152
  # Encoding for text parts of files
154
153
  _ENC = 'latin-1'
155
154
 
156
155
 
157
- class AbfFile:
156
+ class AbfFile(myokit.formats.SweepSource):
158
157
  """
159
158
  Represents a read-only Axon Binary Format file (``.abf``), stored at the
160
159
  location pointed to by ``filepath``.
@@ -164,37 +163,63 @@ class AbfFile:
164
163
  assumption can be overruled by setting the ``is_protocol_file`` argument
165
164
  to either ``True`` or ``False``.
166
165
 
167
- The "data" in an AbfFile is recorded (analog-to-digital) data. Any output
168
- signals from the amplifier to the cell (digital-to-analog) are termed the
169
- "protocol".
166
+ Data in ABF files is recorded in *sweeps*, where each sweep contains one or
167
+ more *channels* with recorded (A/D) data. In addition, zero or more output
168
+ waveforms may be defined (also called "protocol" or D/A channels). Where
169
+ possible, the :class:`AbfFile` class will convert these embedded protocols
170
+ to time series and include them as additional channels.
170
171
 
171
- Data in AbfFiles is recorded in episodes called "sweeps". Each sweep
172
- contains the data from all recorded channels. The number of channels is
173
- constant: channel 1 in sweep 1 contains data recorded from the same source
174
- as channel 1 in sweep 10.
172
+ For example::
175
173
 
176
- The data in an ``AbfFile`` can be read by iterating over it::
177
-
178
- f = AbfFile('some_file.abf')
179
- for sweep in f:
174
+ abf = AbfFile('some_file.abf')
175
+ for sweep in abf:
180
176
  for channel in sweep:
181
- plt.plot(channel.times(), channel.values())
177
+ print(channel.name())
178
+ break
179
+
180
+ might show
182
181
 
183
- Similarly, protocol data can be accessed using::
182
+ IN 0
183
+ 10xVm
184
+ Cmd 0
184
185
 
185
- for sweep in f.protocol():
186
+ where the first two channels are recorded A/D channels and the final one is
187
+ a reconstructed D/A output channel.
188
+
189
+ Sweeps and channels are represented by :class:`Sweep` and :class:`Channel`
190
+ objects respectively, and these can be used to obtain the data from a
191
+ file::
192
+
193
+ abf = AbfFile('some_file.abf')
194
+ for sweep in abf:
186
195
  for channel in sweep:
187
- plt.plot(channel.times(), channel.values())
196
+ plot(channel.times(), channel.values())
197
+
198
+ In addition the ``AbfFile`` class implements the
199
+ :class:`myokit.formats.SweepSource` interface. Note that this interface
200
+ treats A/D and D/A as separate things, so :meth:`channel_count` returns the
201
+ number of A/D channels, not the total number of channels in a
202
+ :class:`Sweep` object (which can include D/A channels).
203
+
204
+ Support notes:
188
205
 
189
- Note that the number of output ("protocol") channels need not equal the
190
- number of input ("data") channels.
206
+ - Protocol (D/A) conversion is only supported for "episodic stimulation"
207
+ with constant valued steps (so e.g. no ramps) and without "user lists".
208
+ - Protocols with more than one sampling rate are not supported.
209
+ - The publicly available information on the ABF format is not great, so
210
+ there will be several other issues and shortcomings.
191
211
 
192
- Because each channel can have a different sampling rate, AbfFile data is
193
- not one-on-one compatible with myokit Simulation logs. To obtain a
194
- :class:`myokit.DataLog` version of the file's data, use:meth:`myokit_log`.
212
+ Arguments:
213
+
214
+ ``filepath``
215
+ The path to load the data from. Data will be read into memory
216
+ immediately upon construction.
217
+ ``is_protocol_file``
218
+ If set to ``True``, no attempt to read A/D data will be made and only
219
+ D/A "protocol" information will be read. If left at its default value
220
+ of ``None`` files with the extension ``.pro`` will be recognized as
221
+ protocol files.
195
222
 
196
- In some cases, a myokit protocol can be created from a stored stimulus
197
- protocol. To do this, use the method:meth:`myokit_protocol`.
198
223
  """
199
224
  def __init__(self, filepath, is_protocol_file=None):
200
225
  # The path to the file and its basename
@@ -202,504 +227,90 @@ class AbfFile:
202
227
  self._filepath = os.path.abspath(filepath)
203
228
  self._filename = os.path.basename(filepath)
204
229
 
205
- # Abf format version
206
- self._version = None
207
-
208
- # Protocol info
209
- self._epoch_functions = None
210
- self._numberOfTrials = None
211
- self._trialStartToStart = None
212
- self._runsPerTrial = None
213
- self._runStartToStart = None
214
- self._sweepsPerRun = None
215
- self._sweepStartToStart = None
216
-
217
230
  # Read as protocol file yes?
218
231
  if is_protocol_file is None:
219
232
  self._is_protocol_file = os.path.splitext(filepath)[1] == '.pro'
220
233
  else:
221
234
  self._is_protocol_file = bool(is_protocol_file)
222
235
 
223
- # The file header (an ordered dictionary)
224
- self._header = self._read_header()
225
-
226
- # Date/time of recording
227
- self._datetime = self._read_datetime()
228
-
229
- # Number of channels, sampling rate (Hz) and acquisition mode
230
- if self._version < 2:
231
- self._nc = self._header['nADCNumChannels']
232
- self._rate = 1e6 / (self._header['fADCSampleInterval'] * self._nc)
233
- self._mode = self._header['nOperationMode']
234
- else:
235
- self._nc = self._header['sections']['ADC']['length']
236
- self._rate = 1e6 / self._header['protocol']['fADCSequenceInterval']
237
- self._mode = self._header['protocol']['nOperationMode']
238
- if self._mode not in acquisition_modes:
239
- raise NotImplementedError(
240
- 'Unknown acquisition mode: ' + str(mode))
241
-
242
- # Conversion factors for integer data in the channels
243
- self._adc_factors = None
244
- self._adc_offsets = None
245
- self._set_conversion_factors()
246
-
247
- # The protocol used (a list of sweeps)
248
- try:
249
- self._protocol = self._read_protocol()
250
- except Exception: # pragma: no cover
251
- # This is not something we _want_ to happen, so if we have test
252
- # cases that trigger this error they should be resolved. At the
253
- # same time, if it happens to a user we want it to "sort-of work"
254
- # (an experimental rather than a production setting)
255
- log = logging.getLogger(__name__)
256
- log.warning('Unable to read protocol from ' + self._filepath)
257
- log.warning(traceback.format_exc())
258
- self._protocol = []
259
-
260
- # The measured data as a list of sweeps
261
- if self._is_protocol_file:
262
- self._sweeps = []
263
- else:
264
- self._sweeps = self._read_sweeps()
265
-
266
- def data_channels(self):
267
- """
268
- Returns the number of channels in this file's sweeps.
269
- """
270
- if len(self._sweeps) == 0: # pragma: no cover
271
- return 0
272
- return len(self._sweeps[0])
273
-
274
- def extract_channel(self, channel=0, join=False):
275
- """
276
- Extracts a selected data ``channel`` and returns its data in a tuple
277
- containing::
278
-
279
- A numpy array representing time
280
- A numpy array representing the first sweep
281
- A numpy array representing the second sweep
282
- ...
283
-
284
- An optional argument ``join=True`` can be set to join all sweeps
285
- together and return just two arrays, one for time and one for data.
286
-
287
- If no data is available, ``None`` is returned.
288
- """
289
- if len(self._sweeps) == 0: # pragma: no cover
290
- return None
291
-
292
- # Join all sweeps
293
- if join:
294
- time, data = [], []
295
- t = np.array(self._sweeps[0][channel].times())
296
- for i, sweep in enumerate(self._sweeps):
297
- time.append(t + i * self._sweepStartToStart)
298
- data.append(np.array(sweep[channel].values()))
299
- return (np.concatenate(time), np.concatenate(data))
300
-
301
- # Standard reading
302
- data = []
303
- data.append(np.array(self._sweeps[0][channel].times()))
304
- for sweep in self._sweeps:
305
- data.append(np.array(sweep[channel].values()))
306
- return tuple(data)
307
-
308
- def extract_channel_as_myokit_log(self, channel=0):
309
- """
310
- Extracts the given data channel and returns it as a myokit
311
- DataLog.
312
-
313
- The log will contain an entry "time" that contains the time vector.
314
- Each sweep will be in an entry "0.sweep", "1.sweep", "2.sweep" etc.
315
- """
316
- import myokit
317
- log = myokit.DataLog()
318
- if len(self._sweeps) == 0: # pragma: no cover
319
- return log
320
- log.set_time_key('time')
321
- log['time'] = np.array(self._sweeps[0][channel].times())
322
- for k, sweep in enumerate(self._sweeps):
323
- log['sweep', k] = np.array(sweep[channel].values())
324
- return log
325
-
326
- def filename(self):
327
- """
328
- Returns this AbfFile's filename.
329
- """
330
- return self._filepath
331
-
332
- def __getitem__(self, key):
333
- return self._sweeps.__getitem__(key)
334
-
335
- def _get_conversion_factors(self, channel):
336
- """
337
- Returns the conversion factor and shift for the selected channel as a
338
- tuple of floats ``(factor, shift)``.
339
- """
340
- return self._adc_factors[channel], self._adc_offsets[channel]
341
-
342
- def info(self, show_header=False):
343
- """
344
- Returns a string with lots of info on this file.
345
-
346
- The optional argument ``show_header`` can be used to add the full
347
- header contents to the output.
348
- """
349
- out = []
350
-
351
- # Show file info
352
- if self._is_protocol_file:
353
- out.append('Axon Protocol File: ' + self._filename)
354
- else:
355
- out.append('Axon Binary File: ' + self._filename)
356
- out.append('ABF Format version ' + str(self._version))
357
- out.append('Recorded on: ' + str(self._datetime))
358
-
359
- # Show protocol info
360
- out.append(
361
- 'Acquisition mode: ' + str(self._mode) + ': '
362
- + acquisition_modes[self._mode])
363
- if self._numberOfTrials:
364
- out.append(
365
- 'Protocol set for ' + str(self._numberOfTrials)
366
- + ' trials, spaced ' + str(self._trialStartToStart)
367
- + 's apart.')
368
- out.append(
369
- ' with ' + str(self._runsPerTrial)
370
- + ' runs per trial, spaced ' + str(self._runStartToStart)
371
- + 's apart.')
372
- out.append(
373
- ' and ' + str(self._sweepsPerRun)
374
- + ' sweeps per run, spaced ' + str(self._sweepStartToStart)
375
- + 's apart.')
376
- else: # pragma: no cover
377
- out.append('Protocol data could not be determined.')
378
- out.append('Sampling rate: ' + str(self._rate) + ' Hz')
236
+ # Cached string-to-unit conversions
237
+ self._unit_cache = {}
379
238
 
380
- # Show channel info
381
- if self._sweeps:
382
- for i, c in enumerate(self._sweeps[0]._channels):
383
- out.append('Channel ' + str(i) + ': "' + c._name + '"')
384
- if c._type: # pragma: no cover
385
- # Cover pragma: Don't have appropriate test file
386
- out.append(' Type: ' + type_mode_names[c._type])
387
- out.append(' Unit: ' + c._unit.strip())
388
- if c._lopass:
389
- out.append(' Low-pass filter: ' + str(c._lopass) + ' Hz')
390
- if c._cm:
391
- out.append(' Cm (telegraphed): ' + str(c._cm) + ' pF')
392
- if c._rs: # pragma: no cover
393
- # Cover pragma: Don't have appropriate test file
394
- out.append(' Rs (telegraphed): ' + str(c._rs))
395
-
396
- # Methods
397
- def show_dict(name, d, tab=''):
398
- m = max(0, 38 - len(tab) - int(0.1 + len(name) / 2))
399
- out.append(tab + '-' * m + ' ' + name + ' ' + '-' * m)
400
- for n, v in d.items():
401
- n = str(n)
402
- if type(v) == OrderedDict:
403
- show_dict(n, v, tab + ' ')
404
- elif type(v) == list:
405
- show_list(n, v, tab)
406
- else:
407
- out.append(tab + n + ': ' + str(v))
408
- m = max(0, 80 - 2 * len(tab))
409
- out.append(tab + m * '-')
410
-
411
- def show_list(name, d, tab=''):
412
- for index, item in enumerate(d):
413
- n = name + '[' + str(index) + ']'
414
- if type(item) == OrderedDict:
415
- show_dict(n, item, tab)
416
- elif type(item) == list: # pragma: no cover
417
- # Cover pragma: Don't have appropriate test file
418
- show_list(n, item, tab)
419
- else:
420
- out.append(tab + n + ': ' + str(item))
421
-
422
- # Show full header info
423
- if show_header:
424
- if self.strings:
425
- show_dict('Strings', {'strings': self.strings})
426
- show_dict('file header', self._header)
427
- return '\n'.join(out)
428
-
429
- def matplotlib_figure(self):
430
- """
431
- Creates and returns a matplotlib figure of this abf file's contents.
432
- """
433
- import matplotlib.pyplot as plt
434
- f = plt.figure()
435
- plt.suptitle(self.filename())
436
-
437
- # Show data channel
438
- ax = plt.subplot(2, 1, 1)
439
- ax.set_title('Measured data')
440
- times = None
441
- for sweep in self:
442
- for channel in sweep:
443
- if times is None:
444
- times = channel.times()
445
- plt.plot(times, channel.values())
446
-
447
- # Show protocol channels
448
- n = self.protocol_channels()
449
- ax = [plt.subplot(2, n, n + 1 + i) for i in range(n)]
450
-
451
- for sweep in self.protocol():
452
- times = None
453
- for i, channel in enumerate(sweep):
454
- if times is None:
455
- times = channel.times()
456
- ax[i].set_title(channel.name())
457
- ax[i].plot(times, channel.values())
458
- return f
459
-
460
- def myokit_log(self):
461
- """
462
- Converts the data in this ABF file to a:class:`myokit.DataLog` with an
463
- entry for every channel. All sweeps will be joined together into a
464
- single time series.
465
-
466
- The log will contain an entry "time" that contains the time vector.
467
- Channels will be stored using "0.ad", "1.ad" etc. for the recorded
468
- (analog-to-digital) channels and "0.da", "1.da" etc. for the output
469
- (digital-to-analog) channels.
470
- """
471
- import myokit
472
- log = myokit.DataLog()
473
- if self._sweeps:
474
- # Gather parts of time and channel vectors
475
- time = []
476
- ad_channels = []
477
- da_channels = []
478
- for i in range(self.data_channels()):
479
- ad_channels.append([])
480
- for i in range(self.protocol_channels()):
481
- da_channels.append([])
482
-
483
- # Add ad channels
484
- for sweep in self:
485
- for channel in sweep:
486
- time.append(channel.times())
487
- break
488
- for i, channel in enumerate(sweep):
489
- ad_channels[i].append(channel.values())
490
-
491
- # Add da channels
492
- for sweep in self.protocol():
493
- for i, channel in enumerate(sweep):
494
- da_channels[i].append(channel.values())
495
-
496
- # Combine into time series, store in log
497
- log['time'] = np.concatenate(time)
498
- log.set_time_key('time')
499
- for i, channel in enumerate(ad_channels):
500
- log['ad', i] = np.concatenate(channel)
501
- for i, channel in enumerate(da_channels):
502
- log['da', i] = np.concatenate(channel)
503
-
504
- return log
505
-
506
- def myokit_protocol(self, channel=None, ms=True):
507
- """
508
- Returns a single channel from an embedded protocol as a
509
- :class:`myokit.Protocol`. The channel to return is specified by setting
510
- ``channel`` to the correct index.
511
-
512
- Only works for episodic stimulation, without user lists.
513
-
514
- By default, all times are converted to milliseconds. To disable this
515
- function, set ``ms=False``.
516
- """
517
- import myokit
518
-
519
- # Only episodic stimulation is supported.
520
- if self._mode != ACMODE_EPISODIC_STIMULATION: # pragma: no cover
521
- return myokit.Protocol()
522
-
523
- # Check channel
524
- if channel is None:
525
- channel = 0
526
- else:
527
- channel = int(channel)
528
-
529
- # User lists are not supported
530
- if self._version < 2: # pragma: no cover
531
- if self._header['nULEnable'][channel]:
532
- raise NotImplementedError('User lists are not supported.')
533
- else: # pragma: no cover
534
- for userlist in self._header['listUserListInfo']:
535
- en1 = 'nULEnable' in userlist and userlist['nULEnable']
536
- en2 = 'nConditEnable' in userlist and userlist['nConditEnable']
537
- if en1 or en2:
538
- raise NotImplementedError('User lists are not supported.')
539
-
540
- # Create protocol
541
- p = myokit.Protocol()
542
-
543
- # Get epoch functions set by _read_protocol
544
- dinfo, einfo_exists, einfo = self._epoch_functions
545
- start = 0
546
- next_start = 0
547
- f = 1e3 if ms else 1
548
- for iSweep in range(self._sweepsPerRun):
549
-
550
- if not einfo_exists(channel): # pragma: no cover
551
- # Not sure if this can happen, if so, would need to update code
552
- raise Exception('Missing protocol data')
553
-
554
- for e in einfo(channel):
555
- kind = e['type']
556
-
557
- if kind not in epoch_types:
558
- raise NotImplementedError(
559
- 'Unknown epoch type: ' + str(kind))
560
-
561
- if kind == EPOCH_DISABLED:
562
- continue
563
-
564
- elif kind == EPOCH_STEPPED:
565
- # Event at step
566
- dur = f * e['init_duration'] / self._rate
567
- inc = f * e['duration_inc'] / self._rate
568
- e_level = e['init_level'] + e['level_inc'] * iSweep
569
- e_start = start
570
- e_length = dur + iSweep * inc
571
- p.schedule(e_level, e_start, e_length)
572
- start += e_length
573
-
574
- else: # pragma: no cover
575
- raise NotImplementedError(
576
- 'Usupported epoch type: ' + epoch_types(kind))
577
-
578
- # Event at holding potential
579
- next_start += f * self._sweepStartToStart
580
- e_level = dinfo(channel, 'fDACHoldingLevel')
581
- e_start = start
582
- e_length = next_start - start
583
- p.schedule(e_level, e_start, e_length)
584
- start = next_start
585
-
586
- return p
587
-
588
- def protocol_channels(self):
589
- """
590
- Returns the number of channels in this file's protocol.
591
- """
592
- if self._version < 2:
593
- return len(self._header['sDACChannelName'])
594
- else:
595
- return int(self._header['sections']['DAC']['length'])
596
-
597
- def protocol_holding_level(self, channel=0):
598
- """
599
- Returns the holding level used by the requested output channel of the
600
- embedded protocol.
601
- """
602
- dinfo, einfo_exists, einfo = self._epoch_functions
603
- return dinfo(channel, 'fDACHoldingLevel')
604
-
605
- def protocol_steps(self, channel=0):
606
- """
607
- For a stepped protocol, this function returns a tuple of lists of the
608
- successive values (not including the holding value).
609
-
610
- For example, for a protocol that has holding value ``-120mV`` and
611
- performs steps to ``-100mV``, ``-80mV``, and ``-40mV`` the returned
612
- output will be::
239
+ # Read the file header as an ordered dictionary
240
+ self._header = None
241
+ self._strings = None
242
+ self._version = None
243
+ self._version_str = None
244
+ self._read_1_header()
613
245
 
614
- ([-100, -80, -40])
246
+ # Read the time of recording
247
+ self._datetime = None
248
+ self._read_2_time_of_recording()
615
249
 
616
- For a more complicated protocol, where each step is followed by a step
617
- down to ``-140mV``, the output would be::
250
+ # Read the protocol information
251
+ self._n_adc = None
252
+ self._n_dac = None
618
253
 
619
- ([-100, -80, -40], [-140, -140, -140])
254
+ self._rate = None
255
+ self._mode = None
256
+
257
+ self._number_of_trials = None
258
+ self._trial_start_to_start = None
259
+ self._runs_per_trial = None
260
+ self._run_start_to_start = None
261
+ self._sweeps_per_run = None
262
+ self._sweep_start_to_start = None
263
+ self._samples_per_channel = None
264
+
265
+ # To be able to treat v1 and v2 slightly more easily, we define 3
266
+ # functions to read epoch info from episodic stimulation protocols
267
+ self._epoch_functions = None
620
268
 
269
+ # Not all D/A channels can be converted, so we maintain an array with
270
+ # the original indices of the channels in the da_sweeps. (Note that
271
+ # this differs again from the "index" labels for user display, which
272
+ # are stored in the channels themselves).
273
+ self._dac_indices = []
621
274
 
622
- """
623
- # Get epoch functions set by _read_protocol
624
- dinfo, einfo_exists, einfo = self._epoch_functions
625
- if not einfo_exists(channel): # pragma: no cover
626
- # Not sure if this can happen, if so need to update code.
627
- raise Exception('Missing protocol data')
628
-
629
- # Create list of step lists
630
- levels = []
631
- for e in einfo(channel):
632
- kind = e['type']
633
- if kind not in epoch_types:
634
- raise NotImplementedError('Unknown epoch type: ' + str(kind))
635
- if kind == EPOCH_DISABLED:
636
- continue
637
- elif kind == EPOCH_STEPPED:
638
- levels.append([])
639
- else:
640
- raise NotImplementedError(
641
- 'Unsupported epoch type: ' + epoch_types(kind))
642
-
643
- # Gather steps
644
- levels = tuple(levels)
645
- for i in range(self._sweepsPerRun):
646
- j = 0
647
- for e in einfo(channel):
648
- if e['type'] == EPOCH_STEPPED:
649
- levels[j].append(e['init_level'] + e['level_inc'] * i)
650
- j += 1
651
- return levels
275
+ # Read protocol information and create empty sweep objects
276
+ # Sweeps contain both A/D channels and D/A reconstructions. Some files
277
+ # will have A/D but no (or no supported) D/A. Conversely protocol files
278
+ # will have D/A only. So all in one sweep is easiest.
279
+ self._sweeps = None
652
280
 
653
- def __iter__(self):
654
- """
655
- Returns an iterator over all sweeps
656
- """
657
- return iter(self._sweeps)
281
+ self._read_3_protocol_information()
658
282
 
659
- def __len__(self):
660
- """
661
- Returns the number of sweeps in this file.
662
- """
663
- return len(self._sweeps)
283
+ # Read and calculate conversion factors for integer data in ADC
284
+ self._adc_factors = None
285
+ self._adc_offsets = None
286
+ self._read_4_ad_conversion_factors()
664
287
 
665
- def protocol(self):
666
- """
667
- Returns an interator over the protocol data.
668
- """
669
- return iter(self._protocol)
288
+ # Read the A/D channel data and add it to the sweeps
289
+ if self._n_adc:
290
+ self._read_5_ad_data()
670
291
 
671
- def _read_datetime(self):
672
- """
673
- Reads the date/time this file was recorded
674
- """
675
- # Get date and time
676
- if self._version < 2:
677
- t1 = str(self._header['lFileStartDate'])
678
- t2 = float(self._header['lFileStartTime'])
679
- else:
680
- t1 = str(self._header['uFileStartDate'])
681
- t2 = float(self._header['uFileStartTimeMS']) / 1000
292
+ # Reconstruct D/A signals as additional channels and add to the sweeps
293
+ self._read_6_da_reconstructions()
682
294
 
683
- YY = int(t1[0:4])
684
- MM = int(t1[4:6])
685
- DD = int(t1[6:8])
686
- hh = int(t2 / 3600)
687
- mm = int((t2 - hh * 3600) / 60)
688
- ss = t2 - hh * 3600 - mm * 60
689
- ms = int((ss % 1) * 1e6)
690
- ss = int(ss)
691
-
692
- return datetime.datetime(YY, MM, DD, hh, mm, ss, ms)
295
+ # Copy channel names and units, for easier SweepSource implementation
296
+ self._ad_names = {}
297
+ self._da_names = {}
298
+ self._ad_units = []
299
+ self._da_units = []
300
+ for sweep in self._sweeps:
301
+ for i, channel in enumerate(sweep[:self._n_adc]):
302
+ self._ad_names[channel.name()] = i
303
+ self._ad_units.append(channel.unit())
304
+ for i, channel in enumerate(sweep[self._n_adc:]):
305
+ self._da_names[channel.name()] = i
306
+ self._da_units.append(channel.unit())
307
+ break
693
308
 
694
- def _read_header(self):
695
- """
696
- Reads the file's header.
697
- """
309
+ def _read_1_header(self):
310
+ """ Read the file header. """
698
311
 
699
312
  def read_f(f, form, offset=None):
700
- """
701
- Read and unpack a file section using the given format ``form``.
702
- """
313
+ """ Read and unpack a file section in the format ``form``. """
703
314
  form = str(form)
704
315
  if offset is not None:
705
316
  f.seek(offset)
@@ -719,7 +330,7 @@ class AbfFile:
719
330
  values = [0] * len(val)
720
331
  for i, v in enumerate(val):
721
332
  if isinstance(v, bytes):
722
- v = v.decode(_ENC)
333
+ v = v.decode(_ENC).strip()
723
334
  # Handle long \x00 lists
724
335
  if v and ord(v[0]) == 0:
725
336
  return None
@@ -741,21 +352,21 @@ class AbfFile:
741
352
  elif sig == 'ABF2':
742
353
  version = 2
743
354
  else:
744
- raise NotImplementedError(
745
- 'Unknown ABF Format "' + str(sig) + '".')
355
+ raise NotImplementedError(f'Unknown ABF Format "{sig}".')
746
356
 
747
357
  # Gather header fields
748
- header = OrderedDict()
749
- for key, offset, form in headerFields[version]:
358
+ header = {}
359
+ for key, offset, form in HEADER_FIELDS[version]:
750
360
  header[key] = ups(read_f(f, form, offset))
751
361
 
752
362
  # Get uniform file version number
753
363
  if version < 2:
754
- self._version = (
755
- np.round(header['fFileVersionNumber'] * 100) / 100)
364
+ self._version = np.round(header['fFileVersionNumber'], 5)
365
+ self._version_str = str(self._version)
756
366
  else:
757
- n = header['fFileVersionNumber']
758
- self._version = n[3] + 0.1 * n[2] + 0.01 * n[1] + 0.001 * n[0]
367
+ v = header['fFileVersionNumber']
368
+ self._version = v[3]
369
+ self._version_str = '.'.join([str(v) for v in reversed(v)])
759
370
 
760
371
  # Get file start time in seconds
761
372
  if version < 2:
@@ -771,21 +382,21 @@ class AbfFile:
771
382
  for i in range(header['lNumTagEntries']): # pragma: no cover
772
383
  # Cover pragma: Don't have appropriate test file
773
384
  f.seek(header['lTagSectionPtr'] + i * 64)
774
- tag = OrderedDict()
775
- for key, form in TagInfoDescription:
385
+ tag = {}
386
+ for key, form in ABF2_TAG_INFO_DESCRIPTION:
776
387
  tag[key] = ups(read_f(f, form))
777
388
  tags.append(tag)
778
389
  header['tags'] = tags
779
- self.strings = []
390
+ self._strings = []
780
391
 
781
392
  else:
782
393
 
783
394
  # Version 2
784
395
  # Find location of file sections
785
- sections = OrderedDict()
786
- for i, s in enumerate(abf2FileSections):
396
+ sections = {}
397
+ for i, s in enumerate(ABF2_FILE_SECTIONS):
787
398
  index, data, length = read_f(f, 'IIl', 76 + i * 16)
788
- sections[s] = OrderedDict()
399
+ sections[s] = {}
789
400
  sections[s]['index'] = index
790
401
  sections[s]['data'] = data
791
402
  sections[s]['length'] = length
@@ -807,8 +418,8 @@ class AbfFile:
807
418
 
808
419
  # C-style string termination
809
420
  strings = strings.split(b'\x00')
810
- strings = [s.decode(_ENC) for s in strings]
811
- self.strings = strings
421
+ strings = [s.decode(_ENC).strip() for s in strings]
422
+ self._strings = strings
812
423
 
813
424
  # Read tag section
814
425
  tags = []
@@ -817,17 +428,17 @@ class AbfFile:
817
428
  for i in range(sections['Tag']['length']): # pragma: no cover
818
429
  # Cover pragma: Don't have appropriate test file
819
430
  f.seek(offs + i * size)
820
- tag = OrderedDict()
821
- for key, form in TagInfoDescription:
431
+ tag = {}
432
+ for key, form in ABF2_TAG_INFO_DESCRIPTION:
822
433
  tag[key] = ups(read_f(f, form))
823
434
  tags.append(tag)
824
435
  header['tags'] = tags
825
436
 
826
437
  # Read protocol section
827
- protocol = OrderedDict()
438
+ protocol = {}
828
439
  offs = sections['Protocol']['index'] * BLOCKSIZE
829
440
  f.seek(offs)
830
- for key, form in protocolFields:
441
+ for key, form in ABF2_PROTOCOL_FIELDS:
831
442
  protocol[key] = ups(read_f(f, form))
832
443
  header['protocol'] = protocol
833
444
 
@@ -836,9 +447,9 @@ class AbfFile:
836
447
  offs = sections['ADC']['index'] * BLOCKSIZE
837
448
  size = sections['ADC']['data']
838
449
  for i in range(sections['ADC']['length']):
839
- ADC = OrderedDict()
450
+ ADC = {}
840
451
  f.seek(offs + i * size)
841
- for key, form in ADCFields:
452
+ for key, form in ABF2_ADC_FIELDS:
842
453
  ADC[key] = ups(read_f(f, form))
843
454
  # Get channel name and unit
844
455
  ADC['ADCChNames'] = (
@@ -853,8 +464,8 @@ class AbfFile:
853
464
  size = sections['DAC']['data']
854
465
  for i in range(sections['DAC']['length']):
855
466
  f.seek(offs + size * i)
856
- DAC = OrderedDict()
857
- for key, form in DACFields:
467
+ DAC = {}
468
+ for key, form in ABF2_DAC_FIELDS:
858
469
  DAC[key] = ups(read_f(f, form))
859
470
  DAC['sDACChannelName'] = \
860
471
  strings[DAC['lDACChannelNameIndex'] - 1]
@@ -864,81 +475,141 @@ class AbfFile:
864
475
  header['listDACInfo'] = dac
865
476
 
866
477
  # Read UserList section
867
- userlists = []
478
+ user_lists = []
479
+ offs = sections['UserList']['index'] * BLOCKSIZE
480
+ size = sections['UserList']['data']
868
481
  r = range(sections['UserList']['length'])
869
482
  for i in r: # pragma: no cover
870
483
  # Cover pragma: User lists are not supported
871
484
  f.seek(offs + size * i)
872
- UserList = OrderedDict()
873
- for key, form in UserListFields:
874
- UserList[key] = ups(read_f(f, form))
875
- userlists.append(DAC)
876
- header['listUserListInfo'] = userlists
485
+ user_list = {}
486
+ for key, form in ABF2_USER_LIST_FIELDS:
487
+ user_list[key] = ups(read_f(f, form))
488
+ user_lists.append(user_list)
489
+ header['listUserListInfo'] = user_lists
877
490
 
878
491
  # Read epoch-per-DAC section
879
492
  # The resulting OrderedDict has the following structure:
880
493
  # - the first index is the DAC number
881
494
  # - the second index is the epoch number
882
- header['epochInfoPerDAC'] = OrderedDict()
495
+ header['epochInfoPerDAC'] = {}
883
496
  offs = sections['EpochPerDAC']['index'] * BLOCKSIZE
884
497
  size = sections['EpochPerDAC']['data']
885
- info = OrderedDict()
498
+ info = {}
886
499
  for i in range(sections['EpochPerDAC']['length']):
887
500
  f.seek(offs + size * i)
888
- einf = OrderedDict()
889
- for key, form in EpochInfoPerDACFields:
501
+ einf = {}
502
+ for key, form in ABF2_EPOCH_INFO_PER_DAC_FIELD:
890
503
  einf[key] = ups(read_f(f, form))
891
504
  DACNum = einf['nDACNum']
892
505
  EpochNum = einf['nEpochNum']
893
506
  if DACNum not in info:
894
- info[DACNum] = OrderedDict()
507
+ info[DACNum] = {}
895
508
  info[DACNum][EpochNum] = einf
896
509
  header['epochInfoPerDAC'] = info
897
510
 
898
- return header
511
+ self._header = header
899
512
 
900
- def _read_protocol(self):
901
- """
902
- Reads the protocol stored in the ABF file and converts it to an analog
903
- signal.
513
+ def _read_2_time_of_recording(self):
514
+ """ Read and process the time when this file was recorded. """
515
+
516
+ if self._version < 2:
517
+ t1 = str(self._header['lFileStartDate'])
518
+ t2 = float(self._header['lFileStartTime'])
519
+ else:
520
+ t1 = str(self._header['uFileStartDate'])
521
+ t2 = float(self._header['uFileStartTimeMS']) / 1000
904
522
 
905
- Only works for episodic stimulation, without any user lists.
523
+ YY = int(t1[0:4])
524
+ MM = int(t1[4:6])
525
+ DD = int(t1[6:8])
526
+ hh = int(t2 / 3600)
527
+ mm = int((t2 - hh * 3600) / 60)
528
+ ss = t2 - hh * 3600 - mm * 60
529
+ ms = int((ss % 1) * 1e6)
530
+ ss = int(ss)
906
531
 
907
- The resulting analog signal has the same size as the recorded signals,
908
- so not the full length of the protocol! This is different from the
909
- values returned by the Myokit
532
+ self._datetime = datetime.datetime(YY, MM, DD, hh, mm, ss, ms)
533
+
534
+ def _read_3_protocol_information(self):
535
+ """
536
+ Reads the header fields detailing the number of runs, sweeps, and the
537
+ type of protocol used. Create empty sweeps.
910
538
  """
539
+ h = self._header
540
+
541
+ # Number of channels, sampling rate (Hz) and acquisition mode
542
+ # Note: Number of A/D channels will be set to 0 if this is a
543
+ # protocol-only file
544
+ # Note: Number of D/A channels will be adjusted after checking support
545
+ if self._version < 2:
546
+ # In (newer versions of) version 1.x, only 2 D/A channels have
547
+ # full "waveform" support. There are still 4 D/A channels but I
548
+ # don't understand what the other 2 do.
549
+ # 1.x versions only seem to have 1 DAC channel, but this is not
550
+ # supported here.
551
+ self._n_adc = int(h['nADCNumChannels'])
552
+ self._n_dac = min(len(h['sDACChannelName']), 2)
553
+ self._rate = 1e6 / (h['fADCSampleInterval'] * self._n_adc)
554
+ self._mode = h['nOperationMode']
555
+ else:
556
+ # In version 2, there are up to 8 "waveform" D/A channels
557
+
558
+ self._n_adc = int(h['sections']['ADC']['length'])
559
+ self._n_dac = int(h['sections']['DAC']['length'])
560
+ self._rate = 1e6 / h['protocol']['fADCSequenceInterval']
561
+ self._mode = h['protocol']['nOperationMode']
562
+
563
+ if self._mode not in acquisition_modes: # pragma: no cover
564
+ raise NotImplementedError(f'Unknown acquisition mode: {mode}')
565
+
566
+ # Protocol files don't have A/D channels by definition
567
+ if self._is_protocol_file:
568
+ self._n_adc = 0
569
+
911
570
  # Only episodic stimulation is supported.
912
571
  if self._mode != ACMODE_EPISODIC_STIMULATION: # pragma: no cover
913
- return []
572
+ warnings.warn(
573
+ 'Unsupported acquisition method '
574
+ + acquisition_modes[self._mode] + '; unable to read D/A'
575
+ ' channels.')
914
576
 
915
- # Start reading
916
- h = self._header
577
+ # Remaining code is all about reading D/A info for episodic
578
+ # stimulation, so return
579
+ self._n_dac = 0
580
+ return
917
581
 
918
- # Step 1: Gather information about the protocol
582
+ # Gather protocol information
919
583
  if self._version < 2:
920
584
 
921
585
  # Before version 2: Sections are fixed length, locations absolute
922
- self._numberOfTrials = h['lNumberOfTrials']
923
- self._trialStartToStart = h['fTrialStartToStart']
924
- self._runsPerTrial = h['lRunsPerTrial']
925
- self._runStartToStart = h['fRunStartToStart']
926
- self._sweepsPerRun = h['lSweepsPerRun']
927
- self._sweepStartToStart = h['fEpisodeStartToStart']
586
+ self._number_of_trials = h['lNumberOfTrials']
587
+ self._trial_start_to_start = h['fTrialStartToStart']
588
+ self._runs_per_trial = h['lRunsPerTrial']
589
+ self._run_start_to_start = h['fRunStartToStart']
590
+ self._sweeps_per_run = h['lSweepsPerRun']
591
+ self._sweep_start_to_start = h['fEpisodeStartToStart']
928
592
 
929
593
  # Number of samples in a channel for each sweep
930
594
  # (Only works for fixed-length, high-speed-osc or episodic)
931
- nSam = h['lNumSamplesPerEpisode'] // h['nADCNumChannels']
595
+ self._samples_per_channel = \
596
+ h['lNumSamplesPerEpisode'] // h['nADCNumChannels']
932
597
 
933
598
  def dinfo(index, name):
599
+ """ Return DAC channel info, ABF1 version. """
934
600
  return h[name][index]
935
601
 
936
602
  def einfo_exists(index):
937
- return True
603
+ """ Check that epoch info exists for a DAC, ABF1 version. """
604
+ # Fields always exist for 2 channels, not always set.
605
+ # But not useful to look at unset ones, so using n_dac instead
606
+ # of hardcoded 2!
607
+ return 0 <= index < self._n_dac
938
608
 
939
609
  def einfo(index):
610
+ """ Return epoch info for a DAC, ABF1 version. """
940
611
  lo = index * 8
941
- hi = index + 8
612
+ hi = lo + 8
942
613
  for i in range(lo, hi):
943
614
  yield {
944
615
  'type': h['nEpochType'][i],
@@ -956,24 +627,28 @@ class AbfFile:
956
627
 
957
628
  # Trials, runs, sweeps
958
629
  # (According to the manual, there should only be 1 trial!)
959
- self._numberOfTrials = p['lNumberOfTrials']
960
- self._trialStartToStart = p['fTrialStartToStart']
961
- self._runsPerTrial = p['lRunsPerTrial']
962
- self._runStartToStart = p['fRunStartToStart']
963
- self._sweepsPerRun = p['lSweepsPerRun']
964
- self._sweepStartToStart = p['fSweepStartToStart']
630
+ self._number_of_trials = p['lNumberOfTrials']
631
+ self._trial_start_to_start = p['fTrialStartToStart']
632
+ self._runs_per_trial = p['lRunsPerTrial']
633
+ self._run_start_to_start = p['fRunStartToStart']
634
+ self._sweeps_per_run = p['lSweepsPerRun']
635
+ self._sweep_start_to_start = p['fSweepStartToStart']
965
636
 
966
637
  # Number of samples in a channel in a single sweep
967
- nSam = p['lNumSamplesPerEpisode'] // h['sections']['ADC']['length']
638
+ self._samples_per_channel = \
639
+ p['lNumSamplesPerEpisode'] // h['sections']['ADC']['length']
968
640
 
969
641
  # Compatibility functions
970
642
  def dinfo(index, name):
643
+ """ Return DAC info, ABF2 version. """
971
644
  return h['listDACInfo'][index][name]
972
645
 
973
646
  def einfo_exists(index):
647
+ """ Check that epoch info exists for a DAC, ABF2 version. """
974
648
  return index in h['epochInfoPerDAC']
975
649
 
976
650
  def einfo(index):
651
+ """ Return epoch info for a DAC, ABF2 version. """
977
652
  for e in h['epochInfoPerDAC'][index].values():
978
653
  yield {
979
654
  'type': e['nEpochType'],
@@ -985,190 +660,245 @@ class AbfFile:
985
660
  self._epoch_functions = (dinfo, einfo_exists, einfo)
986
661
 
987
662
  # If sweepStartToStart == 0, we set it to the duration of a sweep
988
- if self._sweepStartToStart == 0: # pragma: no cover
989
- self._sweepStartToStart = nSam / self._rate
663
+ if self._sweep_start_to_start == 0: # pragma: no cover
664
+ self._sweep_start_to_start = self._samples_per_channel / self._rate
990
665
 
991
- # Step 2: Generate analog signals corresponding to the waveforms
992
- # suggested by the 'epochs' in the protocol
993
- # User lists are not supported
666
+ # Create empty sweeps
667
+ n = h['lActualSweeps']
668
+ if self._is_protocol_file:
669
+ n = self._sweeps_per_run
670
+ self._sweeps = [Sweep() for i in range(n)]
671
+
672
+ # User lists are not supported for D/A reconstruction
673
+ # I haven't been able to figure out how you see if a user list is
674
+ # being used, or which channel is using it. There is an 'enable' field
675
+ # but that's been 0 in files that definitely used a UserList...
676
+ # So for now not reading ANY DAC if a userlist even exists.
677
+ user_lists = False
994
678
  if self._version < 2: # pragma: no cover
995
- if any(self._header['nULEnable']):
996
- return []
997
- else: # pragma: no cover
998
- for userlist in self._header['listUserListInfo']:
999
- if 'nULEnable' in userlist and userlist['nULEnable']:
1000
- return []
1001
- if 'nConditEnable' in userlist and userlist['nConditEnable']:
1002
- return []
1003
- sweeps = []
1004
-
1005
- # Number of DAC channels = number of channels that can be used
1006
- # to output a stimulation
1007
- nDac = self.protocol_channels()
1008
- start = 0
1009
- for iSweep in range(h['lActualSweeps']):
1010
- sweep = Sweep(nDac)
1011
-
1012
- # Create channels for this sweep
1013
- for iDac in range(nDac):
1014
- c = Channel(self)
1015
- c._name = dinfo(iDac, 'sDACChannelName').strip()
1016
- c._unit = dinfo(iDac, 'sDACChannelUnits').strip()
1017
- if self._version < 2:
1018
- c._numb = iDac
679
+ user_lists = any(self._header['nULEnable'])
680
+ else: # pragma: no cover
681
+ user_lists = len(self._header['listUserListInfo']) > 0
682
+ if user_lists: # pragma: no cover
683
+ warnings.warn(
684
+ 'Unsupported acquisition method: episodic with user lists;'
685
+ ' unable to read D/A channels.')
686
+ self._n_dac = 0
687
+ return
688
+
689
+ # Get indices of enabled and supported DAC reconstructions
690
+ supported = {EPOCH_DISABLED, EPOCH_STEPPED}
691
+ for i_dac in range(self._n_dac):
692
+ if einfo_exists(i_dac):
693
+ i = einfo(i_dac)
694
+ use = False
695
+
696
+ # Check for unsupported features (or disabled waveforms/epochs)
697
+ # Version 1 files can only have two waveform channels
698
+ if self._version < 2 and i_dac > 1: # pragma: no cover
699
+ source = DAC_DISABLED
1019
700
  else:
1020
- c._numb = int(dinfo(iDac, 'lDACChannelNameIndex'))
1021
- c._data = np.ones(nSam) * dinfo(iDac, 'fDACHoldingLevel')
1022
- c._rate = self._rate
1023
- c._start = start
1024
- sweep[iDac] = c
701
+ source = dinfo(i_dac, 'nWaveformSource')
702
+ if source == DAC_EPOCHTABLEWAVEFORM:
703
+ # Any epoch types besides disabled/stepped? Then don't use
704
+ # Also don't use if exclusively disabled
705
+ for e in i:
706
+ t = e['type']
707
+ if t == EPOCH_STEPPED:
708
+ use = True
709
+ elif t != EPOCH_DISABLED: # pragma: no cover
710
+ use = False
711
+ warnings.warn(
712
+ f'Unsupported epoch type: {epoch_types(t)}')
713
+ break
714
+ elif source == DAC_DACFILEWAVEFORM: # pragma: no cover
715
+ # Stimulus file? Then don't use
716
+ warnings.warn('Stimulus file D/A channel not supported.')
1025
717
 
1026
- # No stimulation info for this channel? Then continue
1027
- if not einfo_exists(iDac):
1028
- continue
718
+ if use:
719
+ self._dac_indices.append(i_dac)
1029
720
 
1030
- # Save last sample index
1031
- i_last = int(nSam * 15625 / 1e6) # TODO: What's this?
721
+ # Set true number of D/A outputs
722
+ self._n_dac = len(self._dac_indices)
1032
723
 
1033
- # For each 'epoch' in the stimulation signal
1034
- for e in einfo(iDac):
1035
- kind = e['type']
1036
- if kind not in epoch_types:
1037
- raise NotImplementedError(
1038
- 'Unknown epoch type: ' + str(kind))
724
+ def _read_4_ad_conversion_factors(self):
725
+ """ Calculate the factors to convert any integer data to float. """
726
+ self._adc_factors = []
727
+ self._adc_offsets = []
728
+ h = self._header
729
+ if self._version < 2:
730
+ for i in range(self._n_adc):
731
+ j = h['nADCSamplingSeq'][i]
732
+
733
+ # Multiplier
734
+ f = (
735
+ h['fInstrumentScaleFactor'][j]
736
+ * h['fADCProgrammableGain'][j]
737
+ * h['lADCResolution']
738
+ / h['fADCRange'])
739
+
740
+ # Signal conditioner used?
741
+ if h['nSignalType'] != 0: # pragma: no cover
742
+ # Cover pragma: Don't have appropriate test file
743
+ f *= h['fSignalGain'][j]
1039
744
 
1040
- if kind == EPOCH_DISABLED:
745
+ # Additional gain?
746
+ if h['nTelegraphEnable'][j]:
747
+ f *= h['fTelegraphAdditGain'][j]
1041
748
 
1042
- continue
749
+ # Set final gain factor
750
+ self._adc_factors.append(1 / f)
1043
751
 
1044
- elif kind == EPOCH_STEPPED:
752
+ # Shift
753
+ s = h['fInstrumentOffset'][j]
1045
754
 
1046
- dur = e['init_duration']
1047
- inc = e['duration_inc']
1048
- i1 = i_last
1049
- i2 = i_last + dur + iSweep * inc
1050
- if i2 > nSam:
1051
- # The protocol may extend beyond the number of
1052
- # samples in the recording
1053
- i2 = nSam
1054
- level = e['init_level'] + e['level_inc'] * iSweep
1055
- c._data[i1:i2] = level * np.ones(len(range(i2 - i1)))
1056
- i_last += dur
1057
- if i_last > nSam:
1058
- # The protocol may extend beyond the number of
1059
- # samples in the recording
1060
- break
755
+ # Signal conditioner used?
756
+ if h['nSignalType'] != 0: # pragma: no cover
757
+ # Cover pragma: Don't have appropriate test file
758
+ s -= h['fSignalOffset'][j]
1061
759
 
1062
- else: # pragma: no cover
760
+ # Set final offset
761
+ self._adc_offsets.append(s)
1063
762
 
1064
- log = logging.getLogger(__name__)
1065
- log.warning(
1066
- 'Unsupported epoch type: ' + epoch_types(kind))
1067
- continue
763
+ else:
1068
764
 
1069
- sweeps.append(sweep)
1070
- start += self._sweepStartToStart
1071
- return sweeps
765
+ a = h['listADCInfo']
766
+ p = h['protocol']
767
+ for i in range(self._n_adc):
768
+ # Multiplier
769
+ f = (
770
+ a[i]['fInstrumentScaleFactor']
771
+ * a[i]['fADCProgrammableGain']
772
+ * p['lADCResolution']
773
+ / p['fADCRange'])
1072
774
 
1073
- def _read_sweeps(self):
1074
- """
1075
- Reads the data from an ABF file and returns a list of sweeps
1076
- """
1077
- header = self._header
1078
- version = self._version
1079
- nc = self._nc
775
+ # Signal conditioner used?
776
+ if h.get('nSignalType', 0) != 0: # pragma: no cover
777
+ # Cover pragma: Don't have appropriate test file
778
+ f *= a[i]['fSignalGain']
779
+
780
+ # Additional gain?
781
+ if a[i]['nTelegraphEnable']:
782
+ f *= a[i]['fTelegraphAdditGain']
783
+
784
+ # Set final gain factor
785
+ self._adc_factors.append(1 / f)
786
+
787
+ # Shift
788
+ s = a[i]['fInstrumentOffset']
789
+
790
+ # Signal conditioner used?
791
+ if h.get('nSignalType', 0) != 0: # pragma: no cover
792
+ # Cover pragma: Don't have appropriate test file
793
+ s -= a[i]['fSignalOffset']
794
+
795
+ # Set final offset
796
+ self._adc_offsets.append(s)
797
+
798
+ def _read_5_ad_data(self):
799
+ """ Reads the A/D data and appends it to the list of sweeps. """
800
+
801
+ h = self._header
1080
802
 
1081
803
  # Sampling rate is constant for all sweeps and channels
1082
804
  # TODO: This won't work for 2-rate protocols
1083
805
  rate = self._rate
1084
806
 
1085
807
  # Get binary integer format
1086
- dt = np.dtype('i2') if header['nDataFormat'] == 0 else np.dtype('f4')
808
+ dt = np.dtype('i2') if h['nDataFormat'] == 0 else np.dtype('f4')
1087
809
 
1088
810
  # Get number of channels, create a numpy memory map
1089
- if version < 2:
811
+ if self._version < 2:
1090
812
  # Old files, get info from fields stored directly in header
1091
- o = header['lDataSectionPtr'] * BLOCKSIZE \
1092
- + header['nNumPointsIgnored'] * dt.itemsize
1093
- n = header['lActualAcqLength']
813
+ o = h['lDataSectionPtr'] * BLOCKSIZE \
814
+ + h['nNumPointsIgnored'] * dt.itemsize
815
+ n = h['lActualAcqLength']
1094
816
  else:
1095
817
  # New files, get info from appropriate header section
1096
- o = header['sections']['Data']['index'] * BLOCKSIZE
1097
- n = header['sections']['Data']['length']
818
+ o = h['sections']['Data']['index'] * BLOCKSIZE
819
+ n = h['sections']['Data']['length']
1098
820
  data = np.memmap(self._filepath, dt, 'r', shape=(n,), offset=o)
1099
821
 
1100
822
  # Load list of sweeps (Sweeps are called 'episodes' in ABF < 2)
1101
- if version < 2:
1102
- n = header['lSynchArraySize']
1103
- o = header['lSynchArrayPtr'] * BLOCKSIZE
823
+ if self._version < 2:
824
+ n = h['lSynchArraySize']
825
+ o = h['lSynchArrayPtr'] * BLOCKSIZE
1104
826
  else:
1105
- n = header['sections']['SynchArray']['length']
1106
- o = header['sections']['SynchArray']['index'] * BLOCKSIZE
827
+ n = h['sections']['SynchArray']['length']
828
+ o = h['sections']['SynchArray']['index'] * BLOCKSIZE
1107
829
  if n > 0:
1108
830
  dt = [(str('offset'), str('i4')), (str('len'), str('i4'))]
1109
- sdata = np.memmap(self._filepath, dt, 'r', shape=(n), offset=o)
831
+ sweep_data = np.memmap(
832
+ self._filepath, dt, 'r', shape=(n,), offset=o)
1110
833
  else: # pragma: no cover
1111
834
  # Cover pragma: Don't have appropriate test file
1112
- sdata = np.empty((1), dt)
1113
- sdata[0]['len'] = data.size
1114
- sdata[0]['offset'] = 0
835
+ sweep_data = np.empty((1), dt)
836
+ sweep_data[0]['len'] = data.size
837
+ sweep_data[0]['offset'] = 0
1115
838
 
1116
- # Get data
1117
- pos = 0
1118
-
1119
- # Data structure
1120
- sweeps = []
839
+ # Number of sweeps must equal n
840
+ if n != h['lActualSweeps']:
841
+ raise NotImplementedError(
842
+ 'Unable to read file with different sizes per sweep.')
1121
843
 
1122
- # Time-offset at start of sweep
1123
- start = sdata[0]['offset'] / rate
1124
- for j in range(sdata.size):
844
+ # Time-offset at start of first sweep
845
+ start = sweep_data[0]['offset'] / rate
1125
846
 
1126
- # Create a new sweep
1127
- sweep = Sweep(nc)
847
+ # Get data
848
+ pos = 0
849
+ for i_sweep, sdat in enumerate(sweep_data):
1128
850
 
1129
851
  # Get the number of data points
1130
- size = sdata[j]['len']
852
+ size = sdat['len']
1131
853
 
1132
854
  # Calculate the correct size for variable-length event mode
1133
855
  if self._mode == ACMODE_VARIABLE_LENGTH_EVENTS: # pragma: no cover
1134
856
  # Cover pragma: Only episodic stimulus is supported.
1135
- if version < 2:
1136
- f = float(header['fSynchTimeUnit'])
857
+ if self._version < 2:
858
+ f = float(h['fSynchTimeUnit'])
1137
859
  else:
1138
- f = float(header['protocol']['fSynchTimeUnit'])
860
+ f = float(h['protocol']['fSynchTimeUnit'])
1139
861
  if f != 0:
1140
862
  size /= f
1141
863
 
1142
864
  # Get a memory map to the relevant part of the data
1143
865
  part = data[pos: pos + size]
1144
866
  pos += size
1145
- part = part.reshape((part.size // nc, nc)).astype('f')
867
+ part = part.reshape(
868
+ (part.size // self._n_adc, self._n_adc)).astype('f')
1146
869
 
1147
870
  # If needed, reformat the integers
1148
- if header['nDataFormat'] == 0:
871
+ if h['nDataFormat'] == 0:
1149
872
  # Data given as integers? Convert to floating point
1150
- for i in range(nc):
1151
- factor, offset = self._get_conversion_factors(i)
1152
- part[:, i] *= factor
1153
- part[:, i] += offset
1154
873
 
1155
- # Create channel
874
+ for i in range(self._n_adc):
875
+ part[:, i] *= self._adc_factors[i]
876
+ part[:, i] += self._adc_offsets[i]
877
+
878
+ # Get start in other modes
1156
879
  if self._mode != ACMODE_EPISODIC_STIMULATION: # pragma: no cover
1157
880
  # All modes except episodic stimulation
1158
- start = sdata[j]['offset'] / rate
881
+ start = data['offset'] / rate
1159
882
 
1160
- for i in range(nc):
883
+ # Create and populate sweep
884
+ sweep = self._sweeps[i_sweep]
885
+ for i in range(self._n_adc):
1161
886
  c = Channel(self)
1162
- c._data = part[:, i]
1163
- if version < 2:
1164
- c._name = str(header['sADCChannelName'][i])
1165
- c._unit = str(header['sADCUnits'][i])
1166
- c._numb = int(header['nADCPtoLChannelMap'][i])
887
+ c._data = part[:, i] # Actually store the data
888
+ c._rate = rate
889
+ c._start = start
890
+
891
+ if self._version < 2:
892
+ j = h['nADCSamplingSeq'][i]
893
+
894
+ c._name = h['sADCChannelName'][j]
895
+ c._index = int(h['nADCPtoLChannelMap'][j])
896
+ c._unit = self._unit(h['sADCUnits'][j])
1167
897
 
1168
898
  # Get telegraphed info
1169
899
  def get(field):
1170
900
  try:
1171
- return float(header[field][i])
901
+ return float(h[field][j])
1172
902
  except KeyError:
1173
903
  return None
1174
904
 
@@ -1179,7 +909,7 @@ class AbfFile:
1179
909
  c._lopass = get('fTelegraphFilter')
1180
910
 
1181
911
  # Updated low-pass cutoff
1182
- if header['nSignalType'] != 0: # pragma: no cover
912
+ if h['nSignalType'] != 0: # pragma: no cover
1183
913
  # Cover pragma: Don't have appropriate test file
1184
914
  # If a signal conditioner is used, the cutoff frequency
1185
915
  # is an undescribed "complex function" of both low-pass
@@ -1187,135 +917,446 @@ class AbfFile:
1187
917
  c._lopass = None
1188
918
 
1189
919
  else:
1190
-
1191
- c._name = str(header['listADCInfo'][i]['ADCChNames'])
1192
- c._unit = str(header['listADCInfo'][i]['ADCChUnits'])
1193
- c._numb = int(header['listADCInfo'][i]['nADCNum'])
920
+ c._name = h['listADCInfo'][i]['ADCChNames']
921
+ c._index = int(h['listADCInfo'][i]['nADCNum'])
922
+ c._unit = self._unit(h['listADCInfo'][i]['ADCChUnits'])
1194
923
 
1195
924
  # Get telegraphed info
1196
- if header['listADCInfo'][i]['nTelegraphEnable']:
1197
- c._type = int(
1198
- header['listADCInfo'][i]['nTelegraphMode'])
925
+ if h['listADCInfo'][i]['nTelegraphEnable']:
926
+ c._type = int(h['listADCInfo'][i]['nTelegraphMode'])
1199
927
  c._cm = float(
1200
- header['listADCInfo'][i]['fTelegraphMembraneCap'])
928
+ h['listADCInfo'][i]['fTelegraphMembraneCap'])
1201
929
  c._rs = float(
1202
- header['listADCInfo'][i][
1203
- 'fTelegraphAccessResistance'])
930
+ h['listADCInfo'][i]['fTelegraphAccessResistance'])
1204
931
  c._lopass = float(
1205
- header['listADCInfo'][i]['fTelegraphFilter'])
932
+ h['listADCInfo'][i]['fTelegraphFilter'])
1206
933
 
1207
934
  # Updated low-pass cutoff
1208
- if 'nSignalType' in header['protocol']: # pragma: no cover
935
+ if 'nSignalType' in h['protocol']: # pragma: no cover
1209
936
  # Cover pragma: Don't have appropriate test file
1210
- if header['protocol']['nSignalType'] != 0:
937
+ if h['protocol']['nSignalType'] != 0:
1211
938
  # If a signal conditioner is used, the cutoff
1212
939
  # frequency is an undescribed "complex function" of
1213
940
  # both low-pass settings...
1214
941
  c._lopass = None
1215
- c._rate = rate
1216
- c._start = start
1217
- sweep[i] = c
942
+
943
+ sweep._channels.append(c)
1218
944
 
1219
945
  if self._mode == ACMODE_EPISODIC_STIMULATION:
1220
946
  # Increase time according to sweeps in episodic stim. mode
1221
- start += self._sweepStartToStart
947
+ start += self._sweep_start_to_start
948
+
949
+ def _read_6_da_reconstructions(self):
950
+ """
951
+ Convert supported D/A waveforms to channels.
1222
952
 
1223
- # Store sweep
1224
- sweeps.append(sweep)
953
+ Only works for episodic stimulation, with step protocols and no
954
+ user lists.
1225
955
 
1226
- return sweeps
956
+ The resulting analog signal has the same size as the recorded
957
+ signals, so not always the full length of the protocol!
1227
958
 
1228
- def _set_conversion_factors(self):
1229
959
  """
1230
- Calculates the conversion factors to convert integer data from the ABF
1231
- file to floats.
960
+ dinfo, einfo_exists, einfo = self._epoch_functions
961
+
962
+ ns = self._samples_per_channel
963
+ start = 0
964
+ for i_sweep, sweep in enumerate(self._sweeps):
965
+ for i_dac in self._dac_indices:
966
+
967
+ # Create a channel
968
+ c = Channel(self)
969
+ c._name = dinfo(i_dac, 'sDACChannelName')
970
+ if self._version < 2:
971
+ c._index = i_dac
972
+ else:
973
+ c._index = int(dinfo(i_dac, 'lDACChannelNameIndex'))
974
+ c._data = np.ones(ns) * dinfo(i_dac, 'fDACHoldingLevel')
975
+ c._rate = self._rate
976
+ c._start = start
977
+ c._unit = self._unit(dinfo(i_dac, 'sDACChannelUnits'))
978
+
979
+ # Find start of first epoch. This is defined as being at t=0
980
+ # but axon likes to add some samples before the first and after
981
+ # the last epoch. We can find out the number of samples using
982
+ # a procedure found in ABF v1's _GetHoldingLength()
983
+ if self._is_protocol_file:
984
+ i2 = 0
985
+ else:
986
+ i2 = ns // 64 # ABFH_HOLDINGFRACTION = 64
987
+ i2 -= i2 % self._n_adc
988
+ if (i2 < self._n_adc): # pragma: no cover
989
+ i2 = self._n_adc
990
+
991
+ # For each 'epoch' in the stimulation signal
992
+ for e in einfo(i_dac):
993
+ if e['type'] == EPOCH_STEPPED:
994
+ dur = e['init_duration']
995
+ inc = e['duration_inc']
996
+ i1 = i2
997
+ i2 += dur + i_sweep * inc
998
+ level = e['init_level'] + e['level_inc'] * i_sweep
999
+ c._data[i1:i2] = level * np.ones(len(range(i2 - i1)))
1000
+
1001
+ # Store channel
1002
+ sweep._channels.append(c)
1003
+
1004
+ # Update start for next sweep
1005
+ start += self._sweep_start_to_start
1006
+
1007
+ def __getitem__(self, key):
1008
+ return self._sweeps[key]
1009
+
1010
+ def __iter__(self):
1011
+ return iter(self._sweeps)
1012
+
1013
+ def __len__(self):
1014
+ return len(self._sweeps)
1015
+
1016
+ def _channel_id(self, channel_id):
1017
+ """ Checks an int or str channel id and returns a valid int. """
1018
+ if len(self._sweeps) == 0: # pragma: no cover
1019
+ raise KeyError(f'Channel {channel_id} not found (empty file).')
1020
+
1021
+ # Handle string
1022
+ if isinstance(channel_id, str):
1023
+ int_id = self._ad_names[channel_id] # Bubble KeyError to user
1024
+ else:
1025
+ int_id = int(channel_id) # Propagate TypeError
1026
+ if int_id < 0 or int_id >= self._n_adc:
1027
+ raise IndexError(f'channel_id out of range: {channel_id}')
1028
+
1029
+ return int_id
1030
+
1031
+ def channel(self, channel_id, join_sweeps=False):
1032
+ # Docstring in SweepSource
1033
+ channel_id = self._channel_id(channel_id)
1034
+ time, data = [], []
1035
+ for i, sweep in enumerate(self._sweeps):
1036
+ time.append(sweep[channel_id].times())
1037
+ data.append(sweep[channel_id].values())
1038
+ if join_sweeps:
1039
+ return (np.concatenate(time), np.concatenate(data))
1040
+ return time, data
1041
+
1042
+ def channel_count(self):
1043
+ # Docstring in SweepSource
1044
+ return self._n_adc
1045
+
1046
+ def channel_names(self, index=None):
1047
+ # Docstring in SweepSource
1048
+ if index is None:
1049
+ return list(self._ad_names.keys())
1050
+ return list(self._ad_names.keys())[index]
1051
+
1052
+ def channel_units(self, index=None):
1053
+ # Docstring in SweepSource
1054
+ if index is None:
1055
+ return list(self._ad_units)
1056
+ return self._ad_units[index]
1057
+
1058
+ def _da_id(self, output_id):
1232
1059
  """
1233
- self._adc_factors = []
1234
- self._adc_offsets = []
1235
- h = self._header
1236
- if self._version < 2:
1237
- for i in range(self._nc):
1238
- # Multiplier
1239
- f = (
1240
- h['fInstrumentScaleFactor'][i]
1241
- * h['fADCProgrammableGain'][i]
1242
- * h['lADCResolution']
1243
- / h['fADCRange'])
1060
+ Checks an int or str D/A channel id and returns a valid int.
1244
1061
 
1245
- # Signal conditioner used?
1246
- if h['nSignalType'] != 0: # pragma: no cover
1247
- # Cover pragma: Don't have appropriate test file
1248
- f *= h['fSignalGain'][i]
1062
+ Note: The integer here is from 0 to da_count(), so not equal to the
1063
+ channel :meth:`index()` shown in pclamp.
1064
+ """
1065
+ if len(self._sweeps) == 0: # pragma: no cover
1066
+ raise KeyError(f'D/A output {output_id} not found (empty file).')
1249
1067
 
1250
- # Additional gain?
1251
- if h['nTelegraphEnable'][i]:
1252
- f *= h['fTelegraphAdditGain'][i]
1068
+ # Handle string
1069
+ if isinstance(output_id, str):
1070
+ int_id = self._da_names[output_id] # Propagate KeyError to user
1071
+ else:
1072
+ int_id = int(output_id) # Propagate TypeError
1073
+ if int_id < 0 or int_id >= self._n_dac:
1074
+ raise IndexError(f'output_id out of range: {output_id}')
1075
+
1076
+ return int_id
1077
+
1078
+ def da(self, output_id, join_sweeps=False):
1079
+ # Docstring in SweepSource
1080
+ channel_id = self._n_adc + self._da_id(output_id)
1081
+ time, data = [], []
1082
+ for i, sweep in enumerate(self._sweeps):
1083
+ time.append(sweep[channel_id].times())
1084
+ data.append(sweep[channel_id].values())
1085
+ if join_sweeps:
1086
+ return (np.concatenate(time), np.concatenate(data))
1087
+ return time, data
1253
1088
 
1254
- # Set fina gain factor
1255
- self._adc_factors.append(1.0 / f)
1089
+ def da_count(self):
1090
+ # Docstring in SweepSource
1091
+ return self._n_dac
1256
1092
 
1257
- # Shift
1258
- s = h['fInstrumentOffset'][i]
1093
+ def da_names(self, index=None):
1094
+ # Docstring in SweepSource
1095
+ if index is None:
1096
+ return list(self._da_names.keys())
1097
+ return list(self._da_names.keys())[index]
1259
1098
 
1260
- # Signal conditioner used?
1261
- if h['nSignalType'] != 0: # pragma: no cover
1262
- # Cover pragma: Don't have appropriate test file
1263
- s -= h['fSignalOffset'][i]
1099
+ def da_protocol(self, output_id=None, tu='ms', vu='mV', cu='pA',
1100
+ n_digits=9, include_initial_holding=False):
1101
+ """
1102
+ See :meth:`myokit.formats.SweepSource.da_protocol()`.
1264
1103
 
1265
- # Set final offset
1266
- self._adc_offsets.append(s)
1104
+ This implementation adds a keyword argument ``include_initial_holding``
1105
+ that lets you switch between the declared protocol (``False``) and the
1106
+ protocol as actually implemented (``True``). In the latter case, a
1107
+ short holding time is added before the first epoch in every sweep.
1108
+ """
1109
+
1110
+ # Check the output id. This also raises an error if no supported D/A
1111
+ # channels are present.
1112
+ output_id = self._da_id(output_id or 0)
1113
+
1114
+ # Get the index in dinfo
1115
+ i_dac = self._dac_indices[output_id]
1116
+ dinfo, einfo_exists, einfo = self._epoch_functions
1267
1117
 
1118
+ # Get the time and data conversion factors
1119
+ units = myokit.units
1120
+ tf = myokit.Unit.conversion_factor(units.s, tu)
1121
+ if myokit.Unit.can_convert(self._da_units[output_id], units.V):
1122
+ df = myokit.Unit.conversion_factor(self._da_units[output_id], vu)
1123
+ elif myokit.Unit.can_convert(
1124
+ self._da_units[output_id], units.A): # pragma: no cover
1125
+ df = myokit.Unit.conversion_factor(self._da_units[output_id], cu)
1126
+ else: # pragma: no cover
1127
+ # Not a voltage or current? Then don't convert
1128
+ df = 1
1129
+ tf, df = float(tf), float(df)
1130
+
1131
+ # Axon has the annoying habit of adding some extra holding at the start
1132
+ # We can include this if we want. See _read_6 for details.
1133
+ if self._is_protocol_file:
1134
+ offset = 0
1268
1135
  else:
1136
+ offset = self._samples_per_channel // 64
1137
+ offset -= offset % self._n_adc
1138
+ if (offset < self._n_adc): # pragma: no cover
1139
+ # Don't have a test for this, but this is part of the
1140
+ # established procedure.
1141
+ offset = self._n_adc
1142
+ offset /= self._rate
1269
1143
 
1270
- a = h['listADCInfo']
1271
- p = h['protocol']
1272
- for i in range(self._nc):
1273
- # Multiplier
1274
- f = (
1275
- a[i]['fInstrumentScaleFactor']
1276
- * a[i]['fADCProgrammableGain']
1277
- * p['lADCResolution']
1278
- / p['fADCRange'])
1144
+ # Holding level (converted and rounded)
1145
+ holding = round(df * dinfo(i_dac, 'fDACHoldingLevel'), n_digits)
1279
1146
 
1280
- # Signal conditioner used?
1281
- if 'nSignalType' in h: # pragma: no cover
1282
- # Cover pragma: Don't have appropriate test file
1283
- if h['nSignalType'] != 0:
1284
- f *= a[i]['fSignalGain']
1147
+ # Create protocol
1148
+ p = myokit.Protocol()
1149
+ start = 0
1150
+ next_start = self._sweep_start_to_start
1151
+ for i_sweep in range(self._sweeps_per_run):
1152
+ # Start of sweep: secret event at holding potential
1153
+ if include_initial_holding:
1154
+ e_start = round(tf * start, n_digits)
1155
+ e_length = round(tf * offset, n_digits)
1156
+ p.schedule(holding, e_start, e_length)
1157
+ start += offset
1158
+
1159
+ for e in einfo(i_dac):
1160
+ if e['type'] == EPOCH_STEPPED:
1161
+ dur = e['init_duration'] / self._rate
1162
+ inc = e['duration_inc'] / self._rate
1163
+ duration = dur + i_sweep * inc
1164
+ level = e['init_level'] + e['level_inc'] * i_sweep
1165
+
1166
+ e_level = round(df * level, n_digits)
1167
+ e_start = round(tf * start, n_digits)
1168
+ e_length = round(tf * duration, n_digits)
1169
+ p.schedule(e_level, e_start, e_length)
1285
1170
 
1286
- # Additional gain?
1287
- if a[i]['nTelegraphEnable']:
1288
- f *= a[i]['fTelegraphAdditGain']
1171
+ start += duration
1172
+ # Note: Only other type can be EPOCH_DISABLED at this point
1289
1173
 
1290
- # Set final gain factor
1291
- self._adc_factors.append(1.0 / f)
1174
+ # End of sweep: event at holding potential
1175
+ e_start = round(tf * start, n_digits)
1176
+ e_length = round(tf * (next_start - start), n_digits)
1177
+ p.schedule(holding, e_start, e_length)
1178
+ start = next_start
1179
+ next_start += self._sweep_start_to_start
1292
1180
 
1293
- # Shift
1294
- s = a[i]['fInstrumentOffset']
1181
+ return p
1295
1182
 
1296
- # Signal conditioner used?
1297
- if 'nSignalType' in h: # pragma: no cover
1183
+ def da_units(self, index=None):
1184
+ # Docstring in SweepSource
1185
+ if index is None:
1186
+ return list(self._da_units)
1187
+ return self._da_units[index]
1188
+
1189
+ def equal_length_sweeps(self):
1190
+ # Always true for ABF
1191
+ return True
1192
+
1193
+ def filename(self):
1194
+ """ Returns this ABF file's filename. """
1195
+ return self._filename
1196
+
1197
+ def log(self, join_sweeps=False, use_names=False, include_da=True):
1198
+ # Docstring in SweepSource
1199
+
1200
+ # Create log, return if no sweeps or channels
1201
+ log = myokit.DataLog()
1202
+ ns = len(self._sweeps)
1203
+ if ns == 0 or (self._n_adc + self._n_dac) == 0: # pragma: no cover
1204
+ return log
1205
+
1206
+ # Get channel names
1207
+ if use_names:
1208
+ nc = self._n_adc + (self._n_dac if include_da else 0)
1209
+ names = [c.name() for c in self._sweeps[0][:nc]]
1210
+ else:
1211
+ names = [f'{i}.channel' for i in range(self._n_adc)]
1212
+ if include_da:
1213
+ names += [f'{i}.da' for i in range(self._n_dac)]
1214
+
1215
+ # Gather data and return
1216
+ t = self._sweeps[0][0].times()
1217
+ if not join_sweeps:
1218
+ log['time'] = t
1219
+ for i_sweep, sweep in enumerate(self._sweeps):
1220
+ for channel, name in zip(sweep, names):
1221
+ log[name, i_sweep] = channel.values()
1222
+ else:
1223
+ log['time'] = np.concatenate(
1224
+ [t + i * self._sweep_start_to_start for i in range(ns)])
1225
+ for i_channel, name in enumerate(names):
1226
+ log[name] = np.concatenate(
1227
+ [sweep[i_channel].values() for sweep in self._sweeps])
1228
+ log.set_time_key('time')
1229
+ return log
1230
+
1231
+ def matplotlib_figure(self):
1232
+ """ Creates and returns a matplotlib figure with this file's data. """
1233
+ import matplotlib.pyplot as plt
1234
+ f = plt.figure()
1235
+ plt.suptitle(self.filename())
1236
+
1237
+ # Plot AD channels
1238
+ ax = plt.subplot(2, 1, 1)
1239
+ ax.set_title('Measured data')
1240
+ times = None
1241
+ for sweep in self._sweeps:
1242
+ for channel in sweep[:self._n_adc]:
1243
+ if times is None:
1244
+ times = channel.times()
1245
+ plt.plot(times, channel.values())
1246
+
1247
+ # Plot DA channels
1248
+ n = self._n_dac
1249
+ ax = [plt.subplot(2, n, n + 1 + i) for i in range(n)]
1250
+ for sweep in self._sweeps:
1251
+ for i, channel in enumerate(sweep[self._n_adc:]):
1252
+ ax[i].set_title(channel.name())
1253
+ ax[i].plot(times, channel.values())
1254
+
1255
+ return f
1256
+
1257
+ def meta_str(self, show_header=False):
1258
+ """
1259
+ Returns a multi-line string with meta data about this file.
1260
+
1261
+ The optional argument ``show_header`` can be used to add the full
1262
+ header contents to the output.
1263
+ """
1264
+ out = []
1265
+
1266
+ # File info
1267
+ if self._is_protocol_file:
1268
+ out.append(f'Axon Protocol File: {self._filename}')
1269
+ else:
1270
+ out.append(f'Axon Binary File: {self._filename}')
1271
+ out.append(f'ABF Format version {self._version_str}')
1272
+ out.append(f'Recorded on: {self._datetime}')
1273
+
1274
+ # AProtocol info
1275
+ out.append(
1276
+ f'Acquisition mode: {self._mode}: {acquisition_modes[self._mode]}')
1277
+ if self._number_of_trials:
1278
+ out.append(
1279
+ f'Protocol set for {self._number_of_trials} trials,'
1280
+ f' spaced {self._trial_start_to_start}s apart.')
1281
+ out.append(
1282
+ f' with {self._runs_per_trial} runs per trial,'
1283
+ f' spaced {self._run_start_to_start}s apart.')
1284
+ out.append(
1285
+ f' and {self._sweeps_per_run} sweeps per run,'
1286
+ f' spaced {self._sweep_start_to_start}s apart.')
1287
+ else: # pragma: no cover
1288
+ out.append('Protocol data could not be determined.')
1289
+ out.append(f'Sampling rate: {self._rate} Hz')
1290
+
1291
+ # Channel info
1292
+ if len(self._sweeps) > 0:
1293
+
1294
+ # A/D recordings
1295
+ for i, c in enumerate(self._sweeps[0][:self._n_adc]):
1296
+ out.append(f'A/D Channel {i}: "{c._name}"')
1297
+ if c._type: # pragma: no cover
1298
1298
  # Cover pragma: Don't have appropriate test file
1299
- if h['nSignalType'] != 0:
1300
- s -= a[i]['fSignalOffset']
1299
+ out.append(f' Type: {type_mode_names[c._type]}')
1300
+ out.append(f' Unit: {c._unit}')
1301
+ if c._lopass:
1302
+ out.append(f' Low-pass filter: {c._lopass} Hz')
1303
+ if c._cm:
1304
+ out.append(f' Cm (telegraphed): {c._cm} pF')
1305
+ if c._rs: # pragma: no cover
1306
+ # Cover pragma: Don't have appropriate test file
1307
+ out.append(f' Rs (telegraphed): {c._rs}')
1301
1308
 
1302
- # Set final offset
1303
- self._adc_offsets.append(s)
1309
+ # Reconstructed D/A outputs
1310
+ for i, c in enumerate(self._sweeps[0][self._n_adc:]):
1311
+ out.append(f'D/A Channel {i}: "{c._name}"')
1312
+ if c._type: # pragma: no cover
1313
+ # Cover pragma: Don't have appropriate test file
1314
+ out.append(' Type: {type_mode_names[c._type]}')
1315
+ out.append(f' Unit: {c._unit}')
1316
+
1317
+ # Add full header info
1318
+ if show_header:
1319
+ if self._strings:
1320
+ dict_to_string(out, 'Strings', {'strings': self._strings})
1321
+ dict_to_string(out, 'file header', self._header)
1322
+
1323
+ return '\n'.join(out)
1324
+
1325
+ def path(self):
1326
+ """ Returns the path to the underlying ABF file. """
1327
+ return self._filepath
1328
+
1329
+ def sweep_count(self):
1330
+ # Docstring in SweepSource
1331
+ return len(self._sweeps)
1332
+
1333
+ def time_unit(self):
1334
+ # Docstring in SweepSource
1335
+ # For ABF, this is always seconds
1336
+ return myokit.units.s
1337
+
1338
+ def _unit(self, unit_string):
1339
+ """ Parses a unit string and returns a :class:`myokit.Unit`. """
1340
+ try:
1341
+ return self._unit_cache[unit_string]
1342
+ except KeyError:
1343
+ unit = myokit.parse_unit(unit_string.replace(MU, 'u'))
1344
+ self._unit_cache[unit_string] = unit
1345
+ return unit
1346
+
1347
+ def version(self):
1348
+ """ Returns a string representation of this file's version number. """
1349
+ return self._version_str
1304
1350
 
1305
1351
 
1306
1352
  class Sweep:
1307
1353
  """
1308
- Represents a single sweep (also called an 'episode')
1354
+ Represents a single sweep (also called an *episode*).
1309
1355
 
1310
- A sweep is represented as a fixed-size list of channels.
1356
+ Each sweep contains a fixed number of :class:`channels<Channel>`.
1311
1357
  """
1312
- def __init__(self, n):
1313
- super().__init__()
1314
- n = int(n)
1315
- if n < 0: # pragma: no cover
1316
- raise ValueError('Number channels cannot be negative.')
1317
- self._nc = n # Number of channels
1318
- self._channels = [None] * n
1358
+ def __init__(self):
1359
+ self._channels = []
1319
1360
 
1320
1361
  def __getitem__(self, key):
1321
1362
  return self._channels[key] # Handles slices etc.
@@ -1324,31 +1365,26 @@ class Sweep:
1324
1365
  return iter(self._channels)
1325
1366
 
1326
1367
  def __len__(self):
1327
- return self._nc
1328
-
1329
- def __setitem__(self, key, value):
1330
- if type(key) == slice:
1331
- raise NotImplementedError(
1332
- 'Assignment with slices is not supported.')
1333
- self._channels[key] = value
1368
+ return len(self._channels)
1334
1369
 
1335
1370
 
1336
1371
  class Channel:
1337
1372
  """
1338
- Represents an analog signal for a single channel.
1373
+ Represents a signal for a single channel.
1339
1374
 
1340
- To obtain this channel's formatted data, use times() and trace()
1375
+ To obtain its data, use :meth:`times` and :meth:`values`.
1341
1376
  """
1342
1377
  def __init__(self, parent_file):
1343
- super().__init__()
1344
1378
  self._parent_file = parent_file # The abf file this channel is from
1345
1379
  self._type = TYPE_UNKNOWN # Type of recording
1346
1380
 
1347
1381
  # This channel's name
1348
1382
  self._name = None
1349
1383
 
1350
- # This channel's index (see note below)
1351
- self._numb = None
1384
+ # This channel's index in the file. This is basically a name, and does
1385
+ # not correspond to e.g. its index in the ADC/DAC info or its index in
1386
+ # the sweep's list of channels.
1387
+ self._index = None
1352
1388
 
1353
1389
  # The units this channel's data is in
1354
1390
  self._unit = None
@@ -1371,40 +1407,61 @@ class Channel:
1371
1407
  # The reported low-pass filter cut-off frequency
1372
1408
  self._lopass = None
1373
1409
 
1374
- # Note that the channel indices are not necessarily sequential! So a
1375
- # file with 2 channels can have indices 0 and 3.
1410
+ def index(self):
1411
+ """ Returns the index set for this channel. """
1412
+ return self._index
1376
1413
 
1377
1414
  def name(self):
1378
- """
1379
- Returns the name set for this channel.
1380
- """
1415
+ """ Returns the name set for this channel. """
1381
1416
  return self._name
1382
1417
 
1383
- def number(self):
1384
- """
1385
- Returns the channel index used by pClamp. Note that this does not
1386
- necessarily equal its index in the Python sweep data!
1387
- """
1388
- return self._numb
1389
-
1390
1418
  def __str__(self):
1391
- return 'Channel(' + str(self._numb) + ' "' + str(self._name) \
1392
- + '"); ' + str(len(self._data)) + ' points sampled at ' \
1393
- + str(self._rate) + 'Hz, starts at t=' + str(self._start)
1419
+ return (
1420
+ f'Channel({self._index} "{self._name}"); {len(self._data)} points'
1421
+ f' sampled at {self._rate}Hz, starts at t={self._start}.')
1394
1422
 
1395
1423
  def times(self):
1396
- """
1397
- Returns a copy of the values on the time axis.
1398
- """
1424
+ """ Returns a copy of the values on the time axis. """
1399
1425
  n = len(self._data)
1400
- f = 1.0 / self._rate
1426
+ f = 1 / self._rate
1401
1427
  return np.arange(self._start, self._start + n * f, f)[0:n]
1402
1428
 
1429
+ def unit(self):
1430
+ """ Returns the units this channel is in. """
1431
+ return self._unit
1432
+
1403
1433
  def values(self):
1404
- """
1405
- Returns a copy of the values on the data axis.
1406
- """
1407
- return np.array(self._data, copy=True)
1434
+ """ Returns a copy of the values on the data axis. """
1435
+ return np.copy(self._data)
1436
+
1437
+
1438
+ def dict_to_string(out, name, d, tab=''):
1439
+ """ Used by AbfFile.info(). """
1440
+ m = max(0, 38 - len(tab) - int(0.1 + len(name) / 2))
1441
+ out.append(f'{tab}{"-" * m} {name} {"-" * m}')
1442
+ for n, v in d.items():
1443
+ n = str(n)
1444
+ if type(v) == dict:
1445
+ dict_to_string(out, n, v, f'{tab} ')
1446
+ elif type(v) == list:
1447
+ list_to_string(out, n, v, tab)
1448
+ else:
1449
+ out.append(f'{tab}{n}: {v}')
1450
+ m = max(0, 80 - 2 * len(tab))
1451
+ out.append(f'{tab}{m * "-"}')
1452
+
1453
+
1454
+ def list_to_string(out, name, d, tab=''):
1455
+ """ Used by AbfFile.info(). """
1456
+ for index, item in enumerate(d):
1457
+ n = f'{name}[{index}]'
1458
+ if type(item) == dict:
1459
+ dict_to_string(out, n, item, tab)
1460
+ elif type(item) == list: # pragma: no cover
1461
+ # Cover pragma: Don't have appropriate test file
1462
+ list_to_string(out, n, item, tab)
1463
+ else:
1464
+ out.append(f'{tab}{n}: {item}')
1408
1465
 
1409
1466
 
1410
1467
  # Some python struct types:
@@ -1416,11 +1473,17 @@ class Channel:
1416
1473
  BLOCKSIZE = 512
1417
1474
 
1418
1475
 
1476
+ # A mu, sometimes found in unit strings
1477
+ MU = '\u00b5'
1478
+
1479
+
1419
1480
  # Header fields for versions 1 and 2
1420
1481
  # Stored as (key, offset, format) where format corresponds to a struct
1421
1482
  # unpacking format as documented in:
1422
1483
  # http://docs.python.org/library/struct.html#format-characters
1423
- headerFields = {
1484
+ HEADER_FIELDS = {
1485
+ # Note that a lot of the groups in the version 1 header start with obsolete
1486
+ # fields, followed later by their newer equivalents.
1424
1487
  1: [
1425
1488
  ('fFileSignature', 0, '4s'), # Coarse file version indication
1426
1489
  # Group 1, File info and sizes
@@ -1517,7 +1580,7 @@ headerFields = {
1517
1580
  ('nDigitalHolding', 1584, 'h'),
1518
1581
  ('nDigitalInterEpisode', 1586, 'h'),
1519
1582
  ('nDigitalValue', 2588, '10h'),
1520
- ('lDACFilePtr', 2048, '2i'), # Pointer to protocol?
1583
+ ('lDACFilePtr', 2048, '2i'),
1521
1584
  ('lDACFileNumEpisodes', 2056, '2i'),
1522
1585
  ('fDACCalibrationFactor', 2074, '4f'),
1523
1586
  ('fDACCalibrationOffset', 2090, '4f'),
@@ -1529,13 +1592,12 @@ headerFields = {
1529
1592
  ('fEpochLevelInc', 2428, '20f'),
1530
1593
  ('lEpochInitDuration', 2508, '20i'),
1531
1594
  ('lEpochDurationInc', 2588, '20i'),
1532
- # Group 10, DAC Output file
1595
+ # Group 10, DAC Output file (Stimulus file)
1533
1596
  ('fDACFileScale', 2708, 'd'),
1534
1597
  ('fDACFileOffset', 2716, 'd'),
1535
1598
  ('lDACFileEpisodeNum', 2724, 'i'),
1536
1599
  ('nDACFileADCNum', 2732, '2h'),
1537
- # 256 * 2char = utf8? or 2 strings?
1538
- ('sDACFilePath', 2736, '256s' * 2),
1600
+ ('sDACFilePath', 2736, '256s' * 2), # Two strings
1539
1601
  # Group 11,
1540
1602
  # Group 12, User list parameters
1541
1603
  ('nULEnable', 3360, '4h'),
@@ -1554,7 +1616,7 @@ headerFields = {
1554
1616
  # Group 22
1555
1617
  # Group 23 Post-processing
1556
1618
  # Group 24 Legacy stuff
1557
- # Group 6 again?
1619
+ # Group 6 extended
1558
1620
  ('nTelegraphEnable', 4512, '16h'),
1559
1621
  ('fTelegraphAdditGain', 4576, '16f'),
1560
1622
  ],
@@ -1582,7 +1644,7 @@ headerFields = {
1582
1644
 
1583
1645
 
1584
1646
  # ABF2 File sections
1585
- abf2FileSections = [
1647
+ ABF2_FILE_SECTIONS = [
1586
1648
  'Protocol',
1587
1649
  'ADC',
1588
1650
  'DAC',
@@ -1605,7 +1667,7 @@ abf2FileSections = [
1605
1667
 
1606
1668
 
1607
1669
  # ABF2 Fields in the tag section
1608
- TagInfoDescription = [
1670
+ ABF2_TAG_INFO_DESCRIPTION = [
1609
1671
  ('lTagTime', 'i'),
1610
1672
  ('sComment', '56s'),
1611
1673
  ('nTagType', 'h'),
@@ -1614,7 +1676,7 @@ TagInfoDescription = [
1614
1676
 
1615
1677
 
1616
1678
  # ABF2 Fields in the protocol section
1617
- protocolFields = [
1679
+ ABF2_PROTOCOL_FIELDS = [
1618
1680
  ('nOperationMode', 'h'), # 0
1619
1681
  ('fADCSequenceInterval', 'f'), # 2
1620
1682
  ('bEnableFileCompression', 'b'), # 6
@@ -1690,7 +1752,7 @@ protocolFields = [
1690
1752
 
1691
1753
 
1692
1754
  # ABF2 Fields in the ADC section
1693
- ADCFields = [
1755
+ ABF2_ADC_FIELDS = [
1694
1756
  ('nADCNum', 'h'),
1695
1757
  ('nTelegraphEnable', 'h'),
1696
1758
  ('nTelegraphInstrument', 'h'),
@@ -1722,7 +1784,7 @@ ADCFields = [
1722
1784
 
1723
1785
 
1724
1786
  # ABF2 Fields in the DAC section
1725
- DACFields = [
1787
+ ABF2_DAC_FIELDS = [
1726
1788
  ('nDACNum', 'h'),
1727
1789
  ('nTelegraphDACScaleFactorEnable', 'h'),
1728
1790
  ('fInstrumentHoldingLevel', 'f'),
@@ -1769,7 +1831,7 @@ DACFields = [
1769
1831
 
1770
1832
 
1771
1833
  # ABF2 Fields in the DAC-Epoch section
1772
- EpochInfoPerDACFields = [
1834
+ ABF2_EPOCH_INFO_PER_DAC_FIELD = [
1773
1835
  ('nEpochNum', 'h'),
1774
1836
  ('nDACNum', 'h'),
1775
1837
  ('nEpochType', 'h'),
@@ -1781,7 +1843,9 @@ EpochInfoPerDACFields = [
1781
1843
  ('lEpochPulseWidth', 'i'),
1782
1844
  ('sUnused', '18s'),
1783
1845
  ]
1784
- UserListFields = [
1846
+
1847
+ # ABF2 User list fields
1848
+ ABF2_USER_LIST_FIELDS = [
1785
1849
  ('nListNum', 'h'),
1786
1850
  ('nULEnable', 'h'),
1787
1851
  ('nULParamToVary', 'h'),
@@ -1860,6 +1924,12 @@ type_mode_names = {
1860
1924
  }
1861
1925
 
1862
1926
 
1927
+ # DAC waveform types
1928
+ DAC_DISABLED = 0
1929
+ DAC_EPOCHTABLEWAVEFORM = 1 # Epochs
1930
+ DAC_DACFILEWAVEFORM = 2 # Stimulus file
1931
+
1932
+
1863
1933
  # User list parameter to vary
1864
1934
  '''
1865
1935
  CONDITNUMPULSES 0