acoular-24.3-py3-none-any.whl → acoular-24.5-py3-none-any.whl

This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (139)
  1. acoular/__init__.py +118 -50
  2. acoular/calib.py +29 -38
  3. acoular/configuration.py +116 -73
  4. acoular/demo/__init__.py +10 -4
  5. acoular/demo/acoular_demo.py +78 -53
  6. acoular/environments.py +265 -262
  7. acoular/fastFuncs.py +361 -191
  8. acoular/fbeamform.py +1460 -1404
  9. acoular/grids.py +501 -545
  10. acoular/h5cache.py +50 -59
  11. acoular/h5files.py +154 -137
  12. acoular/internal.py +10 -11
  13. acoular/microphones.py +57 -53
  14. acoular/sdinput.py +47 -52
  15. acoular/signals.py +167 -179
  16. acoular/sources.py +818 -693
  17. acoular/spectra.py +349 -359
  18. acoular/tbeamform.py +414 -413
  19. acoular/tfastfuncs.py +178 -101
  20. acoular/tools/__init__.py +25 -0
  21. acoular/tools/aiaa.py +186 -0
  22. acoular/tools/helpers.py +189 -0
  23. acoular/tools/metrics.py +165 -0
  24. acoular/tprocess.py +1201 -1143
  25. acoular/traitsviews.py +513 -501
  26. acoular/trajectory.py +50 -52
  27. acoular/version.py +5 -6
  28. acoular/xml/minidsp_uma-16.xml +20 -0
  29. acoular/xml/{minidsp_uma16.xml → minidsp_uma-16_mirrored.xml} +3 -0
  30. {acoular-24.3.dist-info → acoular-24.5.dist-info}/METADATA +45 -46
  31. acoular-24.5.dist-info/RECORD +50 -0
  32. {acoular-24.3.dist-info → acoular-24.5.dist-info}/WHEEL +1 -1
  33. acoular-24.5.dist-info/licenses/LICENSE +28 -0
  34. acoular/fileimport.py +0 -380
  35. acoular/nidaqimport.py +0 -273
  36. acoular/tests/reference_data/BeamformerBase.npy +0 -0
  37. acoular/tests/reference_data/BeamformerBaseFalse1.npy +0 -0
  38. acoular/tests/reference_data/BeamformerBaseFalse2.npy +0 -0
  39. acoular/tests/reference_data/BeamformerBaseFalse3.npy +0 -0
  40. acoular/tests/reference_data/BeamformerBaseFalse4.npy +0 -0
  41. acoular/tests/reference_data/BeamformerBaseTrue1.npy +0 -0
  42. acoular/tests/reference_data/BeamformerBaseTrue2.npy +0 -0
  43. acoular/tests/reference_data/BeamformerBaseTrue3.npy +0 -0
  44. acoular/tests/reference_data/BeamformerBaseTrue4.npy +0 -0
  45. acoular/tests/reference_data/BeamformerCMFLassoLarsBIC.npy +0 -0
  46. acoular/tests/reference_data/BeamformerCMFNNLS.npy +0 -0
  47. acoular/tests/reference_data/BeamformerCapon.npy +0 -0
  48. acoular/tests/reference_data/BeamformerClean.npy +0 -0
  49. acoular/tests/reference_data/BeamformerCleansc.npy +0 -0
  50. acoular/tests/reference_data/BeamformerCleant.npy +0 -0
  51. acoular/tests/reference_data/BeamformerCleantSq.npy +0 -0
  52. acoular/tests/reference_data/BeamformerCleantSqTraj.npy +0 -0
  53. acoular/tests/reference_data/BeamformerCleantTraj.npy +0 -0
  54. acoular/tests/reference_data/BeamformerDamas.npy +0 -0
  55. acoular/tests/reference_data/BeamformerDamasPlus.npy +0 -0
  56. acoular/tests/reference_data/BeamformerEig.npy +0 -0
  57. acoular/tests/reference_data/BeamformerEigFalse1.npy +0 -0
  58. acoular/tests/reference_data/BeamformerEigFalse2.npy +0 -0
  59. acoular/tests/reference_data/BeamformerEigFalse3.npy +0 -0
  60. acoular/tests/reference_data/BeamformerEigFalse4.npy +0 -0
  61. acoular/tests/reference_data/BeamformerEigTrue1.npy +0 -0
  62. acoular/tests/reference_data/BeamformerEigTrue2.npy +0 -0
  63. acoular/tests/reference_data/BeamformerEigTrue3.npy +0 -0
  64. acoular/tests/reference_data/BeamformerEigTrue4.npy +0 -0
  65. acoular/tests/reference_data/BeamformerFunctional.npy +0 -0
  66. acoular/tests/reference_data/BeamformerGIB.npy +0 -0
  67. acoular/tests/reference_data/BeamformerGridlessOrth.npy +0 -0
  68. acoular/tests/reference_data/BeamformerMusic.npy +0 -0
  69. acoular/tests/reference_data/BeamformerOrth.npy +0 -0
  70. acoular/tests/reference_data/BeamformerSODIX.npy +0 -0
  71. acoular/tests/reference_data/BeamformerTime.npy +0 -0
  72. acoular/tests/reference_data/BeamformerTimeSq.npy +0 -0
  73. acoular/tests/reference_data/BeamformerTimeSqTraj.npy +0 -0
  74. acoular/tests/reference_data/BeamformerTimeTraj.npy +0 -0
  75. acoular/tests/reference_data/Environment.npy +0 -0
  76. acoular/tests/reference_data/Example1_numerical_values_testsum.h5 +0 -0
  77. acoular/tests/reference_data/FiltFiltOctave__.npy +0 -0
  78. acoular/tests/reference_data/FiltFiltOctave_band_100_0_fraction_Thirdoctave_.npy +0 -0
  79. acoular/tests/reference_data/FiltFreqWeight_weight_A_.npy +0 -0
  80. acoular/tests/reference_data/FiltFreqWeight_weight_C_.npy +0 -0
  81. acoular/tests/reference_data/FiltFreqWeight_weight_Z_.npy +0 -0
  82. acoular/tests/reference_data/FiltOctave__.npy +0 -0
  83. acoular/tests/reference_data/FiltOctave_band_100_0_fraction_Thirdoctave_.npy +0 -0
  84. acoular/tests/reference_data/Filter__.npy +0 -0
  85. acoular/tests/reference_data/GeneralFlowEnvironment.npy +0 -0
  86. acoular/tests/reference_data/OctaveFilterBank__.npy +0 -0
  87. acoular/tests/reference_data/OpenJet.npy +0 -0
  88. acoular/tests/reference_data/PointSource.npy +0 -0
  89. acoular/tests/reference_data/PowerSpectra_csm.npy +0 -0
  90. acoular/tests/reference_data/PowerSpectra_ev.npy +0 -0
  91. acoular/tests/reference_data/RotatingFlow.npy +0 -0
  92. acoular/tests/reference_data/SlotJet.npy +0 -0
  93. acoular/tests/reference_data/TimeAverage__.npy +0 -0
  94. acoular/tests/reference_data/TimeCumAverage__.npy +0 -0
  95. acoular/tests/reference_data/TimeExpAverage_weight_F_.npy +0 -0
  96. acoular/tests/reference_data/TimeExpAverage_weight_I_.npy +0 -0
  97. acoular/tests/reference_data/TimeExpAverage_weight_S_.npy +0 -0
  98. acoular/tests/reference_data/TimeInOut__.npy +0 -0
  99. acoular/tests/reference_data/TimePower__.npy +0 -0
  100. acoular/tests/reference_data/TimeReverse__.npy +0 -0
  101. acoular/tests/reference_data/UniformFlowEnvironment.npy +0 -0
  102. acoular/tests/reference_data/beamformer_traj_time_data.h5 +0 -0
  103. acoular/tests/run_tests.sh +0 -18
  104. acoular/tests/run_tests_osx.sh +0 -16
  105. acoular/tests/test.npy +0 -0
  106. acoular/tests/test_beamformer_results.py +0 -213
  107. acoular/tests/test_classes.py +0 -60
  108. acoular/tests/test_digest.py +0 -125
  109. acoular/tests/test_environments.py +0 -73
  110. acoular/tests/test_example1.py +0 -124
  111. acoular/tests/test_grid.py +0 -92
  112. acoular/tests/test_integrate.py +0 -102
  113. acoular/tests/test_signals.py +0 -60
  114. acoular/tests/test_sources.py +0 -65
  115. acoular/tests/test_spectra.py +0 -38
  116. acoular/tests/test_timecache.py +0 -35
  117. acoular/tests/test_tprocess.py +0 -90
  118. acoular/tests/test_traj_beamformer_results.py +0 -164
  119. acoular/tests/unsupported/SpeedComparison/OvernightTestcasesBeamformer_nMics32_nGridPoints100_nFreqs4_nTrials10.png +0 -0
  120. acoular/tests/unsupported/SpeedComparison/cythonBeamformer.pyx +0 -237
  121. acoular/tests/unsupported/SpeedComparison/mainForCython.py +0 -103
  122. acoular/tests/unsupported/SpeedComparison/mainForParallelJit.py +0 -143
  123. acoular/tests/unsupported/SpeedComparison/setupCythonOpenMP.py +0 -63
  124. acoular/tests/unsupported/SpeedComparison/sharedFunctions.py +0 -153
  125. acoular/tests/unsupported/SpeedComparison/timeOverNMics_AllImportantMethods.png +0 -0
  126. acoular/tests/unsupported/SpeedComparison/timeOverNMics_faverage.png +0 -0
  127. acoular/tests/unsupported/SpeedComparison/vglOptimierungFAverage.py +0 -204
  128. acoular/tests/unsupported/SpeedComparison/vglOptimierungGaussSeidel.py +0 -182
  129. acoular/tests/unsupported/SpeedComparison/vglOptimierungR_BEAMFULL_INVERSE.py +0 -764
  130. acoular/tests/unsupported/SpeedComparison/vglOptimierungR_BEAM_OS.py +0 -231
  131. acoular/tests/unsupported/SpeedComparison/whatsFastestWayFor_absASquared.py +0 -48
  132. acoular/tests/unsupported/functionalBeamformer.py +0 -123
  133. acoular/tests/unsupported/precisionTest.py +0 -153
  134. acoular/tests/unsupported/validationOfBeamformerFuncsPOSTAcoularIntegration.py +0 -254
  135. acoular/tests/unsupported/validationOfBeamformerFuncsPREeAcoularIntegration.py +0 -531
  136. acoular/tools.py +0 -422
  137. acoular-24.3.dist-info/RECORD +0 -148
  138. acoular-24.3.dist-info/licenses/LICENSE +0 -29
  139. {acoular-24.3.dist-info → acoular-24.5.dist-info}/licenses/AUTHORS.rst +0 -0
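Items 28–29 in the list above add a UMA-16 microphone geometry (plus a mirrored variant) to the bundled XML files. As a hedged sketch of how such a geometry is typically loaded, the snippet below uses acoular.MicGeom; the from_file trait and mpos attribute are assumed from the Acoular API of this release line and should be verified against the installed version.

```python
# Hedged sketch: load one of the bundled UMA-16 geometries added in 24.5.
# `MicGeom.from_file` and `MicGeom.mpos` are assumed trait names for this
# release line; check them against the installed Acoular version.
from pathlib import Path

import acoular

xml_file = Path(acoular.__file__).parent / 'xml' / 'minidsp_uma-16.xml'
mics = acoular.MicGeom(from_file=str(xml_file))
print(mics.mpos.shape)  # expected (3, 16): x/y/z coordinates of 16 microphones
```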
acoular/fileimport.py DELETED
@@ -1,380 +0,0 @@
1
- # -*- coding: utf-8 -*-
2
- #pylint: disable-msg=E0611, E1101, C0103, R0901, R0902, R0903, R0904, W0232
3
- #------------------------------------------------------------------------------
4
- # Copyright (c) Acoular Development Team.
5
- #------------------------------------------------------------------------------
6
- """
7
- Contains classes for importing time data in several file formats.
8
- Standard HDF (`*.h5`) import can be done using
9
- :class:`~acoular.sources.TimeSamples` objects.
10
-
11
- .. autosummary::
12
- :toctree: generated/
13
-
14
- time_data_import
15
- csv_import
16
- td_import
17
- bk_mat_import
18
- datx_import
19
- """
20
-
21
- from numpy import fromstring, float32, newaxis, empty, sort, zeros
22
- from traits.api import HasPrivateTraits, Float, Int, \
23
- File, CArray, Property, Any, Str
24
- from os import path
25
- import pickle
26
- import configparser
27
- import struct
28
-
29
- # acoular imports
30
- from .h5files import H5CacheFileBase, _get_h5file_class
31
- from .configuration import config
32
- class time_data_import( HasPrivateTraits ):
33
- """
34
- Base class for import of time data.
35
- """
36
-
37
- def get_data (self, td):
38
- """
39
- Imports the data into an arbitrary time_data object td.
40
- This is a dummy function and should not be used directly.
41
- """
42
- td.data = None
43
- td.numsamples = 0
44
- td.numchannels = 0
45
- td.sample_freq = 0
46
-
47
- class csv_import( time_data_import ):
48
- """
49
- Class that supports the import of CSV data as saved by NI VI Logger.
50
- """
51
-
52
- #: Name of the comma delimited file to import.
53
- from_file = File(filter = ['*.txt'],
54
- desc = "name of the comma delimited file to import")
55
-
56
- #: Header length, defaults to 6.
57
- header_length = Int(6,
58
- desc = "length of the header to ignore during import")
59
-
60
- #: Number of leading columns (will be ignored during import), defaults to 1.
61
- dummy_columns = Int(1,
62
- desc = "number of leading columns to ignore during import")
63
-
64
- def get_data (self, td):
65
- """
66
- Imports the data from CSV file into a
67
- :class:`~acoular.sources.TimeSamples` object `td`.
68
- Also, a `*.h5` file will be written, so this import
69
- need not be performed every time the data is needed
70
- """
71
- if not path.isfile(self.from_file):
72
- # no file there
73
- time_data_import.get_data(self, td)
74
- return
75
- #import data
76
- c = self.header_length
77
- d = self.dummy_columns
78
- f = open(self.from_file)
79
- #read header
80
- for line in f:
81
- c -= 1
82
- h = line.split(':')
83
- if h[0] == 'Scan rate':
84
- sample_freq = int(1./float(h[1].split(' ')[1]))
85
- if c == 0:
86
- break
87
- line = next(f)
88
- data = fromstring(line, dtype = float32, sep = ', ')[d:]
89
- numchannels = len(data)
90
- name = td.name
91
- if name == "":
92
- name = path.join(config.td_dir, \
93
- path.splitext(path.basename(self.from_file))[0]+'.h5')
94
- else:
95
- if td.h5f != None:
96
- td.h5f.close()
97
- # TODO problems with already open h5 files from other instances
98
- file = _get_h5file_class()
99
- f5h = file(name, mode = 'w')
100
- f5h.create_extendable_array(
101
- 'time_data', (0, numchannels), "float32")
102
- ac = f5h.get_data_by_reference('time_data')
103
- f5h.set_node_attribute(ac,'sample_freq',sample_freq)
104
- f5h.append_data(ac,data[newaxis, :])
105
- for line in f:
106
- f5h.append_data(ac,fromstring(line, dtype=float32, sep=', ')[newaxis, d:])
107
- f5h.close()
108
- td.name = name
109
- td.load_data()
110
-
111
- class td_import( time_data_import ):
112
- """
113
- Import of `*.td` data as saved by earlier versions
114
- """
115
-
116
- #: Name of the comma delimited file to import.
117
- from_file = File(filter = ['*.td'],
118
- desc = "name of the *.td file to import")
119
-
120
- def get_data (self, td):
121
- """
122
- Main work is done here: imports the data from `*.td` file into
123
- TimeSamples object `td` and saves also a `*.h5` file so this import
124
- need not be performed only once.
125
- """
126
- if not path.isfile(self.from_file):
127
- # no file there
128
- time_data_import.get_data(self, td)
129
- return
130
- f = open(self.from_file, 'rb')
131
- h = pickle.load(f)
132
- f.close()
133
- sample_freq = h['sample_freq']
134
- data = h['data']
135
- numchannels = data.shape[1]
136
- name = td.name
137
- if name == "":
138
- name = path.join(config.td_dir, \
139
- path.splitext(path.basename(self.from_file))[0]+'.h5')
140
- else:
141
- if td.h5f != None:
142
- td.h5f.close()
143
- # TODO problems with already open h5 files from other instances
144
- file = _get_h5file_class()
145
- f5h = file(name, mode = 'w')
146
- f5h.create_extendable_array(
147
- 'time_data', (0, numchannels), "float32")
148
- ac = f5h.get_data_by_reference('time_data')
149
- f5h.set_node_attribute(ac,'sample_freq',sample_freq)
150
- f5h.append_data(ac,data)
151
- f5h.close()
152
- td.name = name
153
- td.load_data()
154
-
155
-
156
- class bk_mat_import( time_data_import ):
157
- """
158
- Import of BK pulse matlab data.
159
- """
160
-
161
- #: Name of the mat file to import
162
- from_file = File(filter = ['*.mat'],
163
- desc = "name of the BK pulse mat file to import")
164
-
165
- def get_data (self, td):
166
- """
167
- Main work is done here: imports the data from pulse .mat file into
168
- time_data object 'td' and saves also a `*.h5` file so this import
169
- need not be performed every time the data is needed.
170
- """
171
- if not path.isfile(self.from_file):
172
- # no file there
173
- time_data_import.get_data(self, td)
174
- return
175
- #import data
176
- from scipy.io import loadmat
177
- m = loadmat(self.from_file)
178
- fh = m['File_Header']
179
- numchannels = int(fh.NumberOfChannels)
180
- l = int(fh.NumberOfSamplesPerChannel)
181
- sample_freq = float(fh.SampleFrequency.replace(', ', '.'))
182
- data = empty((l, numchannels), 'f')
183
- for i in range(numchannels):
184
- # map SignalName "Point xx" to channel xx-1
185
- ii = int(m["Channel_%i_Header" % (i+1)].SignalName[-2:])-1
186
- data[:, ii] = m["Channel_%i_Data" % (i+1)]
187
- name = td.name
188
- if name == "":
189
- name = path.join(config.td_dir, \
190
- path.splitext(path.basename(self.from_file))[0]+'.h5')
191
- else:
192
- if td.h5f != None:
193
- td.h5f.close()
194
- # TODO problems with already open h5 files from other instances
195
- file = _get_h5file_class()
196
- f5h = file(name, mode = 'w')
197
- f5h.create_extendable_array(
198
- 'time_data', (0, numchannels), "float32")
199
- ac = f5h.get_data_by_reference('time_data')
200
- f5h.set_node_attribute(ac,'sample_freq',sample_freq)
201
- f5h.append_data(ac,data)
202
- f5h.close()
203
- td.name = name
204
- td.load_data()
205
-
206
- class datx_d_file(HasPrivateTraits):
207
- """
208
- Helper class for import of `*.datx` data, represents
209
- datx data file.
210
- """
211
- # File name
212
- name = File(filter = ['*.datx'],
213
- desc = "name of datx data file")
214
-
215
- # File object
216
- f = Any()
217
-
218
- # Properties
219
- data_offset = Int()
220
- channel_count = Int()
221
- num_samples_per_block = Int()
222
- bytes_per_sample = Int()
223
- block_size = Property()
224
-
225
- # Number of blocks to read in one pull
226
- blocks = Int()
227
- # The actual block data
228
- data = CArray()
229
-
230
- def _get_block_size( self ):
231
- return self.channel_count*self.num_samples_per_block*\
232
- self.bytes_per_sample
233
-
234
- def get_next_blocks( self ):
235
- """ pulls next blocks """
236
- s = self.f.read(self.blocks*self.block_size)
237
- ls = len(s)
238
- if ls == 0:
239
- return -1
240
- bl_no = ls/self.block_size
241
- self.data = fromstring(s, dtype = 'Int16').reshape((bl_no, \
242
- self.channel_count, self.num_samples_per_block)).swapaxes(0, \
243
- 1).reshape((self.channel_count, bl_no*self.num_samples_per_block))
244
-
245
- def __init__(self, name, blocks = 128):
246
- self.name = name
247
- self.f = open(self.name, 'rb')
248
- s = self.f.read(32)
249
- # header
250
- s0 = struct.unpack('IIIIIIHHf', s)
251
- # Getting information about Properties of data-file
252
- # 3 = Offset to data 4 = channel count
253
- # 5 = number of samples per block 6 = bytes per sample
254
- self.data_offset = s0[3]
255
- self.channel_count = s0[4]
256
- self.num_samples_per_block = s0[5]
257
- self.bytes_per_sample = s0[6]
258
- self.blocks = blocks
259
- self.f.seek(self.data_offset)
260
-
261
- class datx_channel(HasPrivateTraits):
262
- """
263
- Helper class for import of .datx data, represents
264
- one channel.
265
- """
266
-
267
- label = Str()
268
- d_file = Str()
269
- ch_no = Int()
270
- ch_K = Str()
271
- volts_per_count = Float()
272
- msl_ccf = Float()
273
- cal_corr_factor = Float()
274
- internal_gain = Float()
275
- external_gain = Float()
276
- tare_volts = Float()
277
- cal_coeff_2 = Float()
278
- cal_coeff_1 = Float()
279
- tare_eu = Float()
280
- z0 = Float()
281
-
282
-
283
- def __init__(self, config, channel):
284
- d_file, ch_no, ch_K = config.get('channels', channel).split(', ')
285
- # Extraction and Splitting of Channel information
286
- self.d_file = d_file
287
- self.ch_no = int(ch_no)
288
- self.label = config.get(ch_K, 'channel_label')
289
- self.ch_K = ch_K
290
- # V
291
- # Reading conversion factors
292
- self.volts_per_count = float(config.get(ch_K, 'volts_per_count'))
293
- self.msl_ccf = float(config.get(ch_K, 'msl_ccf'))
294
- self.cal_corr_factor = float(config.get(ch_K, 'cal_corr_factor'))
295
- self.internal_gain = float(config.get(ch_K, 'internal_gain'))
296
- self.external_gain = float(config.get(ch_K, 'external_gain'))
297
- self.tare_volts = float(config.get(ch_K, 'tare_volts'))
298
- # EU
299
- self.cal_coeff_2 = float(config.get(ch_K, 'cal_coeff_2'))
300
- self.cal_coeff_1 = float(config.get(ch_K, 'cal_coeff_1'))
301
- self.tare_eu = float(config.get(ch_K, 'tare_eu'))
302
- self.z0 = (self.volts_per_count * self.msl_ccf * self.cal_corr_factor) \
303
- / (self.internal_gain * self.external_gain)
304
-
305
- def scale(self, x):
306
- """ scale function to produce output in engineering units """
307
- return (x * self.z0 - self.tare_volts) * self.cal_coeff_2 + \
308
- self.cal_coeff_1 - self.tare_eu
309
-
310
- class datx_import(time_data_import):
311
- """
312
- Import of .datx data
313
- """
314
-
315
- #: Name of the datx index file to import.
316
- from_file = File(filter = ['*.datx_index'],
317
- desc = "name of the datx index file to import")
318
-
319
- def get_data (self, td):
320
- """
321
- Main work is done here: imports the data from datx files into
322
- time_data object td and saves also a `*.h5` file so this import
323
- need not be performed every time the data is needed
324
- """
325
- if not path.isfile(self.from_file):
326
- # no file there
327
- time_data_import.get_data(self, td)
328
- return
329
- #browse datx information
330
- f0 = open(self.from_file)
331
- config = configparser.ConfigParser()
332
- config.readfp(f0)
333
- sample_rate = float(config.get('keywords', 'sample_rate'))
334
- # reading sample-rate from index-file
335
- channels = []
336
- d_files = {}
337
- # Loop over all channels assigned in index-file
338
- for channel in sort(config.options('channels')):
339
- ch = datx_channel(config, channel)
340
- if ch.label.find('Mic') >= 0:
341
- channels.append(ch)
342
- if not d_files.has_key(ch.d_file):
343
- d_files[ch.d_file] = \
344
- datx_d_file(path.join(path.dirname(self.from_file), \
345
- config.get(ch.d_file, 'fn')), 32)
346
- numchannels = len(channels)
347
- # prepare hdf5
348
- name = td.name
349
- if name == "":
350
- name = path.join(config.td_dir, \
351
- path.splitext(path.basename(self.from_file))[0]+'.h5')
352
- else:
353
- if td.h5f != None:
354
- td.h5f.close()
355
- # TODO problems with already open h5 files from other instances
356
- file = _get_h5file_class()
357
- f5h = file(name, mode = 'w')
358
- f5h.create_extendable_array(
359
- 'time_data', (0, numchannels), "float32")
360
- ac = f5h.get_data_by_reference('time_data')
361
- f5h.set_node_attribute(ac,'sample_freq',sample_rate)
362
- block_data = \
363
- zeros((128*d_files[channels[0].d_file].num_samples_per_block, \
364
- numchannels), 'Float32')
365
- flag = 0
366
- while(not flag):
367
- for i in d_files.values():
368
- flag = i.get_next_blocks()
369
- if flag:
370
- continue
371
- for i in range(numchannels):
372
- data = d_files[channels[i].d_file].data[channels[i].ch_no]
373
- block_data[:data.size, i] = channels[i].scale(data)
374
- f5h.append_data(ac,block_data[:data.size])
375
- f5h.close()
376
- f0.close()
377
- for i in d_files.values():
378
- i.f.close()
379
- td.name = name
380
- td.load_data()
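The removed fileimport module's docstring already points users to acoular.sources.TimeSamples for standard HDF5 (*.h5) import. Below is a minimal sketch of that path, assuming the name trait and the block-wise result() generator of this release line; the file path is hypothetical.

```python
# Minimal sketch, replacing the file importers above for standard *.h5 data.
# The file path is hypothetical; `name`, `sample_freq`, `numchannels` and
# `result()` are assumed from the TimeSamples API of this release line.
import acoular

ts = acoular.TimeSamples(name='measurement.h5')  # HDF5 file containing a 'time_data' array
print(ts.sample_freq, ts.numchannels, ts.numsamples)

for block in ts.result(num=1024):  # blocks of shape (1024, numchannels)
    pass  # feed into a processing chain, e.g. filters or time-domain beamformers
```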
acoular/nidaqimport.py DELETED
@@ -1,273 +0,0 @@
1
- # coding=UTF-8
2
- #------------------------------------------------------------------------------
3
- # Copyright (c) Acoular Development Team.
4
- #------------------------------------------------------------------------------
5
-
6
- """
7
- nidaqimport.py: interface to nidaq mx
8
- """
9
- from __future__ import print_function
10
- from .sources import TimeSamples
11
- from .h5cache import td_dir
12
- from .fileimport import time_data_import
13
- import ctypes
14
- import numpy
15
- import tables
16
- from traits.api import Float, List, Str, Long
17
- from datetime import datetime
18
- from os import path
19
-
20
- try:
21
- nidaq = ctypes.windll.nicaiu # load the DLL
22
- except:
23
- raise ImportError
24
- # type definitions
25
- int32 = ctypes.c_long
26
- uInt32 = ctypes.c_ulong
27
- uInt64 = ctypes.c_ulonglong
28
- float64 = ctypes.c_double
29
- TaskHandle = uInt32
30
-
31
- # DAQmx constants
32
- DAQmx_Val_Cfg_Default = int32(-1)
33
- DAQmx_Val_GroupByChannel = 0
34
- DAQmx_Val_GroupByScanNumber = 1
35
- DAQmx_Val_FiniteSamps = 10178 # Acquire or generate a finite number of samples.
36
- DAQmx_Val_ContSamps = 10123 # Acquire or generate samples until you stop the task.
37
- DAQmx_Val_HWTimedSinglePoint = 12522 # Acquire or generate samples continuously using hardware timing without a buffer. Hardware timed single point sample mode is supported only for the sample clock and change detection timing types.
38
-
39
- ##############################
40
-
41
- def ECFactory(func):
42
- def func_new(*args):
43
- err = func(*args)
44
- if err < 0:
45
- buf_size = 256
46
- # buf = ctypes.create_string_buffer('\000' * buf_size)
47
- buf = ctypes.create_string_buffer(b'\000' * buf_size)
48
- nidaq.DAQmxGetErrorString(err,ctypes.byref(buf),buf_size)
49
- # buf1 = ctypes.create_string_buffer('\000' * buf_size)
50
- buf1 = ctypes.create_string_buffer(b'\000' * buf_size)
51
-
52
- ## nidaq.DAQmxGetExtendedErrorInfo(ctypes.byref(buf1),buf_size)
53
- ## print buf1.value
54
- raise RuntimeError('nidaq call failed with error %d: %s'%(err,repr(buf.value)))
55
- return func_new
56
-
57
- DAQmxGetSysTasks = ECFactory(nidaq.DAQmxGetSysTasks)
58
- DAQmxLoadTask = ECFactory(nidaq.DAQmxLoadTask)
59
- DAQmxClearTask = ECFactory(nidaq.DAQmxClearTask)
60
- DAQmxStartTask = ECFactory(nidaq.DAQmxStartTask)
61
- DAQmxStopTask = ECFactory(nidaq.DAQmxStopTask)
62
- DAQmxGetTaskDevices = ECFactory(nidaq.DAQmxGetTaskDevices)
63
- #DAQmxGetTaskNumDevices = ECFactory(nidaq.DAQmxGetTaskNumDevices)
64
- DAQmxGetTaskNumChans = ECFactory(nidaq.DAQmxGetTaskNumChans)
65
- DAQmxGetTaskChannels = ECFactory(nidaq.DAQmxGetTaskChannels)
66
- DAQmxGetBufInputBufSize = ECFactory(nidaq.DAQmxGetBufInputBufSize)
67
- DAQmxReadAnalogF64 = ECFactory(nidaq.DAQmxReadAnalogF64)
68
- DAQmxIsTaskDone = ECFactory(nidaq.DAQmxIsTaskDone)
69
- DAQmxGetSampQuantSampMode = ECFactory(nidaq.DAQmxGetSampQuantSampMode)
70
- DAQmxGetSampQuantSampPerChan = ECFactory(nidaq.DAQmxGetSampQuantSampPerChan)
71
- DAQmxSetSampQuantSampPerChan = ECFactory(nidaq.DAQmxSetSampQuantSampPerChan)
72
- DAQmxGetSampClkRate = ECFactory(nidaq.DAQmxGetSampClkRate)
73
- DAQmxSetSampClkRate = ECFactory(nidaq.DAQmxSetSampClkRate)
74
-
75
- class nidaq_import( time_data_import ):
76
- """
77
- This class provides an interface to import of measurement data
78
- using NI-DAQmx.
79
- """
80
-
81
- #: Name of the NI task to use
82
- taskname = Str(
83
- desc="name of the NI task to use for the measurement")
84
-
85
- #: Sampling frequency, defaults to 48000.
86
- sample_freq = Float(48000.0,
87
- desc="sampling frequency")
88
-
89
- #: Number of time data samples, defaults to 48000.
90
- numsamples = Long(48000,
91
- desc="number of samples")
92
-
93
- #: Number of channels; is set automatically.
94
- numchannels = Long(0,
95
- desc="number of channels in the task")
96
-
97
- #: Number of devices; is set automatically.
98
- numdevices = Long(0,
99
- desc="number of devices in the task")
100
-
101
- #: Name of channels; is set automatically.
102
- namechannels = List(
103
- desc="names of channels in the task")
104
-
105
- #: Name of devices; is set automatically.
106
- namedevices = List(
107
- desc="names of devices in the task")
108
-
109
- #: Name of available and valid tasks.
110
- tasknames = List
111
-
112
- def __init__ ( self, **traits ):
113
- time_data_import.__init__(self, **traits )
114
- taskHandle = TaskHandle(0)
115
- buf_size = 1024
116
- # buf = ctypes.create_string_buffer('\000' * buf_size)
117
- buf = ctypes.create_string_buffer(b'\000' * buf_size)
118
- DAQmxGetSysTasks(ctypes.byref(buf),buf_size)
119
- # tasknamelist = buf.value.split(', ')
120
- tasknamelist = buf.value.split(b', ')
121
-
122
- self.tasknames=[]
123
- for taskname in tasknamelist:
124
- # is task valid ?? try to load
125
- try:
126
- DAQmxLoadTask(taskname,ctypes.byref(taskHandle))
127
- except RuntimeError:
128
- continue
129
- self.tasknames.append(taskname)
130
- DAQmxClearTask(taskHandle)
131
-
132
- def _taskname_changed ( self ):
133
- taskHandle = TaskHandle(0)
134
- buf_size = 1024*4
135
- # buf = ctypes.create_string_buffer('\000' * buf_size)
136
- buf = ctypes.create_string_buffer(b'\000' * buf_size)
137
-
138
- num = uInt32()
139
- fnum = float64()
140
- lnum = uInt64()
141
- try:
142
- DAQmxLoadTask(str.encode(self.taskname),ctypes.byref(taskHandle))
143
- except RuntimeError:
144
- return
145
- DAQmxGetTaskNumChans(taskHandle,ctypes.byref(num))
146
- self.numchannels = num.value
147
- # commented for compatibility with older NIDAQmx
148
- #~ DAQmxGetTaskNumDevices(taskHandle,ctypes.byref(num))
149
- #~ self.numdevices = num.value
150
- DAQmxGetTaskChannels(taskHandle,ctypes.byref(buf),buf_size)
151
- self.namechannels = buf.value.decode().split(', ')
152
- DAQmxGetTaskDevices(taskHandle,ctypes.byref(buf),buf_size)
153
- self.namedevices = buf.value.decode().split(', ')
154
- self.numdevices = len(self.namedevices)
155
- DAQmxGetSampClkRate(taskHandle,ctypes.byref(fnum))
156
- self.sample_freq = fnum.value
157
- DAQmxGetSampQuantSampMode(taskHandle,ctypes.byref(num))
158
- if num.value==DAQmx_Val_FiniteSamps:
159
- DAQmxGetSampQuantSampPerChan(taskHandle,ctypes.byref(lnum))
160
- self.numsamples = lnum.value
161
- DAQmxClearTask(taskHandle)
162
-
163
- def _sample_freq_changed(self,dispatch='ui'):
164
- taskHandle = TaskHandle(0)
165
- fnum = float64()
166
- try:
167
- DAQmxLoadTask(str.encode(self.taskname),ctypes.byref(taskHandle))
168
- except RuntimeError:
169
- return
170
- try:
171
- DAQmxSetSampClkRate(taskHandle,float64(self.sample_freq))
172
- except RuntimeError:
173
- pass
174
- DAQmxGetSampClkRate(taskHandle,ctypes.byref(fnum))
175
- self.sample_freq = fnum.value
176
- DAQmxClearTask(taskHandle)
177
- print(self.sample_freq)
178
-
179
-
180
- def get_data (self, td):
181
- """
182
- Main work is done here: loads data from buffer into
183
- :class:`~acoular.sources.TimeSamples` object `td` and saves also a
184
- '*.h5' file.
185
- """
186
- taskHandle = TaskHandle(0)
187
- read = uInt32()
188
- fnum = float64()
189
- lnum = uInt64()
190
- try:
191
- DAQmxLoadTask(str.encode(self.taskname),ctypes.byref(taskHandle))
192
- if self.numchannels<1:
193
- raise RuntimeError
194
- DAQmxSetSampClkRate(taskHandle,float64(self.sample_freq))
195
- except RuntimeError:
196
- # no valid task
197
- time_data_import.get_data(self,td)
198
- return
199
- #import data
200
- name = td.name
201
- if name=='':
202
- name = datetime.now().isoformat('_').replace(':','-').replace('.','_')
203
- name = path.join(td_dir,name+'.h5')
204
- f5h = tables.open_file(name,mode='w')
205
- ac = f5h.create_earray(f5h.root,'time_data',tables.atom.Float32Atom(),(0,self.numchannels))
206
- ac.set_attr('sample_freq',self.sample_freq)
207
- DAQmxSetSampQuantSampPerChan(taskHandle,uInt64(100000))
208
- DAQmxGetSampQuantSampPerChan(taskHandle,ctypes.byref(lnum))
209
- max_num_samples = lnum.value
210
- print("Puffergroesse: %i" % max_num_samples)
211
- data = numpy.empty((max_num_samples,self.numchannels),dtype=numpy.float64)
212
- DAQmxStartTask(taskHandle)
213
- count = 0
214
- numsamples = self.numsamples
215
- while count<numsamples:
216
- #~ DAQmxReadAnalogF64(taskHandle,-1,float64(10.0),
217
- #~ DAQmx_Val_GroupByScanNumber,data.ctypes.data,
218
- #~ data.size,ctypes.byref(read),None)
219
- DAQmxReadAnalogF64(taskHandle,1024,float64(10.0),
220
- DAQmx_Val_GroupByScanNumber,data.ctypes.data,
221
- data.size,ctypes.byref(read),None)
222
- ac.append(numpy.array(data[:min(read.value,numsamples-count)],dtype=numpy.float32))
223
- count+=read.value
224
- #~ if read.value>200:
225
- #~ print count, read.value
226
- DAQmxStopTask(taskHandle)
227
- DAQmxClearTask(taskHandle)
228
- f5h.close()
229
- td.name = name
230
- td.load_data()
231
-
232
- def get_single (self):
233
- """
234
- Gets one block of data
235
- """
236
- taskHandle = TaskHandle(0)
237
- read = uInt32()
238
- fnum = float64()
239
- lnum = uInt64()
240
- try:
241
- DAQmxLoadTask(str.encode(self.taskname),ctypes.byref(taskHandle))
242
- if self.numchannels<1:
243
- raise RuntimeError
244
- except RuntimeError:
245
- # no valid task
246
- time_data_import.get_data(self,td)
247
- return
248
- #import data
249
- ac = numpy.empty((self.numsamples,self.numchannels),numpy.float32)
250
- DAQmxGetSampQuantSampPerChan(taskHandle,ctypes.byref(lnum))
251
- max_num_samples = lnum.value
252
- data = numpy.empty((max_num_samples,self.numchannels),dtype=numpy.float64)
253
- DAQmxStartTask(taskHandle)
254
- count = 0
255
- numsamples = self.numsamples
256
- while count<numsamples:
257
- DAQmxReadAnalogF64(taskHandle,-1,float64(10.0),
258
- DAQmx_Val_GroupByScanNumber,data.ctypes.data,
259
- data.size,ctypes.byref(read),None)
260
- anz = min(read.value,numsamples-count)
261
- ac[count:count+anz]=numpy.array(data[:anz],dtype=numpy.float32)
262
- count+=read.value
263
- DAQmxStopTask(taskHandle)
264
- DAQmxClearTask(taskHandle)
265
- return ac
266
-
267
-
268
- if __name__=='__main__':
269
- x=nidaq_import()
270
- x.taskname = 'test1'
271
- x.configure_traits()
272
- td=TimeSamples()
273
- x.get_data(td)
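With the NI-DAQmx importer removed, live multichannel input remains available through acoular.sdinput (item 14 in the file list above). The following is a hedged sketch only: the SoundDeviceSamplesGenerator class and its device/numchannels/numsamples traits are assumptions based on the Acoular documentation and should be verified against the installed version.

```python
# Hedged sketch: live input via acoular.sdinput instead of the removed NI-DAQmx
# importer. SoundDeviceSamplesGenerator and its device/numchannels/numsamples
# traits are assumptions based on the Acoular documentation; verify locally.
from acoular.sdinput import SoundDeviceSamplesGenerator

live = SoundDeviceSamplesGenerator(
    device=0,        # index of the recording device (assumed integer index)
    numchannels=16,  # e.g. a UMA-16 array matching the bundled geometry file
)
live.numsamples = 48000  # deliver roughly one second of data, then stop (assumed trait)

for block in live.result(num=256):  # blocks of shape (256, numchannels)
    print(block.shape)
```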