acoular 24.5-py3-none-any.whl → 24.10-py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- acoular/__init__.py +17 -11
- acoular/base.py +312 -0
- acoular/configuration.py +23 -16
- acoular/demo/acoular_demo.py +28 -35
- acoular/environments.py +20 -15
- acoular/fastFuncs.py +40 -40
- acoular/fbeamform.py +1100 -1130
- acoular/fprocess.py +368 -0
- acoular/grids.py +36 -22
- acoular/h5cache.py +34 -34
- acoular/h5files.py +13 -13
- acoular/internal.py +3 -3
- acoular/process.py +464 -0
- acoular/sdinput.py +24 -4
- acoular/signals.py +20 -6
- acoular/sources.py +140 -56
- acoular/spectra.py +34 -53
- acoular/tbeamform.py +264 -142
- acoular/tfastfuncs.py +17 -18
- acoular/tools/__init__.py +2 -0
- acoular/tools/aiaa.py +7 -8
- acoular/tools/helpers.py +2 -2
- acoular/tools/metrics.py +1 -1
- acoular/tools/utils.py +210 -0
- acoular/tprocess.py +168 -532
- acoular/traitsviews.py +5 -3
- acoular/version.py +2 -2
- {acoular-24.5.dist-info → acoular-24.10.dist-info}/METADATA +49 -8
- acoular-24.10.dist-info/RECORD +54 -0
- {acoular-24.5.dist-info → acoular-24.10.dist-info}/WHEEL +1 -1
- acoular-24.5.dist-info/RECORD +0 -50
- {acoular-24.5.dist-info → acoular-24.10.dist-info}/licenses/AUTHORS.rst +0 -0
- {acoular-24.5.dist-info → acoular-24.10.dist-info}/licenses/LICENSE +0 -0
acoular/h5files.py
CHANGED
@@ -33,7 +33,7 @@ class H5FileBase:
 class H5CacheFileBase:
     """Base class for File objects that handle writing and reading of .h5 cache files."""
 
-
+    compression_filter = None
 
     def is_cached(self, nodename, group=None):
         pass
@@ -72,7 +72,7 @@ if config.have_tables:
             node.set_attr(attrname, value)
 
         def get_node_attribute(self, node, attrname):
-            return node._v_attrs[attrname]
+            return node._v_attrs[attrname]  # noqa: SLF001
 
         def append_data(self, node, data):
             node.append(data)
@@ -93,10 +93,10 @@ if config.have_tables:
             """Recursively convert an HDF5 node to a dictionary."""
             node = self.get_node(nodename)
             # initialize node-dict with node's own attributes
-            result = {attr: node._v_attrs[attr] for attr in node._v_attrs._f_list()}
+            result = {attr: node._v_attrs[attr] for attr in node._v_attrs._f_list()}  # noqa: SLF001
             if isinstance(node, tables.Group):
                 # if node is a group, recursively add its children
-                for childname in node._v_children:
+                for childname in node._v_children:  # noqa: SLF001
                     result[childname] = self.node_to_dict(f'{nodename}/{childname}')
             elif isinstance(node, tables.Leaf):
                 # if node contains only data, add it
@@ -106,7 +106,7 @@ if config.have_tables:
             return result
 
     class H5CacheFileTables(H5FileTables, H5CacheFileBase):
-
+        compression_filter = tables.Filters(complevel=5, complib='blosc')
 
         def is_cached(self, nodename, group=None):
             if not group:
@@ -119,7 +119,7 @@ if config.have_tables:
             if not group:
                 group = self.root
             atom = precision_to_atom[precision]
-            self.create_carray(group, nodename, atom, shape, filters=self.
+            self.create_carray(group, nodename, atom, shape, filters=self.compression_filter)
 
 
 if config.have_h5py:
@@ -149,10 +149,10 @@ if config.have_h5py:
             return node.attrs[attrname]
 
         def append_data(self, node, data):
-
-
-            node.resize(
-            node[
+            old_shape = node.shape
+            new_shape = (old_shape[0] + data.shape[0], data.shape[1])
+            node.resize(new_shape)
+            node[old_shape[0] : new_shape[0], :] = data
 
         def remove_data(self, nodename, group=None):
             in_file_path = self._get_in_file_path(nodename, group)
@@ -184,8 +184,8 @@ if config.have_h5py:
             return result
 
     class H5CacheFileH5py(H5CacheFileBase, H5FileH5py):
-
-        #
+        compression_filter = 'lzf'
+        # compression_filter = 'blosc' # unavailable...
 
         def is_cached(self, nodename, group=None):
             if not group:
@@ -200,7 +200,7 @@ if config.have_h5py:
                 in_file_path,
                 dtype=precision,
                 shape=shape,
-                compression=self.
+                compression=self.compression_filter,
                 chunks=True,
             )
 
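The rewritten h5py `append_data` above grows a resizable dataset and writes the appended rows in place. A minimal standalone sketch of the same pattern in plain h5py (the file and dataset names are made up for illustration and are not part of the Acoular API):

import h5py
import numpy as np

with h5py.File('demo_cache.h5', 'w') as f:
    # extendable dataset: start with zero rows, allow unlimited rows via maxshape
    node = f.create_dataset('tc_demo', shape=(0, 4), maxshape=(None, 4), dtype='float64', chunks=True)
    data = np.random.rand(128, 4)
    old_shape = node.shape
    new_shape = (old_shape[0] + data.shape[0], data.shape[1])
    node.resize(new_shape)                      # grow along the first axis
    node[old_shape[0]:new_shape[0], :] = data   # write the appended block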
acoular/internal.py
CHANGED
@@ -13,13 +13,13 @@ def digest(obj, name='digest'):
             for i in do_.split('.'):
                 vobj = list(vobj.trait_get(i.rstrip('[]')).values())[0]
             str_.append(str(vobj).encode('UTF-8'))
-        except:
+        except:  # noqa: E722
            pass
     return '_' + md5(b''.join(str_)).hexdigest()
 
 
-def ldigest(
+def ldigest(obj_list):
     str_ = []
-    for i in
+    for i in obj_list:
         str_.append(str(i.digest).encode('UTF-8'))
     return '_' + md5(b''.join(str_)).hexdigest()
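The completed `ldigest(obj_list)` chains the `digest` strings of the given objects into one MD5-based identifier. A minimal sketch of the same computation with stand-in objects (the `Obj` class here is hypothetical; only its `digest` attribute matters):

from hashlib import md5

class Obj:  # hypothetical stand-in for any object exposing a digest string
    def __init__(self, digest):
        self.digest = digest

objs = [Obj('_a1b2'), Obj('_c3d4')]
key = '_' + md5(b''.join(str(o.digest).encode('UTF-8') for o in objs)).hexdigest()
print(key)  # deterministic identifier built from the chained digests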
acoular/process.py
ADDED
@@ -0,0 +1,464 @@
# ------------------------------------------------------------------------------
# Copyright (c) Acoular Development Team.
# ------------------------------------------------------------------------------
"""Implements general purpose blockwise processing methods independent of the domain (time or frequency).

.. autosummary::
    :toctree: generated/

    Average
    Cache
    SampleSplitter
    TimeAverage
    TimeCache
"""

import threading
from collections import deque
from inspect import currentframe
from warnings import warn

from traits.api import Bool, Dict, Instance, Int, Property, Trait, cached_property, on_trait_change

from .base import Generator, InOut
from .configuration import config
from .h5cache import H5cache
from .h5files import H5CacheFileBase
from .internal import digest


class LockedGenerator:
    """Creates a thread-safe iterator.

    Takes an iterator/generator and makes it thread-safe by
    serializing calls to the `next` method of the given iterator/generator.
    """

    def __init__(self, it):
        self.it = it
        self.lock = threading.Lock()

    def __next__(self):
        with self.lock:
            return self.it.__next__()


class Average(InOut):
    """Calculates the average across consecutive time samples or frequency snapshots.

    The average operation is performed differently depending on the source type.
    If the source is a time domain source (e.g. derived from :class:`~acoular.base.SamplesGenerator`),
    the average is calculated over a certain number of time samples given by :attr:`naverage`.
    If the source is a frequency domain source (e.g. derived from :class:`~acoular.base.SpectraGenerator`),
    the average is calculated over a certain number of snapshots given by :attr:`naverage`.

    Examples
    --------
    To estimate the RMS of a white noise (time-domain) signal, the average of the squared signal can be calculated:

    >>> import acoular as ac
    >>> import numpy as np
    >>>
    >>> signal = ac.WNoiseGenerator(sample_freq=51200, numsamples=51200, rms=2.0).signal()
    >>> ts = ac.TimeSamples(data=signal[:, np.newaxis], sample_freq=51200)
    >>> tp = ac.TimePower(source=ts)
    >>> avg = ac.Average(source=tp, naverage=512)
    >>> mean_squared_value = next(avg.result(num=1))
    >>> rms = np.sqrt(mean_squared_value)[0, 0]
    >>> print(rms)
    1.9985200025816718

    Here, each evaluation of the generator created by the :meth:`result` method of the :class:`Average` object
    via the :meth:`next` function returns :code:`num=1` average across a block of 512 samples.

    If the source is a frequency domain source, the average is calculated over a certain number of
    snapshots, defined by :attr:`naverage`.

    >>> fft = ac.RFFT(source=ts, block_size=64)
    >>> ps = ac.AutoPowerSpectra(source=fft)
    >>> avg = ac.Average(source=ps, naverage=16)
    >>> mean_power = next(avg.result(num=1))
    >>> print(np.sqrt(mean_power.sum()))
    2.0024960894399295

    Here, the generator created by the :meth:`result` method of the :class:`Average` object
    returns the average across 16 snapshots in the frequency domain.

    """

    #: Number of samples (time domain source) or snapshots (frequency domain source)
    #: to average over, defaults to 64.
    naverage = Int(64, desc='number of samples to average over')

    #: Sampling frequency of the output signal, is set automatically.
    sample_freq = Property(depends_on='source.sample_freq, naverage')

    #: Number of samples (time domain) or snapshots (frequency domain) of the output signal.
    #: Is set automatically.
    numsamples = Property(depends_on='source.numsamples, naverage')

    # internal identifier
    digest = Property(depends_on=['source.digest', '__class__', 'naverage'])

    @cached_property
    def _get_digest(self):
        return digest(self)

    @cached_property
    def _get_sample_freq(self):
        if self.source:
            return 1.0 * self.source.sample_freq / self.naverage
        return None

    @cached_property
    def _get_numsamples(self):
        if self.source:
            return self.source.numsamples / self.naverage
        return None

    def result(self, num):
        """Python generator that yields the output block-wise.

        Parameters
        ----------
        num : integer
            This parameter defines the size of the blocks to be yielded
            (i.e. the number of samples per block).

        Returns
        -------
        Average of the output of source.
        Yields samples in blocks of shape (num, numchannels).
        The last block may be shorter than num.

        """
        nav = self.naverage
        for temp in self.source.result(num * nav):
            ns, nc = temp.shape
            nso = int(ns / nav)
            if nso > 0:
                yield temp[: nso * nav].reshape((nso, -1, nc)).mean(axis=1)


class Cache(InOut):
    """Caches source output in cache file.

    This class is used to cache the output of a :class:`acoular.base.Generator` derived source
    object in a cache file to circumvent time-consuming re-calculation.
    The cache file is created in the Acoular cache directory.

    Examples
    --------
    >>> import acoular as ac
    >>> import numpy as np
    >>>
    >>> ac.config.h5library = 'tables'
    >>> data = np.random.rand(1024, 1)
    >>> ts = ac.TimeSamples(data=data, sample_freq=51200)
    >>> fft = ac.RFFT(source=ts, block_size=1024)
    >>> cache = ac.Cache(source=fft)  # cache the output of the FFT in cache file
    >>> for block in cache.result(num=1):  # read the cached data block-wise
    ...     print(block.shape)
    [('_cache.h5', 1)]
    (1, 513)

    The caching behaviour can be controlled by the :class:`~acoular.configuration.Config` instance
    via the :attr:`~acoular.configuration.Config.global_caching` attribute.
    To turn off caching, set :attr:`~acoular.configuration.Config.global_caching` to 'none' before
    running the code. The cache file directory can be obtained (and set) via
    :attr:`~acoular.configuration.Config.cache_dir`.

    >>> ac.config.global_caching = 'none'

    """

    # basename for cache
    basename = Property(depends_on='digest')

    # hdf5 cache file
    h5f = Instance(H5CacheFileBase, transient=True)

    # internal identifier
    digest = Property(depends_on=['source.digest', '__class__'])

    @cached_property
    def _get_digest(self):
        return digest(self)

    @cached_property
    def _get_basename(self):
        obj = self.source  # start with source
        basename = 'void'  # if no file source is found
        while obj:
            if 'basename' in obj.all_trait_names():  # at original source?
                basename = obj.basename  # get the name
                break
            try:
                obj = obj.source  # traverse down until original data source
            except AttributeError:
                obj = None
        return basename

    def _pass_data(self, num):
        yield from self.source.result(num)

    def _write_data_to_cache(self, num):
        nodename = 'tc_' + self.digest
        for i, data in enumerate(self.source.result(num)):
            if i == 0:
                self.h5f.create_extendable_array(nodename, (0, data.shape[1]), data.dtype.name)
                ac = self.h5f.get_data_by_reference(nodename)
                self.h5f.set_node_attribute(ac, 'sample_freq', self.sample_freq)
                self.h5f.set_node_attribute(ac, 'complete', False)
            self.h5f.append_data(ac, data)
            self.h5f.flush()
            yield data
        self.h5f.set_node_attribute(ac, 'complete', True)

    def _get_data_from_cache(self, num):
        nodename = 'tc_' + self.digest
        ac = self.h5f.get_data_by_reference(nodename)
        i = 0
        while i < ac.shape[0]:
            yield ac[i : i + num]
            i += num

    def _get_data_from_incomplete_cache(self, num):
        nodename = 'tc_' + self.digest
        ac = self.h5f.get_data_by_reference(nodename)
        i = 0
        nblocks = 0
        while i + num <= ac.shape[0]:
            yield ac[i : i + num]
            nblocks += 1
            i += num
        self.h5f.remove_data(nodename)
        for j, data in enumerate(self.source.result(num)):
            if j == 0:
                self.h5f.create_extendable_array(nodename, (0, data.shape[1]), data.dtype.name)
                ac = self.h5f.get_data_by_reference(nodename)
                self.h5f.set_node_attribute(ac, 'sample_freq', self.sample_freq)
                self.h5f.set_node_attribute(ac, 'complete', False)
            self.h5f.append_data(ac, data)
            if j >= nblocks:
                self.h5f.flush()
                yield data
        self.h5f.set_node_attribute(ac, 'complete', True)

    # result generator: delivers input, possibly from cache
    def result(self, num):
        """Python generator that yields the output from cache block-wise.

        Parameters
        ----------
        num : integer
            This parameter defines the size of the blocks to be yielded
            (i.e. the number of samples per block).

        Returns
        -------
        Samples in blocks of shape (num, numchannels).
        The last block may be shorter than num.
        Echoes the source output, but reads it from cache
        when available and prevents unnecessary recalculation.

        """
        if config.global_caching == 'none':
            generator = self._pass_data
        else:
            nodename = 'tc_' + self.digest
            H5cache.get_cache_file(self, self.basename)
            if not self.h5f:
                generator = self._pass_data
            elif self.h5f.is_cached(nodename):
                generator = self._get_data_from_cache
                if config.global_caching == 'overwrite':
                    self.h5f.remove_data(nodename)
                    generator = self._write_data_to_cache
                elif not self.h5f.get_data_by_reference(nodename).attrs.__contains__('complete'):
                    if config.global_caching == 'readonly':
                        generator = self._pass_data
                    else:
                        generator = self._get_data_from_incomplete_cache
                elif not self.h5f.get_data_by_reference(nodename).attrs['complete']:
                    if config.global_caching == 'readonly':
                        warn(
                            "Cache file is incomplete for nodename %s. With config.global_caching='readonly', the cache file will not be used!"
                            % str(nodename),
                            Warning,
                            stacklevel=1,
                        )
                        generator = self._pass_data
                    else:
                        generator = self._get_data_from_incomplete_cache
            elif not self.h5f.is_cached(nodename):
                generator = self._write_data_to_cache
                if config.global_caching == 'readonly':
                    generator = self._pass_data
        yield from generator(num)


class SampleSplitter(InOut):
    """Distributes data blocks from source to several following objects.

    A separate block buffer is created for each registered object
    (see :attr:`block_buffer`).
    """

    #: dictionary with block buffers (dict values) of registered objects (dict
    #: keys).
    block_buffer = Dict(key_trait=Instance(Generator))

    #: max elements/blocks in block buffers.
    buffer_size = Int(100)

    #: defines behaviour in case of block_buffer overflow. Can be set individually
    #: for each registered object.
    #:
    #: * 'error': an IOError is thrown by the class
    #: * 'warning': a warning is displayed. Possibly leads to lost blocks of data
    #: * 'none': nothing happens. Possibly leads to lost blocks of data
    buffer_overflow_treatment = Dict(
        key_trait=Instance(Generator),
        value_trait=Trait('error', 'warning', 'none'),
        desc='defines buffer overflow behaviour.',
    )

    # shadow trait to monitor if source delivers samples or is empty
    _source_generator_exist = Bool(False)

    # shadow trait to monitor if buffer of objects with overflow treatment = 'error'
    # or 'warning' is overfilled. Error will be raised in all threads.
    _buffer_overflow = Bool(False)

    # Helper Trait holds source generator
    _source_generator = Trait()

    def _create_block_buffer(self, obj):
        self.block_buffer[obj] = deque([], maxlen=self.buffer_size)

    def _create_buffer_overflow_treatment(self, obj):
        self.buffer_overflow_treatment[obj] = 'error'

    def _clear_block_buffer(self, obj):
        self.block_buffer[obj].clear()

    def _remove_block_buffer(self, obj):
        del self.block_buffer[obj]

    def _remove_buffer_overflow_treatment(self, obj):
        del self.buffer_overflow_treatment[obj]

    def _assert_obj_registered(self, obj):
        if obj not in self.block_buffer:
            raise OSError('calling object %s is not registered.' % obj)

    def _get_objs_to_inspect(self):
        return [obj for obj in self.buffer_overflow_treatment if self.buffer_overflow_treatment[obj] != 'none']

    def _inspect_buffer_levels(self, inspect_objs):
        for obj in inspect_objs:
            if len(self.block_buffer[obj]) == self.buffer_size:
                if self.buffer_overflow_treatment[obj] == 'error':
                    self._buffer_overflow = True
                elif self.buffer_overflow_treatment[obj] == 'warning':
                    warn('overfilled buffer for object: %s data will get lost' % obj, UserWarning, stacklevel=1)

    def _create_source_generator(self, num):
        for obj in self.block_buffer:
            self._clear_block_buffer(obj)
        self._buffer_overflow = False  # reset overflow bool
        self._source_generator = LockedGenerator(self.source.result(num))
        self._source_generator_exist = True  # indicates full generator

    def _fill_block_buffers(self):
        next_block = next(self._source_generator)
        [self.block_buffer[obj].appendleft(next_block) for obj in self.block_buffer]

    @on_trait_change('buffer_size')
    def _change_buffer_size(self):
        for obj in self.block_buffer:
            self._remove_block_buffer(obj)
            self._create_block_buffer(obj)

    def register_object(self, *objects_to_register):
        """Function that can be used to register objects that receive blocks from this class."""
        for obj in objects_to_register:
            if obj not in self.block_buffer:
                self._create_block_buffer(obj)
                self._create_buffer_overflow_treatment(obj)

    def remove_object(self, *objects_to_remove):
        """Function that can be used to remove registered objects."""
        for obj in objects_to_remove:
            self._remove_block_buffer(obj)
            self._remove_buffer_overflow_treatment(obj)

    def result(self, num):
        """Python generator that yields the output block-wise from block-buffer.

        Parameters
        ----------
        num : integer
            This parameter defines the size of the blocks to be yielded
            (i.e. the number of samples per block).

        Returns
        -------
        Samples in blocks of shape (num, numchannels).
        Delivers a block of samples to the calling object.
        The last block may be shorter than num.

        """
        calling_obj = currentframe().f_back.f_locals['self']
        self._assert_obj_registered(calling_obj)
        objs_to_inspect = self._get_objs_to_inspect()

        if not self._source_generator_exist:
            self._create_source_generator(num)

        while not self._buffer_overflow:
            if self.block_buffer[calling_obj]:
                yield self.block_buffer[calling_obj].pop()
            else:
                self._inspect_buffer_levels(objs_to_inspect)
                try:
                    self._fill_block_buffers()
                except StopIteration:
                    self._source_generator_exist = False
                    return
        else:
            msg = 'Maximum size of block buffer is reached!'
            raise OSError(msg)


class TimeAverage(Average):
    """Calculates average of the signal (Alias for :class:`acoular.process.Average`).

    .. deprecated:: 24.10
        Using :class:`~acoular.process.TimeAverage` is deprecated and will be removed in Acoular
        version 25.07. Use :class:`~acoular.process.Average` instead.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        warn(
            'Using TimeAverage is deprecated and will be removed in Acoular version 25.07. Use Average instead.',
            DeprecationWarning,
            stacklevel=2,
        )


class TimeCache(Cache):
    """Caches source signals in cache file (Alias for :class:`acoular.process.Cache`).

    .. deprecated:: 24.10
        Using :class:`~acoular.process.TimeCache` is deprecated and will be removed in Acoular
        version 25.07. Use :class:`~acoular.process.Cache` instead.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        warn(
            'Using TimeCache is deprecated and will be removed in Acoular version 25.07. Use Cache instead.',
            DeprecationWarning,
            stacklevel=2,
        )
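Unlike `Average` and `Cache`, the new `SampleSplitter` carries no usage example in its docstring. A minimal sketch of how registration might look, reusing the `TimeSamples`/`TimePower` style of the examples above (data and block sizes are arbitrary; in practice the registered objects would typically consume their buffers from separate threads):

import acoular as ac
import numpy as np

data = np.random.rand(1024, 2)
ts = ac.TimeSamples(data=data, sample_freq=51200)

ss = ac.SampleSplitter(source=ts)    # one source feeding several consumers
tp = ac.TimePower(source=ss)
avg = ac.Average(source=ss, naverage=256)
ss.register_object(tp, avg)          # each registered object gets its own block buffer

for block in tp.result(num=256):     # tp reads from its own buffer in SampleSplitter
    print(block.shape)               # (256, 2)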
acoular/sdinput.py
CHANGED
@@ -9,11 +9,14 @@
     SoundDeviceSamplesGenerator
 """
 
-import
-from traits.api import Any, Bool, Int, Long, Property, cached_property, observe
+from traits.api import Any, Bool, Float, Int, Long, Property, Trait, cached_property, observe
 
+from .base import SamplesGenerator
+from .configuration import config
 from .internal import digest
-
+
+if config.have_sounddevice:
+    import sounddevice as sd
 
 
 class SoundDeviceSamplesGenerator(SamplesGenerator):
@@ -24,6 +27,12 @@ class SoundDeviceSamplesGenerator(SamplesGenerator):
     :meth:`result`.
     """
 
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        if config.have_sounddevice is False:
+            msg = 'SoundDevice library not found but is required for using the SoundDeviceSamplesGenerator class.'
+            raise ImportError(msg)
+
     #: input device index, refers to sounddevice list
     device = Int(0, desc='input device index')
 
@@ -40,6 +49,11 @@ class SoundDeviceSamplesGenerator(SamplesGenerator):
     #: Sampling frequency of the signal, changes with sinusdevices
     sample_freq = Property(desc='sampling frequency')
 
+    _sample_freq = Float(default_value=None)
+
+    #: Datatype (resolution) of the signal, used as `dtype` in a sd `Stream` object
+    precision = Trait('float32', 'float16', 'int32', 'int16', 'int8', 'uint8', desc='precision (resolution) of signal')
+
     #: Indicates that the sounddevice buffer has overflown
     overflow = Bool(False, desc='Indicates if sounddevice buffer overflow')
 
@@ -62,7 +76,12 @@ class SoundDeviceSamplesGenerator(SamplesGenerator):
         self.numchannels = min(self.numchannels, sd.query_devices(self.device)['max_input_channels'])
 
     def _get_sample_freq(self):
-
+        if self._sample_freq is None:
+            self._sample_freq = sd.query_devices(self.device)['default_samplerate']
+        return self._sample_freq
+
+    def _set_sample_freq(self, f):
+        self._sample_freq = f
 
     def device_properties(self):
         """Returns
@@ -93,6 +112,7 @@ class SoundDeviceSamplesGenerator(SamplesGenerator):
             channels=self.numchannels,
             clip_off=True,
             samplerate=self.sample_freq,
+            dtype=self.precision,
         )
 
         with stream_obj as stream:
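A short sketch of how the new `precision` and settable `sample_freq` traits might be used (the device index is hypothetical; the sounddevice library and a real input device are required, otherwise the constructor now raises the ImportError added above):

import acoular as ac

mic = ac.SoundDeviceSamplesGenerator(device=0, numchannels=2)
mic.sample_freq = 48000             # now settable; otherwise falls back to the device default
mic.precision = 'int16'             # passed as dtype to the sounddevice Stream
block = next(mic.result(num=256))   # one block of shape (256, 2)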
acoular/signals.py
CHANGED
@@ -23,10 +23,9 @@ from numpy.random import RandomState
 from scipy.signal import resample, sosfilt, tf2sos
 from traits.api import Bool, CArray, CLong, Delegate, Float, HasPrivateTraits, Int, Property, Trait, cached_property
 
-from .internal import digest
-
 # acoular imports
-from .
+from .base import SamplesGenerator
+from .internal import digest
 
 
 class SignalGenerator(HasPrivateTraits):
@@ -238,10 +237,11 @@ class SineGenerator(SignalGenerator):
 
     def _set_rms(self, rms):
         warn(
+            'Using rms to set amplitude is deprecated and will be removed in version 25.01. '
             'Up to Acoular 20.02, rms is interpreted as sine amplitude. '
             'This has since been corrected (rms now is 1/sqrt(2) of amplitude). '
             "Use 'amplitude' trait to directly set the amplitude.",
-
+            DeprecationWarning,
             stacklevel=2,
         )
         self._amp = rms * 2**0.5
@@ -279,9 +279,23 @@ class SineGenerator(SignalGenerator):
 
 
 class GenericSignalGenerator(SignalGenerator):
-    """Generate signal from output of :class:`~acoular.
+    """Generate signal from output of :class:`~acoular.base.SamplesGenerator` object.
+
+    This class can be used to inject arbitrary signals into Acoular processing
+    chains. For example, it can be used to read signals from an HDF5 file or create any signal
+    by using the :class:`acoular.sources.TimeSamples` class.
+
+    Example
+    -------
+    >>> import numpy as np
+    >>> from acoular import TimeSamples, GenericSignalGenerator
+    >>> data = np.random.rand(1000, 1)
+    >>> ts = TimeSamples(data=data, sample_freq=51200)
+    >>> sig = GenericSignalGenerator(source=ts)
+
+    """
 
-    #: Data source; :class:`~acoular.
+    #: Data source; :class:`~acoular.base.SamplesGenerator` or derived object.
     source = Trait(SamplesGenerator)
 
     #: Sampling frequency of output signal, as given by :attr:`source`.