acoular 24.10__py3-none-any.whl → 25.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
acoular/process.py CHANGED
@@ -1,7 +1,8 @@
1
1
  # ------------------------------------------------------------------------------
2
2
  # Copyright (c) Acoular Development Team.
3
3
  # ------------------------------------------------------------------------------
4
- """Implements general purpose blockwise processing methods independent of the domain (time or frequency).
4
+ """
5
+ General purpose blockwise processing methods independent of the domain (time or frequency).
5
6
 
6
7
  .. autosummary::
7
8
  :toctree: generated/
@@ -11,6 +12,7 @@
11
12
  SampleSplitter
12
13
  TimeAverage
13
14
  TimeCache
15
+ SamplesBuffer
14
16
  """
15
17
 
16
18
  import threading
@@ -18,19 +20,39 @@ from collections import deque
18
20
  from inspect import currentframe
19
21
  from warnings import warn
20
22
 
21
- from traits.api import Bool, Dict, Instance, Int, Property, Trait, cached_property, on_trait_change
23
+ import numpy as np
24
+ from traits.api import Any, Array, Bool, Dict, Enum, Instance, Int, Property, Union, cached_property, on_trait_change
22
25
 
26
+ # acoular imports
23
27
  from .base import Generator, InOut
24
28
  from .configuration import config
29
+ from .deprecation import deprecated_alias
25
30
  from .h5cache import H5cache
26
31
  from .h5files import H5CacheFileBase
27
32
  from .internal import digest
33
+ from .tools.utils import find_basename
28
34
 
29
35
 
30
36
  class LockedGenerator:
31
- """Creates a Thread Safe Iterator.
32
- Takes an iterator/generator and makes it thread-safe by
33
- serializing call to the `next` method of given iterator/generator.
37
+ """
38
+ Thread-safe wrapper for an iterator or generator.
39
+
40
+ The :class:`LockedGenerator` class ensures that calls to the ``__next__`` method of the
41
+ given iterator or generator are thread-safe, preventing race conditions when accessed by
42
+ multiple threads simultaneously.
43
+
44
+ It achieves thread safety by using a lock to serialize access to the underlying
45
+ iterator or generator.
46
+
47
+ Parameters
48
+ ----------
49
+ it : iterator or generator
50
+ The iterator or generator to be made thread-safe.
51
+
52
+ See Also
53
+ --------
54
+ :class:`acoular.process.SampleSplitter` :
55
+ Distribute data from a source to several following objects in a block-wise manner.
34
56
  """
35
57
 
36
58
  def __init__(self, it):
@@ -38,66 +60,79 @@ class LockedGenerator:
38
60
  self.lock = threading.Lock()
39
61
 
40
62
  def __next__(self):
63
+ """Fetch the next item from the iterator or generator in a thread-safe manner."""
41
64
  with self.lock:
42
65
  return self.it.__next__()
43
66
 
44
67
 
68
+ @deprecated_alias({'naverage': 'num_per_average', 'numsamples': 'num_samples'}, read_only=['numsamples'])
45
69
  class Average(InOut):
46
- """Calculates the average across consecutive time samples or frequency snapshots.
70
+ """
71
+ Calculate the average across consecutive time samples or frequency snapshots.
47
72
 
48
73
  The average operation is performed differently depending on the source type.
49
- If the source is a time domain source (e.g. derived from :class:`~acoular.base.SamplesGenerator`),
50
- the average is calculated over a certain number of time samples given by :attr:`naverage`.
51
- If the source is a frequency domain source (e.g. derived from :class:`~acoular.base.SpectraGenerator`),
52
- the average is calculated over a certain number of snapshots given by :attr:`naverage`.
74
+ If the source is a time domain source (e.g. derived from
75
+ :class:`~acoular.base.SamplesGenerator`), the average is calculated
76
+ over a certain number of time samples given by :attr:`num_per_average`.
77
+ If the source is a frequency domain source (e.g. derived from
78
+ :class:`~acoular.base.SpectraGenerator`), the average is calculated
79
+ over a certain number of frequency snapshots given by :attr:`num_per_average`.
80
+
81
+ See Also
82
+ --------
83
+ :class:`acoular.base.InOut` :
84
+ Receive data from any source domain and return signals in the same domain.
53
85
 
54
86
  Examples
55
87
  --------
56
- For estimate the RMS of a white noise (time-domain) signal, the average of the squared signal can be calculated:
57
-
58
- >>> import acoular as ac
59
- >>> import numpy as np
60
- >>>
61
- >>> signal = ac.WNoiseGenerator(sample_freq=51200, numsamples=51200, rms=2.0).signal()
62
- >>> ts = ac.TimeSamples(data=signal[:, np.newaxis], sample_freq=51200)
63
- >>> tp = ac.TimePower(source=ts)
64
- >>> avg = ac.Average(source=tp, naverage=512)
65
- >>> mean_squared_value = next(avg.result(num=1))
66
- >>> rms = np.sqrt(mean_squared_value)[0, 0]
67
- >>> print(rms)
68
- 1.9985200025816718
69
-
70
- Here, each evaluation of the generator created by the :meth:`result` method of the :class:`Average` object
71
- via the :meth:`next` function returns :code:`num=1` average across a snapshot of 512 samples.
72
-
73
- If the source is a frequency domain source, the average is calculated over a certain number of
74
- snapshots, defined by :attr:`naverage`.
75
-
76
- >>> fft = ac.RFFT(source=ts, block_size=64)
77
- >>> ps = ac.AutoPowerSpectra(source=fft)
78
- >>> avg = ac.Average(source=ps, naverage=16)
79
- >>> mean_power = next(avg.result(num=1))
80
- >>> print(np.sqrt(mean_power.sum()))
81
- 2.0024960894399295
82
-
83
- Here, the generator created by the :meth:`result` method of the :class:`Average` object
84
- returns the average across 16 snapshots in the frequency domain.
88
+ To estimate the RMS of a white noise (time-domain) signal, the average of the squared
89
+ signal can be calculated:
85
90
 
91
+ >>> import acoular as ac
92
+ >>> import numpy as np
93
+ >>>
94
+ >>> signal = ac.WNoiseGenerator(sample_freq=51200, num_samples=51200, rms=2.0).signal()
95
+ >>> ts = ac.TimeSamples(data=signal[:, np.newaxis], sample_freq=51200)
96
+ >>> tp = ac.TimePower(source=ts)
97
+ >>> avg = ac.Average(source=tp, num_per_average=512)
98
+ >>> mean_squared_value = next(avg.result(num=1))
99
+ >>> rms = np.sqrt(mean_squared_value)[0, 0]
100
+ >>> print(rms)
101
+ 1.9985200025816718
102
+
103
+ Here, each evaluation of the generator created by the :meth:`result` method of the
104
+ :class:`Average` object via the :meth:`next` function returns :code:`num=1` average across a
105
+ snapshot of 512 time samples.
106
+
107
+ If the source is a frequency domain source, the average is calculated over a certain number
108
+ of frequency snapshots, defined by :attr:`num_per_average`.
109
+
110
+ >>> fft = ac.RFFT(source=ts, block_size=64)
111
+ >>> ps = ac.AutoPowerSpectra(source=fft)
112
+ >>> avg = ac.Average(source=ps, num_per_average=16)
113
+ >>> mean_power = next(avg.result(num=1))
114
+ >>> print(np.sqrt(mean_power.sum()))
115
+ 2.0024960894399295
116
+
117
+ Here, the generator created by the :meth:`result` method of the :class:`Average` object
118
+ returns the average across 16 snapshots in the frequency domain.
86
119
  """
87
120
 
88
- #: Number of samples (time domain source) or snapshots (frequency domain source)
89
- #: to average over, defaults to 64.
90
- naverage = Int(64, desc='number of samples to average over')
121
+ #: The number of samples (time domain source) or snapshots (frequency domain source)
122
+ #: to average over. Default is ``64``.
123
+ num_per_average = Int(64, desc='number of samples/snapshots to average over')
91
124
 
92
- #: Sampling frequency of the output signal, is set automatically.
93
- sample_freq = Property(depends_on='source.sample_freq, naverage')
125
+ #: The sampling frequency of the output signal. It is set automatically as
126
+ #: (:attr:`~acoular.base.Generator.sample_freq` ``/`` :attr:`num_per_average`).
127
+ sample_freq = Property(depends_on=['source.sample_freq', 'num_per_average'])
94
128
 
95
- #: Number of samples (time domain) or snapshots (frequency domain) of the output signal.
96
- #: Is set automatically.
97
- numsamples = Property(depends_on='source.numsamples, naverage')
129
+ #: The number of samples (time domain) or snapshots (frequency domain) of the output signal.
130
+ #: It is set automatically as
131
+ #: (:attr:`~acoular.base.Generator.num_samples` ``/`` :attr:`num_per_average`).
132
+ num_samples = Property(depends_on=['source.num_samples', 'num_per_average'])
98
133
 
99
- # internal identifier
100
- digest = Property(depends_on=['source.digest', '__class__', 'naverage'])
134
+ #: A unique identifier based on the class properties.
135
+ digest = Property(depends_on=['source.digest', 'num_per_average'])
101
136
 
102
137
  @cached_property
103
138
  def _get_digest(self):
@@ -106,32 +141,52 @@ class Average(InOut):
106
141
  @cached_property
107
142
  def _get_sample_freq(self):
108
143
  if self.source:
109
- return 1.0 * self.source.sample_freq / self.naverage
144
+ return 1.0 * self.source.sample_freq / self.num_per_average
110
145
  return None
111
146
 
112
147
  @cached_property
113
- def _get_numsamples(self):
148
+ def _get_num_samples(self):
114
149
  if self.source:
115
- return self.source.numsamples / self.naverage
150
+ return self.source.num_samples / self.num_per_average
116
151
  return None
117
152
 
118
153
  def result(self, num):
119
- """Python generator that yields the output block-wise.
154
+ """
155
+ Generate averaged output blocks from the source data.
156
+
157
+ This method implements a Python generator that yields blocks of averaged data
158
+ from the source. The averaging is performed over :attr:`num_per_average` samples
159
+ (for time-domain sources) or snapshots (for frequency-domain sources).
160
+ The size of the blocks yielded is defined by the ``num`` parameter.
120
161
 
121
162
  Parameters
122
163
  ----------
123
- num : integer
124
- This parameter defines the size of the blocks to be yielded
125
- (i.e. the number of samples per block).
126
-
127
- Returns
128
- -------
129
- Average of the output of source.
130
- Yields samples in blocks of shape (num, numchannels).
131
- The last block may be shorter than num.
132
-
164
+ num : :class:`int`
165
+ The number of averaged blocks to yield at a time. Each block contains the average over
166
+ :attr:`num_per_average` time samples or frequency snapshots. The last block may be
167
+ shorter than the specified size if the remaining data is insufficient.
168
+
169
+ Yields
170
+ ------
171
+ :class:`numpy.ndarray`
172
+ A 2D NumPy array of shape ``(num, num_channels)``, where ``num`` is the number
173
+ of averaged blocks requested, and ``num_channels`` corresponds to the number of channels
174
+ in the source, as specified by :attr:`~acoular.base.Generator.num_channels`.
175
+ Each entry in the array is the average over :attr:`num_per_average` samples/snapshots.
176
+
177
+ Notes
178
+ -----
179
+ - The averaging operation depends on the source type:
180
+ - For time-domain sources (e.g., derived from :class:`~acoular.base.SamplesGenerator`),
181
+ the average is calculated over :attr:`num_per_average` time samples.
182
+ - For frequency-domain sources (e.g., derived from
183
+ :class:`~acoular.base.SpectraGenerator`), the average is calculated over
184
+ :attr:`num_per_average` frequency snapshots.
185
+ - The generator will stop yielding when the source data is exhausted.
186
+ - If the source provides fewer than ``num * num_per_average`` samples,
187
+ the final block may be smaller than the requested ``num`` size.
133
188
  """
134
- nav = self.naverage
189
+ nav = self.num_per_average
135
190
  for temp in self.source.result(num * nav):
136
191
  ns, nc = temp.shape
137
192
  nso = int(ns / nav)
@@ -140,14 +195,32 @@ class Average(InOut):
140
195
 
141
196
 
142
197
  class Cache(InOut):
143
- """Caches source output in cache file.
198
+ """
199
+ Cache the output of a source in a file to avoid redundant computations.
200
+
201
+ The :class:`Cache` class stores the output of a source (derived from
202
+ :class:`~acoular.base.Generator`) in a cache file within the Acoular cache directory.
203
+ This enables faster reuse of precomputed data by avoiding time-consuming recalculations.
204
+ The cache behavior is managed through the :class:`~acoular.configuration.Config` class by
205
+ setting the :attr:`~acoular.configuration.Config.global_caching` attribute.
206
+
207
+ The class intelligently determines whether to use the cached data, update it,
208
+ or bypass caching based on the global caching configuration and the state of the cache file.
209
+ The caching mechanism supports scenarios such as:
144
210
 
145
- This class is used to cache the output of a :class:`acoular.base.Generator` derived source
146
- object in a cache file to circumvent time-consuming re-calculation.
147
- The cache file is created in the Acoular cache directory.
211
+ - Reading from a complete or incomplete cache.
212
+ - Overwriting an existing cache.
213
+ - Operating in a read-only or no-cache mode.
214
+
215
+ See Also
216
+ --------
217
+ :class:`acoular.base.InOut` : Receive data from any source domain and return signals in the same
218
+ domain.
148
219
 
149
220
  Examples
150
221
  --------
222
+ Caching the output of an FFT computation:
223
+
151
224
  >>> import acoular as ac
152
225
  >>> import numpy as np
153
226
  >>>
@@ -158,27 +231,28 @@ class Cache(InOut):
158
231
  >>> cache = ac.Cache(source=fft) # cache the output of the FFT in cache file
159
232
  >>> for block in cache.result(num=1): # read the cached data block-wise
160
233
  ... print(block.shape)
161
- [('_cache.h5', 1)]
234
+ [('void_cache.h5', 1)]
162
235
  (1, 513)
163
236
 
164
- The caching behaviour can be controlled by the :class:`~acoular.configuration.Config` instance
165
- via the :attr:`~acoular.configuration.Config.global_caching` attribute.
166
- To turn off caching, set :attr:`~acoular.configuration.Config.global_caching` to 'none' before
167
- running the code. The cache file directory can be obtained (and set) via the
168
- :attr:`~acoular.configuration.Config.cache_dir`
237
+ Disabling caching globally:
169
238
 
170
239
  >>> ac.config.global_caching = 'none'
171
240
 
241
+ Changing the cache directory:
242
+
243
+ >>> ac.config.cache_dir = '/path/to/cache_dir' # doctest: +SKIP
172
244
  """
173
245
 
174
- # basename for cache
175
- basename = Property(depends_on='digest')
246
+ # The basename for the cache file.
247
+ # Derived from the :attr:`digest` property and used to uniquely identify the cache file.
248
+ basename = Property(depends_on=['digest'])
176
249
 
177
- # hdf5 cache file
250
+ # The HDF5 cache file instance.
251
+ # This is used to store or retrieve cached data in the Acoular cache directory.
178
252
  h5f = Instance(H5CacheFileBase, transient=True)
179
253
 
180
- # internal identifier
181
- digest = Property(depends_on=['source.digest', '__class__'])
254
+ #: A unique identifier based on the cache properties.
255
+ digest = Property(depends_on=['source.digest'])
182
256
 
183
257
  @cached_property
184
258
  def _get_digest(self):
@@ -186,17 +260,7 @@ class Cache(InOut):
186
260
 
187
261
  @cached_property
188
262
  def _get_basename(self):
189
- obj = self.source # start with source
190
- basename = 'void' # if no file source is found
191
- while obj:
192
- if 'basename' in obj.all_trait_names(): # at original source?
193
- basename = obj.basename # get the name
194
- break
195
- try:
196
- obj = obj.source # traverse down until original data source
197
- except AttributeError:
198
- obj = None
199
- return basename
263
+ return find_basename(self.source)
200
264
 
201
265
  def _pass_data(self, num):
202
266
  yield from self.source.result(num)
@@ -246,21 +310,40 @@ class Cache(InOut):
246
310
 
247
311
  # result generator: delivers input, possibly from cache
248
312
  def result(self, num):
249
- """Python generator that yields the output from cache block-wise.
313
+ """
314
+ Generate data blocks from the source, using cache when available.
315
+
316
+ This method acts as a Python generator that yields blocks of output data from the source,
317
+ reading from the cache file when possible. The size of the data blocks is determined by the
318
+ ``num`` parameter. The caching mechanism helps prevent redundant calculations by storing and
319
+ reusing the source's output.
250
320
 
251
321
  Parameters
252
322
  ----------
253
- num : integer
254
- This parameter defines the size of the blocks to be yielded
255
- (i.e. the number of samples per block).
256
-
257
- Returns
258
- -------
259
- Samples in blocks of shape (num, numchannels).
260
- The last block may be shorter than num.
261
- Echos the source output, but reads it from cache
262
- when available and prevents unnecassary recalculation.
263
-
323
+ num : :class:`int`
324
+ The number of time samples or frequency snapshots per block to yield.
325
+ The final block may be smaller if there is insufficient data.
326
+
327
+ Yields
328
+ ------
329
+ :class:`numpy.ndarray`
330
+ A 2D NumPy array of shape ``(num, num_channels)`` representing the output data.
331
+ Each block is either retrieved from the cache file or generated by the source
332
+ and cached dynamically during processing.
333
+
334
+ Notes
335
+ -----
336
+ - The behavior of the caching mechanism depends on the
337
+ :attr:`~acoular.configuration.Config.global_caching` setting:
338
+
339
+ - ``'none'``: Bypasses caching and directly retrieves data from the source.
340
+ - ``'readonly'``: Reads data from the cache if available; otherwise,
341
+ retrieves data from the source without caching.
342
+ - ``'overwrite'``: Replaces any existing cache with newly computed data.
343
+
344
+ - If the cache file is incomplete or corrupted, the method may generate new data
345
+ from the source to update the cache unless the caching mode is ``'readonly'``.
346
+ - The cache node name is based on the source's :attr:`digest` attribute.
264
347
  """
265
348
  if config.global_caching == 'none':
266
349
  generator = self._pass_data
@@ -282,8 +365,8 @@ class Cache(InOut):
282
365
  elif not self.h5f.get_data_by_reference(nodename).attrs['complete']:
283
366
  if config.global_caching == 'readonly':
284
367
  warn(
285
- "Cache file is incomplete for nodename %s. With config.global_caching='readonly', the cache file will not be used!"
286
- % str(nodename),
368
+ f"Cache file is incomplete for nodename {nodename}. With config.global_caching='readonly', \
369
+ the cache file will not be used!",
287
370
  Warning,
288
371
  stacklevel=1,
289
372
  )
@@ -298,45 +381,130 @@ class Cache(InOut):
298
381
 
299
382
 
300
383
  class SampleSplitter(InOut):
301
- """Distributes data blocks from source to several following objects.
302
- A separate block buffer is created for each registered object in
303
- (:attr:`block_buffer`) .
304
384
  """
385
+ Distribute data from a source to multiple connected objects in a block-wise manner.
386
+
387
+ The :class:`SampleSplitter` class is designed to manage the distribution of data blocks from a
388
+ single source object, derived from :class:`~acoular.base.Generator`, to multiple target
389
+ objects, also derived from :class:`~acoular.base.Generator`. Each connected target object
390
+ is assigned a dedicated buffer to hold incoming data blocks. These buffers operate in a
391
+ first-in-first-out (FIFO) manner, ensuring efficient and parallelized data handling.
392
+
393
+ This class is particularly useful when distributing data blocks from a streaming source
394
+ to multiple downstream processing objects.
395
+
396
+ Each registered target object maintains its own dedicated block buffer, allowing for independent
397
+ data management. The buffer size can be customized per object, and different overflow handling
398
+ strategies can be configured, such as raising an error, issuing a warning, or discarding old
399
+ data. This ensures efficient parallel data processing, making it well-suited for complex
400
+ workflows.
401
+
402
+ Notes
403
+ -----
404
+ - Buffers are dynamically created and managed for each registered object.
405
+ - Buffer overflow behavior can be set individually for each target object.
305
406
 
306
- #: dictionary with block buffers (dict values) of registered objects (dict
307
- #: keys).
407
+ Examples
408
+ --------
409
+ Consider a time-domain signal stream where the FFT spectra and signal power are calculated
410
+ block-by-block and in parallel using the :class:`~acoular.fprocess.RFFT`,
411
+ :class:`~acoular.tprocess.TimePower`, and :class:`~acoular.process.Average` objects.
412
+ The :class:`SampleSplitter` is responsible for distributing incoming data blocks to the buffers
413
+ of the :class:`~acoular.fprocess.RFFT` and :class:`~acoular.tprocess.TimePower` objects whenever
414
+ either object requests data via the :meth:`result` generator.
415
+
416
+ For the :class:`~acoular.tprocess.TimePower` object, the buffer size is set to 10 blocks.
417
+ If the buffer is full, an error is raised, as the buffer overflow treatment is set to
418
+ ``'error'``. For the :class:`~acoular.fprocess.RFFT` object, the buffer size is limited to 1
419
+ block, and the overflow treatment is set to ``'none'``. This setup helps reduce latency in FFT
420
+ calculations, which may take longer than signal power calculations. If new data arrives and the
421
+ :class:`~acoular.fprocess.RFFT` buffer is full, the :class:`SampleSplitter` will discard the
422
+ oldest block, ensuring that the :class:`~acoular.fprocess.RFFT`
423
+ object always receives the most recent block of data.
424
+
425
+ >>> import acoular as ac
426
+ >>> import numpy as np
427
+ >>>
428
+ >>> # create a time domain signal source
429
+ >>> ts = ac.TimeSamples(data=np.random.rand(1024, 1), sample_freq=51200)
430
+ >>>
431
+ >>> # create the sample splitter object
432
+ >>> ss = ac.SampleSplitter(source=ts)
433
+ >>>
434
+ >>> # create the FFT spectra and further objects that receive the data
435
+ >>> fft = ac.RFFT(source=ss, block_size=64)
436
+ >>> pow = ac.TimePower(source=ss)
437
+ >>> avg = ac.Average(source=pow, num_per_average=64)
438
+ >>>
439
+ >>> # register the subsequent processing block objects at the sample splitter
440
+ >>> ss.register_object(fft, buffer_size=1, buffer_overflow_treatment='none')
441
+ >>> ss.register_object(pow, buffer_size=10, buffer_overflow_treatment='error')
442
+
443
+ After object registration, the ``SampleSplitter`` object is ready to distribute the data to the
444
+ object buffers. The block buffers can be accessed via the :attr:`block_buffer` attribute of the
445
+ ``SampleSplitter`` object.
446
+
447
+ >>> ss.block_buffer.values()
448
+ dict_values([deque([], maxlen=1), deque([], maxlen=10)])
449
+
450
+ Calling the result method of the FFT object will start the data collection and distribution
451
+ process.
452
+
453
+ >>> generator = fft.result(num=1)
454
+ >>> fft_res = next(generator)
455
+
456
+ Although we haven't called the result method of the signal power object, one data block is
457
+ already available in the buffer.
458
+
459
+ >>> print(len(ss.block_buffer[pow]))
460
+ 1
461
+
462
+ To remove registered objects from the :class:`SampleSplitter`, use the :meth:`remove_object`
463
+ method.
464
+
465
+ >>> ss.remove_object(pow)
466
+ >>> print(len(ss.block_buffer))
467
+ 1
468
+ """
469
+
470
+ #: A dictionary containing block buffers for registered objects.
471
+ #: Keys are the registered objects, and values are deque structures holding data blocks.
308
472
  block_buffer = Dict(key_trait=Instance(Generator))
309
473
 
310
- #: max elements/blocks in block buffers.
311
- buffer_size = Int(100)
474
+ #: The maximum number of blocks each buffer can hold.
475
+ #: Can be set globally for all objects or individually using a dictionary.
476
+ buffer_size = Union(
477
+ Int,
478
+ Dict(key_trait=Instance(Generator), value_trait=Int),
479
+ default_value=100,
480
+ )
312
481
 
313
- #: defines behaviour in case of block_buffer overflow. Can be set individually
314
- #: for each registered object.
315
- #:
316
- #: * 'error': an IOError is thrown by the class
317
- #: * 'warning': a warning is displayed. Possibly leads to lost blocks of data
318
- #: * 'none': nothing happens. Possibly leads to lost blocks of data
482
+ #: Defines behavior when a buffer exceeds its maximum size.
319
483
  buffer_overflow_treatment = Dict(
320
484
  key_trait=Instance(Generator),
321
- value_trait=Trait('error', 'warning', 'none'),
485
+ value_trait=Enum('error', 'warning', 'none'),
322
486
  desc='defines buffer overflow behaviour.',
323
487
  )
324
488
 
325
- # shadow trait to monitor if source deliver samples or is empty
489
+ # A shadow trait to monitor if source deliver samples or is empty.
326
490
  _source_generator_exist = Bool(False)
327
491
 
328
- # shadow trait to monitor if buffer of objects with overflow treatment = 'error'
492
+ # A shadow trait to monitor if buffer of objects with overflow treatment = 'error'
329
493
  # or warning is overfilled. Error will be raised in all threads.
330
494
  _buffer_overflow = Bool(False)
331
495
 
332
496
  # Helper Trait holds source generator
333
- _source_generator = Trait()
497
+ _source_generator = Instance(LockedGenerator)
334
498
 
335
- def _create_block_buffer(self, obj):
336
- self.block_buffer[obj] = deque([], maxlen=self.buffer_size)
499
+ def _create_block_buffer(self, obj, buffer_size=None):
500
+ if buffer_size is None:
501
+ buffer_size = self.buffer_size if isinstance(self.buffer_size, int) else self.buffer_size[obj]
502
+ self.block_buffer[obj] = deque([], maxlen=buffer_size)
337
503
 
338
- def _create_buffer_overflow_treatment(self, obj):
339
- self.buffer_overflow_treatment[obj] = 'error'
504
+ def _create_buffer_overflow_treatment(self, obj, buffer_overflow_treatment=None):
505
+ if buffer_overflow_treatment is None:
506
+ buffer_overflow_treatment = 'error'
507
+ self.buffer_overflow_treatment[obj] = buffer_overflow_treatment
340
508
 
341
509
  def _clear_block_buffer(self, obj):
342
510
  self.block_buffer[obj].clear()
@@ -349,7 +517,8 @@ class SampleSplitter(InOut):
349
517
 
350
518
  def _assert_obj_registered(self, obj):
351
519
  if obj not in self.block_buffer:
352
- raise OSError('calling object %s is not registered.' % obj)
520
+ msg = f'calling object {obj} is not registered.'
521
+ raise OSError(msg)
353
522
 
354
523
  def _get_objs_to_inspect(self):
355
524
  return [obj for obj in self.buffer_overflow_treatment if self.buffer_overflow_treatment[obj] != 'none']
@@ -360,7 +529,7 @@ class SampleSplitter(InOut):
360
529
  if self.buffer_overflow_treatment[obj] == 'error':
361
530
  self._buffer_overflow = True
362
531
  elif self.buffer_overflow_treatment[obj] == 'warning':
363
- warn('overfilled buffer for object: %s data will get lost' % obj, UserWarning, stacklevel=1)
532
+ warn(f'overfilled buffer for object: {obj} data will get lost', UserWarning, stacklevel=1)
364
533
 
365
534
  def _create_source_generator(self, num):
366
535
  for obj in self.block_buffer:
@@ -379,34 +548,110 @@ class SampleSplitter(InOut):
379
548
  self._remove_block_buffer(obj)
380
549
  self._create_block_buffer(obj)
381
550
 
382
- def register_object(self, *objects_to_register):
383
- """Function that can be used to register objects that receive blocks from this class."""
551
+ def register_object(self, *objects_to_register, buffer_size=None, buffer_overflow_treatment=None):
552
+ """
553
+ Register one or more target objects to the :class:`SampleSplitter` object.
554
+
555
+ This method creates and configures block buffers for the specified target objects, enabling
556
+ them to receive data blocks from the :class:`SampleSplitter`. Each registered object is
557
+ assigned a dedicated buffer with customizable size and overflow behavior.
558
+
559
+ Parameters
560
+ ----------
561
+ objects_to_register : :class:`~acoular.base.Generator` or list of :class:`~acoular.base.Generator`
562
+ A single object or a list of objects derived from :class:`~acoular.base.Generator` to be
563
+ registered as targets for data distribution.
564
+ buffer_size : :class:`int`, optional
565
+ The maximum number of data blocks each object's buffer can hold. If not specified,
566
+ the default buffer size (100 blocks) is used, or a globally defined size if
567
+ ``buffer_size`` is a dictionary.
568
+ buffer_overflow_treatment : :attr:`str`, optional
569
+ Defines the behavior when a buffer exceeds its maximum size. Options are:
570
+
571
+ - ``'error'``: Raises an :obj:`IOError` when the buffer overflows.
572
+ - ``'warning'``: Issues a warning and may result in data loss.
573
+ - ``'none'``: Silently discards the oldest data blocks to make room for new ones.
574
+ If not specified, the default behavior is ``'error'``.
575
+
576
+ Raises
577
+ ------
578
+ :obj:`OSError`
579
+ If any of the specified objects is already registered.
580
+ """ # noqa: W505
384
581
  for obj in objects_to_register:
385
582
  if obj not in self.block_buffer:
386
- self._create_block_buffer(obj)
387
- self._create_buffer_overflow_treatment(obj)
583
+ self._create_block_buffer(obj, buffer_size)
584
+ self._create_buffer_overflow_treatment(obj, buffer_overflow_treatment)
585
+ else:
586
+ msg = f'object {obj} is already registered.'
587
+ raise OSError(msg)
388
588
 
389
589
  def remove_object(self, *objects_to_remove):
390
- """Function that can be used to remove registered objects."""
590
+ """
591
+ Unregister one or more objects from the :class:`SampleSplitter`.
592
+
593
+ This method removes the specified objects and their associated block buffers from the
594
+ :class:`SampleSplitter`. If no objects are specified, all currently registered objects
595
+ are unregistered, effectively clearing all buffers.
596
+
597
+ Parameters
598
+ ----------
599
+ objects_to_remove : :class:`~acoular.base.Generator` or list of :class:`~acoular.base.Generator`, optional
600
+ A single object or a list of objects derived from :class:`~acoular.base.Generator` to be
601
+ removed from the :class:`SampleSplitter`.
602
+ If no objects are provided, all registered objects will be removed.
603
+
604
+ Raises
605
+ ------
606
+ :obj:`KeyError`
607
+ If any of the specified objects are not currently registered.
608
+
609
+ Notes
610
+ -----
611
+ - Once an object is removed, it will no longer receive data from the
612
+ :class:`SampleSplitter`.
613
+ - Removing an object also clears its associated buffer.
614
+ """ # noqa: W505
615
+ if not objects_to_remove:
616
+ objects_to_remove = list(self.block_buffer.keys())
391
617
  for obj in objects_to_remove:
392
618
  self._remove_block_buffer(obj)
393
619
  self._remove_buffer_overflow_treatment(obj)
394
620
 
395
621
  def result(self, num):
396
- """Python generator that yields the output block-wise from block-buffer.
622
+ """
623
+ Yield data blocks from the buffer to the calling object.
624
+
625
+ This generator method retrieves blocks of data for the calling object, either
626
+ from its dedicated block buffer or by processing new data from the source.
627
+ If the buffer is empty, new data blocks are generated and distributed to
628
+ all registered objects in a block-wise manner.
397
629
 
398
630
  Parameters
399
631
  ----------
400
- num : integer
401
- This parameter defines the size of the blocks to be yielded
402
- (i.e. the number of samples per block).
403
-
404
- Returns
405
- -------
406
- Samples in blocks of shape (num, numchannels).
407
- Delivers a block of samples to the calling object.
408
- The last block may be shorter than num.
409
-
632
+ num : :class:`int`
633
+ The size of each block to be yielded, defined as the number of samples per block.
634
+
635
+ Yields
636
+ ------
637
+ :class:`numpy.ndarray`
638
+ Blocks of data with shape ``(num, num_channels)``.
639
+ The last block may be shorter than ``num`` if the source data is exhausted.
640
+
641
+ Raises
642
+ ------
643
+ :obj:`OSError`
644
+ If the calling object is not registered with the :class:`SampleSplitter`.
645
+ :obj:`OSError`
646
+ If the block buffer reaches its maximum size and the overflow handling
647
+ policy is set to ``'error'``.
648
+
649
+ Notes
650
+ -----
651
+ - If the block buffer is empty, new data is fetched from the source and distributed to all
652
+ registered objects.
653
+ - Buffer overflow behavior is controlled by the :attr:`buffer_overflow_treatment` attribute,
654
+ which can be set to ``'error'``, ``'warning'``, or ``'none'``.
410
655
  """
411
656
  calling_obj = currentframe().f_back.f_locals['self']
412
657
  self._assert_obj_registered(calling_obj)
@@ -431,11 +676,15 @@ class SampleSplitter(InOut):
431
676
 
432
677
 
433
678
  class TimeAverage(Average):
434
- """Calculates average of the signal (Alias for :class:`acoular.process.Average`).
679
+ """
680
+ Calculate the average of the signal.
435
681
 
436
682
  .. deprecated:: 24.10
437
- Using :class:`~acoular.process.TimeAverage` is deprecated and will be removed in Acoular
438
- version 25.07. Use :class:`~acoular.process.Average` instead.
683
+ The use of :class:`~acoular.process.TimeAverage` is deprecated
684
+ and will be removed in Acoular version 25.07.
685
+ Please use :class:`~acoular.process.Average` instead for future compatibility.
686
+
687
+ Alias for :class:`~acoular.process.Average`.
439
688
  """
440
689
 
441
690
  def __init__(self, *args, **kwargs):
@@ -448,11 +697,15 @@ class TimeAverage(Average):
448
697
 
449
698
 
450
699
  class TimeCache(Cache):
451
- """Caches source signals in cache file (Alias for :class:`acoular.process.Cache`).
700
+ """
701
+ Cache source signals in cache file.
452
702
 
453
703
  .. deprecated:: 24.10
454
- Using :class:`~acoular.process.TimeCache` is deprecated and will be removed in Acoular
455
- version 25.07. Use :class:`~acoular.process.Cache` instead.
704
+ The use of :class:`~acoular.process.TimeCache` is deprecated
705
+ and will be removed in Acoular version 25.07.
706
+ Please use :class:`~acoular.process.Cache` instead for future compatibility.
707
+
708
+ Alias for :class:`~acoular.process.Cache`.
456
709
  """
457
710
 
458
711
  def __init__(self, *args, **kwargs):
@@ -462,3 +715,261 @@ class TimeCache(Cache):
462
715
  DeprecationWarning,
463
716
  stacklevel=2,
464
717
  )
718
+
719
+
720
+ class SamplesBuffer(InOut):
721
+ """
722
+ Handle buffering of samples from a source.
723
+
724
+ The :class:`SamplesBuffer` class buffers samples from a source and provides them in blocks of a
725
+ specified size. It supports various use cases for efficient handling of sample data.
726
+ Below is an example demonstrating its functionality.
727
+
728
+ Examples
729
+ --------
730
+ Suppose we want to draw blocks of 16 samples from the source, while ensuring that the buffer
731
+ always holds twice that number (32 samples). The following code achieves this behavior:
732
+
733
+ >>> import acoular as ac
734
+ >>> import numpy as np
735
+ >>> # create a white noise source with 512 samples
736
+ >>> source = ac.TimeSamples(
737
+ ... data=ac.WNoiseGenerator(
738
+ ... sample_freq=64,
739
+ ... num_samples=512,
740
+ ... ).signal()[:, np.newaxis],
741
+ ... sample_freq=64,
742
+ ... )
743
+ >>> # create a buffer with a size of 32 samples
744
+ >>> buffer = ac.process.SamplesBuffer(source=source, length=32)
745
+ >>> # get the first block of 16 samples
746
+ >>> block = next(buffer.result(num=16))
747
+ >>> np.testing.assert_array_equal(block, source.data[:16])
748
+
749
+ In the example above, the buffer initially collects blocks of the specified size from the
750
+ source. It then returns the first block of 16 samples. With subsequent calls to the
751
+ :meth:`result` method, the buffer refills and returns additional blocks of 16 samples.
752
+
753
+ In some cases, you may wish to retrieve a different number of samples from the source than you
754
+ want to return. This can be achieved by setting the :attr:`source_num` attribute. For example,
755
+ in the :class:`~acoular.tbeamform.BeamformerTimeTraj` class, the number of time samples varies
756
+ based on the expected delay for moving sources, while still adhering to the desired block size
757
+ for the buffer.
758
+
759
+ The :attr:`shift_index_by` attribute controls how the buffer updates its index when retrieving
760
+ data. If set to ``'num'``, the buffer returns :attr:`result_num` samples but forgets ``'num'``
761
+ samples from the buffer.
762
 + If set to ``'result_num'``, the buffer will return and forget the same number of samples.
763
+
764
+ >>> buffer = ac.process.SamplesBuffer(source=source, length=32, result_num=20, shift_index_by='num')
765
+ >>> block_sizes = []
766
+ >>> block_sizes.append(
767
+ ... next(buffer.result(num=16)).shape[0]
768
+ ... ) # this time, the buffer will return 20 samples, but the buffer will only forget the first 16 samples
769
+ >>> buffer.result_num = 24
770
+ >>> block_sizes.append(
771
+ ... next(buffer.result(num=16)).shape[0]
772
 + ... ) # this time, the buffer will return 24 samples, but the buffer will only forget 16 samples
773
+ >>> np.testing.assert_array_equal(block_sizes, [20, 24])
774
+ """ # noqa: W505
775
+
776
+ #: The number of samples that the buffer can hold.
777
+ length = Int(desc='number of samples that fit in the buffer')
778
+
779
+ #: The number of samples per block to obtain from the source. If set to ``None``, the number of
780
+ #: samples will be determined by the ``num`` argument of the :meth:`result` method.
781
+ source_num = Union(
782
+ None,
783
+ Int(),
784
+ default_value=None,
785
+ desc='number of samples to return from the source. If "None", use "num" argument of result method',
786
+ )
787
+
788
+ #: The number of samples to return from the buffer. If set to ``None``, the number of
789
+ #: samples will be determined by the ``num`` argument of the :meth:`result` method.
790
+ result_num = Union(
791
+ None,
792
+ Int(),
793
+ default_value=None,
794
+ desc="number of samples to return from the buffer. If 'None', use 'num' argument of result method",
795
+ )
796
+
797
+ #: Index shift value for the buffer.
798
+ #:
799
+ #: - If set to ``'result_num'``, the buffer will return and forget :attr:`result_num` samples.
800
+ #: - If set to ``'num'``, the buffer will return :attr:`result_num` samples but forget ``num``
801
+ #: samples.
802
+ shift_index_by = Enum(
803
+ ('result_num', 'num'),
804
+ desc=(
805
+ 'index shift value for the buffer. If "result_num", use "result_num" trait.'
806
+ ' If "num", use "num" argument of result method'
807
+ ),
808
+ )
809
+
810
+ #: The current filling level of the buffer, i.e., how many samples are currently available.
811
+ level = Property(desc='current filling level of buffer')
812
+
813
+ #: The data type of the elements in the buffer.
814
+ dtype = Any(desc='data type of the buffer')
815
+
816
+ # Flag indicating if the source is empty (for internal use).
817
+ _empty_source = Bool(False, desc='flag to indicate that the source is empty')
818
+
819
+ # The actual buffer holding the samples for processing.
820
+ _buffer = Array(shape=(None, None), desc='buffer for block processing')
821
+
822
+ # The current index position in the buffer.
823
+ _index = Int(desc='current index in buffer')
824
+
825
+ def _get_level(self):
826
+ return self._buffer.shape[0] - self._index
827
+
828
+ def _create_new_buffer(self):
829
+ self._buffer = np.zeros((self.length, self.num_channels), dtype=self.dtype)
830
+ self._index = self.length
831
+ self._empty_source = False
832
+
833
+ def _write_to_buffer(self, data):
834
+ ns = data.shape[0]
835
+ self._buffer[0 : (self.length - ns)] = self._buffer[-(self.length - ns) :]
836
+ self._buffer[-ns:, :] = data.astype(self.dtype)
837
+ self._index -= ns
838
+
839
+ def increase_buffer(self, num):
840
+ """
841
+ Increase the size of the buffer by a specified number of samples.
842
+
843
+ This method expands the buffer by appending additional samples, effectively increasing
844
+ its capacity. The new samples are initialized to zero. The index of the buffer is adjusted
845
+ accordingly to accommodate the increase.
846
+
847
+ Parameters
848
+ ----------
849
+ num : :class:`int`
850
+ The number of samples by which to increase the buffer size.
851
+ """
852
+ ar = np.zeros((num, self.num_channels), dtype=self._buffer.dtype)
853
+ self._buffer = np.concatenate((ar, self._buffer), axis=0)
854
+ self._index += num
855
+ self.length += num
856
+
857
+ def read_from_buffer(self, num):
858
+ """
859
+ Read a specified number of samples from the buffer.
860
+
861
+ This method retrieves samples from the buffer, ensuring that the requested number of samples
862
+ is returned. If the buffer contains fewer samples than requested, the method will return all
863
+ available samples. The index of the buffer is updated based on the :attr:`shift_index_by`
864
+ setting.
865
+
866
+ Parameters
867
+ ----------
868
+ num : :class:`int`
869
+ The number of samples to read from the buffer.
870
+
871
+ Returns
872
+ -------
873
+ :class:`numpy.ndarray`
874
+ A block of samples (array) from the buffer.
875
+
876
+ Notes
877
+ -----
878
+ - If the :attr:`result_num` attribute is set, it determines the number of samples to return.
879
+ - The method ensures the buffer index is adjusted according to the :attr:`shift_index_by`
880
+ setting. Options are:
881
+
882
+ - ``'result_num'``: The index will shift by the number of samples returned.
883
+ - ``'num'``: The index will shift by the number of samples requested (``num``).
884
+ """
885
+ rnum = num if self.result_num is None else self.result_num
886
+ rnum = rnum if self.level >= rnum else self.level
887
+ data = self._buffer[self._index : self._index + rnum]
888
+ if self.shift_index_by == 'result_num':
889
+ self._index += rnum
890
+ else:
891
+ self._index += num
892
+ return data
893
+
894
+ def fill_buffer(self, snum):
895
+ """
896
+ Fill the buffer with samples from the source.
897
+
898
+ The :meth:`fill_buffer` method collects samples from the source and writes them to the
899
+ buffer. It continues to fill the buffer until there are enough samples available, or the
900
+ source runs out of data. If the buffer reaches its maximum capacity, additional samples are
901
+ discarded. The buffer will only contain the most recent data, and its index will be updated
902
+ accordingly.
903
+
904
+ Parameters
905
+ ----------
906
+ snum : :class:`int`
907
+ The number of samples to retrieve from the source in each iteration.
908
+
909
+ Yields
910
+ ------
911
+ :obj:`None`
912
+ This method is a generator and yields control back after filling the buffer.
913
+
914
+ Notes
915
+ -----
916
+ - The method ensures that the buffer is filled with the required number of samples,
917
+ adjusting the buffer size if necessary (via the :meth:`increase_buffer` method) when more
918
+ space is needed.
919
+ - Once the buffer is filled, it yields control and resumes only when the buffer is ready for
920
+ more data.
921
+ """
922
+ source_generator = self.source.result(snum)
923
+ while not self._empty_source:
924
+ while self._index >= snum:
925
+ if self.result_num is not None:
926
+ while self.result_num > self.length:
927
+ self.increase_buffer(snum)
928
+ try:
929
+ self._write_to_buffer(next(source_generator))
930
+ except StopIteration:
931
+ self._empty_source = True
932
+ break
933
+ yield
934
+
935
+ def result(self, num):
936
+ """
937
+ Return blocks of samples from the buffer.
938
+
939
+ The :meth:`result` method retrieves blocks of samples from the buffer and yields them to the
940
+ calling process. The number of samples per block is determined by the ``num`` argument, but
941
+ can also be influenced by other attributes like `result_num` (if set). If the buffer is not
942
+ yet filled, it will continue to collect samples from the source until the buffer contains
943
+ enough data. Once the buffer is full, it will return the requested blocks of samples.
944
+
945
+ Parameters
946
+ ----------
947
+ num : :class:`int`
948
+ The number of samples to return in each block.
949
+ This value specifies the size of the blocks to be yielded from the buffer.
950
+
951
+ Yields
952
+ ------
953
+ :class:`numpy.ndarray`
954
+ A block of samples from the buffer. The size of the block is determined by the ``num``
955
+ parameter or the :attr:`result_num` attribute, depending on the buffer's configuration.
956
+
957
+ Notes
958
+ -----
959
+ - If :attr:`result_num` is set, the method will use it to determine the number of samples
960
+ returned instead of the ``num`` parameter.
961
+ - If the buffer is empty or does not have enough samples, it will attempt to fill the buffer
962
+ by collecting data from the source. If there are not enough samples available from the
963
+ source, the method will yield whatever samples are left in the buffer.
964
+ """
965
+ self._create_new_buffer()
966
+ snum = num
967
+ if self.source_num is not None:
968
+ snum = self.source_num
969
+ for _ in self.fill_buffer(snum):
970
+ if self.level > 0:
971
+ yield self.read_from_buffer(num)
972
+ else:
973
+ break
974
+ while self.level > 0:
975
+ yield self.read_from_buffer(num)