acoular 25.4-py3-none-any.whl → 25.10-py3-none-any.whl

This diff shows the content of publicly released package versions as it appears in their respective public registries and is provided for informational purposes only.
acoular/h5cache.py CHANGED
@@ -2,7 +2,8 @@
2
2
  # Copyright (c) Acoular Development Team.
3
3
  # ------------------------------------------------------------------------------
4
4
 
5
- # imports from other packages
5
+ """Implements a cache for HDF5 files used in Acoular."""
6
+
6
7
  import gc
7
8
  from pathlib import Path
8
9
  from weakref import WeakValueDictionary
@@ -31,23 +32,27 @@ class HDF5Cache(HasStrictTraits):
31
32
  pass
32
33
 
33
34
  def close_cachefile(self, cachefile):
35
+ """Close a cache file and remove it from the reference counter."""
34
36
  self.open_file_reference.pop(Path(cachefile.filename))
35
37
  cachefile.close()
36
38
 
37
39
  def get_open_cachefiles(self):
40
+ """Get an iterator over all open cache files."""
38
41
  try:
39
42
  return self.open_files.itervalues()
40
43
  except AttributeError:
41
44
  return iter(self.open_files.values())
42
45
 
43
46
  def close_unreferenced_cachefiles(self):
47
+ """Close cache files that are no longer referenced by any objects."""
44
48
  for cachefile in self.get_open_cachefiles():
45
49
  if not self.is_reference_existent(cachefile):
46
50
  self.close_cachefile(cachefile)
47
51
 
48
52
  def is_reference_existent(self, file):
53
+ """Check if a file object still has active references."""
49
54
  exist_flag = False
50
- # inspect all refererres to the file object
55
+ # inspect all referrers to the file object
51
56
  gc.collect() # clear garbage before collecting referrers
52
57
  for ref in gc.get_referrers(file):
53
58
  # does the file object have a referrer that has a 'h5f'
@@ -92,10 +97,11 @@ class HDF5Cache(HasStrictTraits):
92
97
  return # cachefile is not created in readonly mode
93
98
 
94
99
  if isinstance(obj.h5f, file_cls):
95
- if Path(obj.h5f.filename).resolve() == filename:
100
+ h5filename = Path(obj.h5f.filename).resolve()
101
+ if h5filename == filename:
96
102
  self.busy = False
97
103
  return
98
- self._decrease_file_reference_counter(obj.h5f.filename)
104
+ self._decrease_file_reference_counter(h5filename)
99
105
 
100
106
  if filename not in self.open_files: # or tables.file._open_files.filenames
101
107
  if config.global_caching == 'readonly':
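
The reference-counting behavior documented in the new docstrings hinges on inspecting garbage-collector referrers: a cache file may be closed only when no object still holds it as its 'h5f' attribute. The following minimal sketch illustrates that idea with plain Python objects instead of HDF5 files; Consumer and is_still_referenced are illustrative names, not part of the Acoular API.

import gc
from types import SimpleNamespace


class Consumer:
    """Stands in for an Acoular object that keeps an open cache file in `h5f`."""

    def __init__(self, h5f):
        self.h5f = h5f


def is_still_referenced(file_obj):
    """Return True if some object still references `file_obj` via an `h5f` attribute."""
    gc.collect()  # clear garbage before collecting referrers
    for ref in gc.get_referrers(file_obj):
        # instance attributes show up as the owning object's __dict__
        if isinstance(ref, dict) and ref.get('h5f') is file_obj:
            return True
    return False


cachefile = SimpleNamespace(filename='cache.h5')  # stand-in for an open HDF5 cache file
user = Consumer(cachefile)
print(is_still_referenced(cachefile))  # True: `user` still needs the file
del user
print(is_still_referenced(cachefile))  # False: safe to close and drop from the cache
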
acoular/h5files.py CHANGED
@@ -2,6 +2,8 @@
2
2
  # Copyright (c) Acoular Development Team.
3
3
  # ------------------------------------------------------------------------------
4
4
 
5
+ """Implements base classes for handling HDF5 files."""
6
+
5
7
  from .configuration import config
6
8
 
7
9
 
@@ -9,25 +11,88 @@ class H5FileBase:
9
11
  """Base class for File objects that handle writing and reading of .h5 files."""
10
12
 
11
13
  def create_extendable_array(self, nodename, shape, precision, group=None):
12
- pass
14
+ """
15
+ Create an extendable array in the HDF5 file.
16
+
17
+ Parameters
18
+ ----------
19
+ nodename : :class:`str`
20
+ Name of the node (dataset) to create in the HDF5 file.
21
+ shape : :class:`tuple` of :class:`int`
22
+ Shape of the array to be created.
23
+ precision : :class:`str`
24
+ Data type/precision of the array (e.g., 'float32', 'int16').
25
+ group : object, optional
26
+ Group in which to create the array. If None, the root group is used.
27
+ """
13
28
 
14
29
  def get_data_by_reference(self, nodename, group=None):
15
- pass
30
+ """
31
+ Get data by reference from the HDF5 file.
32
+
33
+ Parameters
34
+ ----------
35
+ nodename : :class:`str`
36
+ Name of the node (dataset or group) to retrieve from the HDF5 file.
37
+ group : object, optional
38
+ The parent group in which to look for the node. If None, the root group is used.
39
+
40
+ Returns
41
+ -------
42
+ object
43
+ A reference to the requested node (e.g., a dataset or group object) in the HDF5 file.
44
+ """
16
45
 
17
46
  def set_node_attribute(self, node, attrname, value):
18
- pass
47
+ """
48
+ Set an attribute on a node.
49
+
50
+ Parameters
51
+ ----------
52
+ node : object
53
+ The node (e.g., group or dataset) to which the attribute will be set.
54
+ attrname : :class:`str`
55
+ The name of the attribute to set.
56
+ value : any
57
+ The value to assign to the attribute.
58
+ """
19
59
 
20
60
  def get_node_attribute(self, node, attrname):
21
- pass
61
+ """
62
+ Get an attribute from a node.
63
+
64
+ Parameters
65
+ ----------
66
+ node : object
67
+ The node (e.g., group or dataset) from which to retrieve the attribute.
68
+ attrname : :class:`str`
69
+ The name of the attribute to retrieve.
70
+
71
+ Returns
72
+ -------
73
+ object
74
+ The value of the specified attribute.
75
+ """
22
76
 
23
77
  def append_data(self, node, data):
24
- pass
78
+ """
79
+ Append data to an existing node.
80
+
81
+ Parameters
82
+ ----------
83
+ node : object
84
+ The node (e.g., array or dataset) in the HDF5 file to which data will be appended.
85
+ The expected type depends on the backend (e.g., PyTables node or h5py dataset).
86
+ data : array-like
87
+ The data to append. Should be compatible in shape and type with the existing node.
88
+ The format and type must match the node's requirements.
89
+ """
25
90
 
26
91
  def remove_data(self, nodename):
27
- pass
92
+ """Remove data from the HDF5 file."""
28
93
 
29
94
  def create_new_group(self, name, group=None):
30
- pass
95
+ """Create a new group in the HDF5 file."""
31
96
 
32
97
 
33
98
  class H5CacheFileBase:
@@ -36,10 +101,10 @@ class H5CacheFileBase:
36
101
  compression_filter = None
37
102
 
38
103
  def is_cached(self, nodename, group=None):
39
- pass
104
+ """Check if data is cached in the HDF5 file."""
40
105
 
41
106
  def create_compressible_array(self, nodename, shape, precision, group=None):
42
- pass
107
+ """Create a compressible array in the HDF5 cache file."""
43
108
 
44
109
 
45
110
  if config.have_tables:
@@ -57,35 +122,45 @@ if config.have_tables:
57
122
  }
58
123
 
59
124
  class H5FileTables(H5FileBase, tables.File):
125
+ """Hdf5 File based on PyTables."""
126
+
60
127
  def create_extendable_array(self, nodename, shape, precision, group=None):
128
+ """Create an extendable array using PyTables."""
61
129
  if not group:
62
130
  group = self.root
63
131
  atom = precision_to_atom[precision]
64
132
  self.create_earray(group, nodename, atom, shape)
65
133
 
66
134
  def get_data_by_reference(self, nodename, group=None):
135
+ """Get data by reference using PyTables."""
67
136
  if not group:
68
137
  group = self.root
69
138
  return self.get_node(group, nodename)
70
139
 
71
140
  def set_node_attribute(self, node, attrname, value):
141
+ """Set an attribute on a PyTables node."""
72
142
  node.set_attr(attrname, value)
73
143
 
74
144
  def get_node_attribute(self, node, attrname):
145
+ """Get an attribute from a PyTables node."""
75
146
  return node._v_attrs[attrname] # noqa: SLF001
76
147
 
77
148
  def append_data(self, node, data):
149
+ """Append data to a PyTables node."""
78
150
  node.append(data)
79
151
 
80
152
  def remove_data(self, nodename):
153
+ """Remove data from PyTables file."""
81
154
  self.remove_node('/', nodename, recursive=True)
82
155
 
83
156
  def create_new_group(self, name, group=None):
157
+ """Create a new group in PyTables file."""
84
158
  if not group:
85
159
  group = self.root
86
160
  return self.create_group(group, name)
87
161
 
88
162
  def get_child_nodes(self, nodename):
163
+ """Get child nodes from a PyTables group."""
89
164
  for childnode in self.list_nodes(nodename):
90
165
  yield (childnode.name, childnode)
91
166
 
@@ -106,14 +181,18 @@ if config.have_tables:
106
181
  return result
107
182
 
108
183
  class H5CacheFileTables(H5FileTables, H5CacheFileBase):
184
+ """Hdf5 Cache File based on PyTables."""
185
+
109
186
  compression_filter = tables.Filters(complevel=5, complib='blosc')
110
187
 
111
188
  def is_cached(self, nodename, group=None):
189
+ """Check if data is cached in PyTables file."""
112
190
  if not group:
113
191
  group = self.root
114
192
  return nodename in group
115
193
 
116
194
  def create_compressible_array(self, nodename, shape, precision, group=None):
195
+ """Create a compressible array in PyTables cache file."""
117
196
  if not group:
118
197
  group = self.root
119
198
  atom = precision_to_atom[precision]
@@ -124,44 +203,56 @@ if config.have_h5py:
124
203
  import h5py
125
204
 
126
205
  class H5FileH5py(H5FileBase, h5py.File):
206
+ """Hdf5 File based on h5py."""
207
+
127
208
  def _get_in_file_path(self, nodename, group=None):
209
+ """Get the in-file path for h5py operations."""
128
210
  if not group:
129
211
  return '/' + nodename
130
212
  return group + '/' + nodename
131
213
 
132
214
  def create_array(self, where, name, obj):
215
+ """Create an array in h5py file."""
133
216
  self.create_dataset(f'{where}/{name}', data=obj)
134
217
 
135
218
  def create_extendable_array(self, nodename, shape, precision, group=None):
219
+ """Create an extendable array using h5py."""
136
220
  in_file_path = self._get_in_file_path(nodename, group)
137
221
  self.create_dataset(in_file_path, shape=shape, dtype=precision, maxshape=(None, shape[1]))
138
222
 
139
223
  def get_data_by_reference(self, nodename, group=None):
224
+ """Get data by reference using h5py."""
140
225
  in_file_path = self._get_in_file_path(nodename, group)
141
226
  return self[in_file_path]
142
227
 
143
228
  def set_node_attribute(self, node, attrname, value):
229
+ """Set an attribute on an h5py node."""
144
230
  node.attrs[attrname] = value
145
231
 
146
232
  def get_node_attribute(self, node, attrname):
233
+ """Get an attribute from an h5py node."""
147
234
  return node.attrs[attrname]
148
235
 
149
236
  def append_data(self, node, data):
237
+ """Append data to an h5py dataset."""
150
238
  old_shape = node.shape
151
239
  new_shape = (old_shape[0] + data.shape[0], data.shape[1])
152
240
  node.resize(new_shape)
153
241
  node[old_shape[0] : new_shape[0], :] = data
154
242
 
155
243
  def remove_data(self, nodename, group=None):
244
+ """Remove data from h5py file."""
156
245
  in_file_path = self._get_in_file_path(nodename, group)
157
246
  del self[in_file_path]
158
247
 
159
248
  def create_new_group(self, name, group=None):
249
+ """Create a new group in h5py file."""
160
250
  in_file_path = self._get_in_file_path(name, group)
161
251
  self.create_group(in_file_path)
162
252
  return in_file_path
163
253
 
164
254
  def get_child_nodes(self, nodename):
255
+ """Get child nodes from an h5py group."""
165
256
  for childnode in self[nodename]:
166
257
  yield (childnode, self[f'{nodename}/{childnode}'])
167
258
 
@@ -182,15 +273,19 @@ if config.have_h5py:
182
273
  return result
183
274
 
184
275
  class H5CacheFileH5py(H5CacheFileBase, H5FileH5py):
276
+ """Hdf5 Cache File based on h5py."""
277
+
185
278
  compression_filter = 'lzf'
186
279
  # compression_filter = 'blosc' # unavailable...
187
280
 
188
281
  def is_cached(self, nodename, group=None):
282
+ """Check if data is cached in h5py file."""
189
283
  if not group:
190
284
  group = '/'
191
285
  return group + nodename in self
192
286
 
193
287
  def create_compressible_array(self, nodename, shape, precision, group=None):
288
+ """Create a compressible array in h5py cache file."""
194
289
  in_file_path = self._get_in_file_path(nodename, group)
195
290
  self.create_dataset(
196
291
  in_file_path,
@@ -202,6 +297,7 @@ if config.have_h5py:
202
297
 
203
298
 
204
299
  def _get_h5file_class():
300
+ """Get the appropriate H5File class based on configuration."""
205
301
  if config.h5library in ['pytables', 'tables']:
206
302
  return H5FileTables
207
303
  if config.h5library == 'h5py':
@@ -210,6 +306,7 @@ def _get_h5file_class():
210
306
 
211
307
 
212
308
  def _get_cachefile_class():
309
+ """Get the appropriate H5CacheFile class based on configuration."""
213
310
  if config.h5library in ['pytables', 'tables']:
214
311
  return H5CacheFileTables
215
312
  if config.h5library == 'h5py':
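
The new docstrings for create_extendable_array, append_data, and the attribute accessors describe the pattern both backends implement: a dataset with a fixed number of columns and an unlimited first axis that is grown block by block. A minimal sketch of that pattern written directly against h5py (file name, dataset name, and attribute are illustrative, not taken from Acoular) could look like this:

import numpy as np
import h5py

with h5py.File('example.h5', 'w') as f:
    # create_extendable_array: fixed number of columns, unlimited rows
    node = f.create_dataset('time_data', shape=(0, 4), dtype='float32', maxshape=(None, 4))

    # append_data: grow the first axis, then write the new block
    block = np.random.rand(128, 4).astype('float32')
    old_rows = node.shape[0]
    node.resize((old_rows + block.shape[0], block.shape[1]))
    node[old_rows:, :] = block

    # set_node_attribute / get_node_attribute
    node.attrs['sample_freq'] = 51200.0
    print(node.shape, node.attrs['sample_freq'])
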
acoular/internal.py CHANGED
@@ -2,10 +2,13 @@
2
2
  # Copyright (c) Acoular Development Team.
3
3
  # ------------------------------------------------------------------------------
4
4
 
5
+ """Implements a digest function for caching of traits based on a unique identifier."""
6
+
5
7
  from hashlib import md5
6
8
 
7
9
 
8
10
  def digest(obj, name='digest'):
11
+ """Generate a unique digest for the given object based on its traits."""
9
12
  str_ = [str(obj.__class__).encode('UTF-8')]
10
13
  for do_ in obj.trait(name).depends_on:
11
14
  vobj = obj
@@ -19,6 +22,7 @@ def digest(obj, name='digest'):
19
22
 
20
23
 
21
24
  def ldigest(obj_list):
25
+ """Generate a unique digest for a list of objects based on their traits."""
22
26
  str_ = []
23
27
  for i in obj_list:
24
28
  str_.append(str(i.digest).encode('UTF-8'))
acoular/microphones.py CHANGED
@@ -14,7 +14,7 @@ Implements support for array microphone arrangements.
14
14
  import xml.dom.minidom
15
15
  from pathlib import Path
16
16
 
17
- from numpy import array, average
17
+ import numpy as np
18
18
  from scipy.spatial.distance import cdist
19
19
  from traits.api import (
20
20
  CArray,
@@ -24,15 +24,13 @@ from traits.api import (
24
24
  Property,
25
25
  Union,
26
26
  cached_property,
27
- on_trait_change,
27
+ observe,
28
28
  )
29
29
 
30
30
  # acoular imports
31
- from .deprecation import deprecated_alias
32
31
  from .internal import digest
33
32
 
34
33
 
35
- @deprecated_alias({'mpos_tot': 'pos_total', 'mpos': 'pos', 'from_file': 'file'}, read_only=['mpos'])
36
34
  class MicGeom(HasStrictTraits):
37
35
  """
38
36
  Provide the geometric arrangement of microphones in an array.
@@ -41,6 +39,16 @@ class MicGeom(HasStrictTraits):
41
39
  microphone array. The positions can be read from an XML file or set programmatically. Invalid
42
40
  microphones can be excluded by specifying their indices via :attr:`invalid_channels`.
43
41
 
42
+ .. _units_note_microphones:
43
+ .. admonition:: Unit of length
44
+
45
+ The source code is agnostic to the unit of length. The microphone positions' coordinates are
46
+ assumed to be in meters. This is consistent with the standard
47
+ :class:`~acoular.environments.Environment` class which uses the speed of sound at 20°C at sea
48
+ level under standard atmosphere pressure in m/s. If the microphone positions' coordinates are
49
+ provided in a unit other than meter, it is advisable to change the
50
+ :attr:`~acoular.environments.Environment.c` attribute to match the given unit.
51
+
44
52
  Notes
45
53
  -----
46
54
  - The microphone geometry as in :attr:`total_pos` is automatically changed if the :attr:`file`
@@ -133,11 +141,13 @@ class MicGeom(HasStrictTraits):
133
141
 
134
142
  #: Array containing the ``x, y, z`` positions of all microphones, including invalid ones, shape
135
143
  #: ``(3,`` :attr:`num_mics` ``)``. This is set automatically when :attr:`file` changes or
136
- #: explicitly by assigning an array of floats.
144
+ #: explicitly by assigning an array of floats. All coordinates are in meters by default
145
+ #: (:ref:`see here <units_note_microphones>`).
137
146
  pos_total = CArray(dtype=float, shape=(3, None), desc='x, y, z position of all microphones')
138
147
 
139
148
  #: Array containing the ``x, y, z`` positions of valid microphones (i.e., excluding those in
140
149
  #: :attr:`invalid_channels`), shape ``(3,`` :attr:`num_mics` ``)``. (read-only)
150
+ #: All coordinates are in meters by default (:ref:`see here <units_note_microphones>`).
141
151
  pos = Property(depends_on=['pos_total', 'invalid_channels'], desc='x, y, z position of used microphones')
142
152
 
143
153
  #: List of indices indicating microphones to be excluded from calculations and results.
@@ -166,7 +176,7 @@ class MicGeom(HasStrictTraits):
166
176
  if len(self.invalid_channels) == 0:
167
177
  return self.pos_total
168
178
  allr = [i for i in range(self.pos_total.shape[-1]) if i not in self.invalid_channels]
169
- return self.pos_total[:, array(allr)]
179
+ return self.pos_total[:, np.array(allr)]
170
180
 
171
181
  @cached_property
172
182
  def _get_num_mics(self):
@@ -175,7 +185,7 @@ class MicGeom(HasStrictTraits):
175
185
  @cached_property
176
186
  def _get_center(self):
177
187
  if self.pos.any():
178
- center = average(self.pos, axis=1)
188
+ center = np.average(self.pos, axis=1)
179
189
  # set very small values to zero
180
190
  center[abs(center) < 1e-16] = 0.0
181
191
  return center
@@ -187,8 +197,8 @@ class MicGeom(HasStrictTraits):
187
197
  return cdist(self.pos.T, self.pos.T).max()
188
198
  return None
189
199
 
190
- @on_trait_change('file')
191
- def _import_mpos(self):
200
+ @observe('file')
201
+ def _import_mpos(self, event): # noqa ARG002
192
202
  # Import the microphone positions from an XML file.
193
203
  #
194
204
  # This method parses the XML file specified in :attr:`file` and extracts the ``x``, ``y``,
@@ -222,7 +232,7 @@ class MicGeom(HasStrictTraits):
222
232
  for el in doc.getElementsByTagName('pos'):
223
233
  names.append(el.getAttribute('Name'))
224
234
  xyz.append([float(el.getAttribute(a)) for a in 'xyz'])
225
- self.pos_total = array(xyz, 'd').swapaxes(0, 1)
235
+ self.pos_total = np.array(xyz, 'd').swapaxes(0, 1)
226
236
 
227
237
  def export_mpos(self, filename):
228
238
  """
@@ -251,6 +261,8 @@ class MicGeom(HasStrictTraits):
251
261
  index of the microphone.
252
262
  - This method only exports the positions of the valid microphones (those not listed in
253
263
  :attr:`invalid_channels`).
264
+ - All coordinates (x, y, z) are exported in meters by default (:ref:`see here
265
+ <units_note_microphones>`).
254
266
  """
255
267
  filepath = Path(filename)
256
268
  basename = filepath.stem
acoular/process.py CHANGED
@@ -10,8 +10,6 @@ General purpose blockwise processing methods independent of the domain (time or
10
10
  Average
11
11
  Cache
12
12
  SampleSplitter
13
- TimeAverage
14
- TimeCache
15
13
  SamplesBuffer
16
14
  """
17
15
 
@@ -21,12 +19,11 @@ from inspect import currentframe
21
19
  from warnings import warn
22
20
 
23
21
  import numpy as np
24
- from traits.api import Any, Array, Bool, Dict, Enum, Instance, Int, Property, Union, cached_property, on_trait_change
22
+ from traits.api import Any, Array, Bool, Dict, Enum, Instance, Int, Property, Union, cached_property, observe
25
23
 
26
24
  # acoular imports
27
25
  from .base import Generator, InOut
28
26
  from .configuration import config
29
- from .deprecation import deprecated_alias
30
27
  from .h5cache import H5cache
31
28
  from .h5files import H5CacheFileBase
32
29
  from .internal import digest
@@ -51,7 +48,7 @@ class LockedGenerator:
51
48
 
52
49
  See Also
53
50
  --------
54
- :class:`acoular.process.SampleSplitter` :
51
+ :class:`~acoular.process.SampleSplitter` :
55
52
  Distribute data from a source to several following objects in a block-wise manner.
56
53
  """
57
54
 
@@ -65,7 +62,6 @@ class LockedGenerator:
65
62
  return self.it.__next__()
66
63
 
67
64
 
68
- @deprecated_alias({'naverage': 'num_per_average', 'numsamples': 'num_samples'}, read_only=['numsamples'])
69
65
  class Average(InOut):
70
66
  """
71
67
  Calculate the average across consecutive time samples or frequency snapshots.
@@ -80,7 +76,7 @@ class Average(InOut):
80
76
 
81
77
  See Also
82
78
  --------
83
- :class:`acoular.base.InOut` :
79
+ :class:`~acoular.base.InOut` :
84
80
  Receive data from any source domain and return signals in the same domain.
85
81
 
86
82
  Examples
@@ -214,8 +210,8 @@ class Cache(InOut):
214
210
 
215
211
  See Also
216
212
  --------
217
- :class:`acoular.base.InOut` : Receive data from any source domain and return signals in the same
218
- domain.
213
+ :class:`~acoular.base.InOut` : Receive data from any source domain and return signals in the
214
+ same domain.
219
215
 
220
216
  Examples
221
217
  --------
@@ -542,8 +538,8 @@ class SampleSplitter(InOut):
542
538
  next_block = next(self._source_generator)
543
539
  [self.block_buffer[obj].appendleft(next_block) for obj in self.block_buffer]
544
540
 
545
- @on_trait_change('buffer_size')
546
- def _change_buffer_size(self): #
541
+ @observe('buffer_size')
542
+ def _change_buffer_size(self, event): # noqa: ARG002
547
543
  for obj in self.block_buffer:
548
544
  self._remove_block_buffer(obj)
549
545
  self._create_block_buffer(obj)
@@ -675,48 +671,6 @@ class SampleSplitter(InOut):
675
671
  raise OSError(msg)
676
672
 
677
673
 
678
- class TimeAverage(Average):
679
- """
680
- Calculate the average of the signal.
681
-
682
- .. deprecated:: 24.10
683
- The use of :class:`~acoular.process.TimeAverage` is deprecated
684
- and will be removed in Acoular version 25.07.
685
- Please use :class:`~acoular.process.Average` instead for future compatibility.
686
-
687
- Alias for :class:`~acoular.process.Average`.
688
- """
689
-
690
- def __init__(self, *args, **kwargs):
691
- super().__init__(*args, **kwargs)
692
- warn(
693
- 'Using TimeAverage is deprecated and will be removed in Acoular version 25.07. Use Average instead.',
694
- DeprecationWarning,
695
- stacklevel=2,
696
- )
697
-
698
-
699
- class TimeCache(Cache):
700
- """
701
- Cache source signals in cache file.
702
-
703
- .. deprecated:: 24.10
704
- The use of :class:`~acoular.process.TimeCache` is deprecated
705
- and will be removed in Acoular version 25.07.
706
- Please use :class:`~acoular.process.Cache` instead for future compatibility.
707
-
708
- Alias for :class:`~acoular.process.Cache`.
709
- """
710
-
711
- def __init__(self, *args, **kwargs):
712
- super().__init__(*args, **kwargs)
713
- warn(
714
- 'Using TimeCache is deprecated and will be removed in Acoular version 25.07. Use Cache instead.',
715
- DeprecationWarning,
716
- stacklevel=2,
717
- )
718
-
719
-
720
674
  class SamplesBuffer(InOut):
721
675
  """
722
676
  Handle buffering of samples from a source.
acoular/sdinput.py CHANGED
@@ -14,14 +14,12 @@ from traits.api import Any, Bool, Enum, Float, Int, Property, cached_property, o
14
14
  # acoular imports
15
15
  from .base import SamplesGenerator
16
16
  from .configuration import config
17
- from .deprecation import deprecated_alias
18
17
  from .internal import digest
19
18
 
20
19
  if config.have_sounddevice:
21
20
  import sounddevice as sd
22
21
 
23
22
 
24
- @deprecated_alias({'numchannels': 'num_channels', 'numsamples': 'num_samples', 'collectsamples': 'collect_samples'})
25
23
  class SoundDeviceSamplesGenerator(SamplesGenerator):
26
24
  """Controller for sound card hardware using sounddevice library.
27
25
 
@@ -87,15 +85,20 @@ class SoundDeviceSamplesGenerator(SamplesGenerator):
87
85
  self._sample_freq = f
88
86
 
89
87
  def device_properties(self):
90
- """Returns
88
+ """
89
+ Display the properties of the sounddevice input device.
90
+
91
+ Returns
91
92
  -------
92
93
  Dictionary of device properties according to sounddevice
93
94
  """
94
95
  return sd.query_devices(self.device)
95
96
 
96
97
  def result(self, num):
97
- """Python generator that yields the output block-wise. Use at least a
98
- block-size of one ring cache block.
98
+ """
99
+ Python generator that yields the output block-wise.
100
+
101
+ Use at least a block-size of one ring cache block.
99
102
 
100
103
  Parameters
101
104
  ----------