sofar 0.3.1__py2.py3-none-any.whl → 1.1.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. sofar/__init__.py +13 -7
  2. sofar/io.py +423 -0
  3. sofar/sofa.py +1795 -0
  4. sofar/sofa_conventions/VERSION +1 -0
  5. sofar/sofa_conventions/conventions/FreeFieldDirectivityTF_1.1.csv +59 -0
  6. sofar/sofa_conventions/conventions/FreeFieldDirectivityTF_1.1.json +444 -0
  7. sofar/{conventions/source → sofa_conventions/conventions}/FreeFieldHRIR_1.0.csv +3 -3
  8. sofar/{conventions → sofa_conventions/conventions}/FreeFieldHRIR_1.0.json +3 -3
  9. sofar/{conventions/source → sofa_conventions/conventions}/FreeFieldHRTF_1.0.csv +2 -2
  10. sofar/{conventions → sofa_conventions/conventions}/FreeFieldHRTF_1.0.json +3 -3
  11. sofar/{conventions/source → sofa_conventions/conventions}/GeneralFIR-E_2.0.csv +2 -2
  12. sofar/{conventions → sofa_conventions/conventions}/GeneralFIR-E_2.0.json +2 -2
  13. sofar/{conventions/source/GeneralFIR_2.0.csv → sofa_conventions/conventions/GeneralFIR_1.0.csv} +2 -2
  14. sofar/{conventions/GeneralFIR_2.0.json → sofa_conventions/conventions/GeneralFIR_1.0.json} +2 -2
  15. sofar/{conventions/source/GeneralFIR_1.0.csv → sofa_conventions/conventions/GeneralSOS_1.0.csv} +11 -11
  16. sofar/{conventions/GeneralFIR_1.0.json → sofa_conventions/conventions/GeneralSOS_1.0.json} +48 -37
  17. sofar/{conventions/source → sofa_conventions/conventions}/GeneralTF-E_1.0.csv +3 -3
  18. sofar/{conventions → sofa_conventions/conventions}/GeneralTF-E_1.0.json +4 -4
  19. sofar/{conventions/source → sofa_conventions/conventions}/GeneralTF_1.0.csv +1 -1
  20. sofar/{conventions → sofa_conventions/conventions}/GeneralTF_1.0.json +1 -1
  21. sofar/{conventions/source → sofa_conventions/conventions}/GeneralTF_2.0.csv +4 -4
  22. sofar/{conventions → sofa_conventions/conventions}/GeneralTF_2.0.json +4 -4
  23. sofar/sofa_conventions/conventions/SimpleFreeFieldHRIR_1.0.csv +47 -0
  24. sofar/{conventions → sofa_conventions/conventions}/SimpleFreeFieldHRIR_1.0.json +1 -1
  25. sofar/{conventions/source → sofa_conventions/conventions}/SimpleFreeFieldHRSOS_1.0.csv +1 -1
  26. sofar/{conventions → sofa_conventions/conventions}/SimpleFreeFieldHRSOS_1.0.json +1 -1
  27. sofar/{conventions/source/SimpleFreeFieldHRTF_2.0.csv → sofa_conventions/conventions/SimpleFreeFieldHRTF_1.0.csv} +3 -3
  28. sofar/{conventions/SimpleFreeFieldHRTF_2.0.json → sofa_conventions/conventions/SimpleFreeFieldHRTF_1.0.json} +4 -4
  29. sofar/{conventions/source → sofa_conventions/conventions}/SimpleHeadphoneIR_1.0.csv +9 -9
  30. sofar/sofa_conventions/conventions/SimpleHeadphoneIR_1.0.json +396 -0
  31. sofar/{conventions/source → sofa_conventions/conventions}/SingleRoomMIMOSRIR_1.0.csv +18 -8
  32. sofar/{conventions → sofa_conventions/conventions}/SingleRoomMIMOSRIR_1.0.json +124 -50
  33. sofar/{conventions/source → sofa_conventions/conventions}/SingleRoomSRIR_1.0.csv +18 -8
  34. sofar/{conventions → sofa_conventions/conventions}/SingleRoomSRIR_1.0.json +124 -50
  35. sofar/{conventions/source → sofa_conventions/conventions/deprecated}/FreeFieldDirectivityTF_1.0.csv +2 -2
  36. sofar/{conventions → sofa_conventions/conventions/deprecated}/FreeFieldDirectivityTF_1.0.json +2 -2
  37. sofar/sofa_conventions/conventions/deprecated/MultiSpeakerBRIR_0.3.csv +48 -0
  38. sofar/sofa_conventions/conventions/deprecated/SimpleFreeFieldHRIR_0.4.csv +43 -0
  39. sofar/sofa_conventions/conventions/deprecated/SimpleFreeFieldHRIR_0.4.json +333 -0
  40. sofar/{conventions/source/SimpleFreeFieldHRIR_1.0.csv → sofa_conventions/conventions/deprecated/SimpleFreeFieldTF_0.4.csv} +15 -18
  41. sofar/sofa_conventions/conventions/deprecated/SimpleFreeFieldTF_0.4.json +340 -0
  42. sofar/sofa_conventions/conventions/deprecated/SimpleFreeFieldTF_1.0.csv +44 -0
  43. sofar/sofa_conventions/conventions/deprecated/SimpleFreeFieldTF_1.0.json +340 -0
  44. sofar/sofa_conventions/conventions/deprecated/SimpleHeadphoneIR_0.1.csv +51 -0
  45. sofar/sofa_conventions/conventions/deprecated/SimpleHeadphoneIR_0.1.json +396 -0
  46. sofar/sofa_conventions/conventions/deprecated/SimpleHeadphoneIR_0.2.csv +51 -0
  47. sofar/{conventions/SimpleHeadphoneIR_1.0.json → sofa_conventions/conventions/deprecated/SimpleHeadphoneIR_0.2.json} +3 -3
  48. sofar/sofa_conventions/conventions/deprecated/SingleRoomDRIR_0.2.csv +47 -0
  49. sofar/sofa_conventions/conventions/deprecated/SingleRoomDRIR_0.2.json +360 -0
  50. sofar/sofa_conventions/rules/deprecations.json +12 -0
  51. sofar/sofa_conventions/rules/rules.json +800 -0
  52. sofar/sofa_conventions/rules/unit_aliases.json +11 -0
  53. sofar/sofa_conventions/rules/upgrade.json +190 -0
  54. sofar/update_conventions.py +427 -0
  55. sofar/utils.py +315 -0
  56. {sofar-0.3.1.dist-info → sofar-1.1.0.dist-info}/AUTHORS.rst +1 -0
  57. sofar-1.1.0.dist-info/METADATA +89 -0
  58. sofar-1.1.0.dist-info/RECORD +75 -0
  59. {sofar-0.3.1.dist-info → sofar-1.1.0.dist-info}/WHEEL +1 -1
  60. {sofar-0.3.1.dist-info → sofar-1.1.0.dist-info}/top_level.txt +1 -0
  61. tests/__init__.py +0 -0
  62. tests/test_deprecations.py +19 -0
  63. tests/test_io.py +344 -0
  64. tests/test_sofa.py +354 -0
  65. tests/test_sofa_upgrade_conventions.py +102 -0
  66. tests/test_sofa_verify.py +472 -0
  67. tests/test_utils.py +241 -0
  68. sofar/conventions/source/MultiSpeakerBRIR_0.3.csv +0 -48
  69. sofar/sofar.py +0 -2531
  70. sofar-0.3.1.dist-info/METADATA +0 -69
  71. sofar-0.3.1.dist-info/RECORD +0 -46
  72. /sofar/{conventions/source → sofa_conventions/conventions}/SimpleFreeFieldSOS_1.0.csv +0 -0
  73. /sofar/{conventions → sofa_conventions/conventions}/SimpleFreeFieldSOS_1.0.json +0 -0
  74. /sofar/{conventions/source → sofa_conventions/conventions/deprecated}/GeneralFIRE_1.0.csv +0 -0
  75. /sofar/{conventions → sofa_conventions/conventions/deprecated}/GeneralFIRE_1.0.json +0 -0
  76. /sofar/{conventions → sofa_conventions/conventions/deprecated}/MultiSpeakerBRIR_0.3.json +0 -0
  77. /sofar/{conventions/source → sofa_conventions/conventions/deprecated}/SingleRoomDRIR_0.3.csv +0 -0
  78. /sofar/{conventions → sofa_conventions/conventions/deprecated}/SingleRoomDRIR_0.3.json +0 -0
  79. {sofar-0.3.1.dist-info → sofar-1.1.0.dist-info}/LICENSE +0 -0
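Most of the churn above comes from splitting the monolithic sofar/sofar.py into sofar/io.py, sofar/sofa.py, sofar/utils.py, and sofar/update_conventions.py, and from moving the bundled convention files from sofar/conventions to sofar/sofa_conventions (with deprecated conventions in their own subfolder). The sketch below is a minimal usage example pieced together from the docstrings of the removed module shown in the diff that follows; it assumes that the Sofa class, read_sofa, and write_sofa remain exposed at package level in 1.1.0 and that write_sofa takes the file name first, neither of which this diff excerpt shows directly. The file name and sampling rate are illustrative.

    import sofar as sf

    # create a SOFA object filled with the default values of a convention
    # (see sf.list_conventions() for the available conventions)
    sofa = sf.Sofa("SimpleFreeFieldHRIR")

    # data can be entered as numbers, lists, or numpy arrays; lists are
    # converted to numpy arrays with at least two dimensions
    sofa.Data_IR = [1, 1]
    sofa.Data_SamplingRate = 48000

    # write to disk and read back; verify() runs by default in both calls,
    # and read_sofa appends '.sofa' if it is not part of the file name
    sf.write_sofa("example.sofa", sofa)
    sofa = sf.read_sofa("example.sofa")

The full content of the removed sofar/sofar.py follows.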
sofar/sofar.py DELETED
@@ -1,2531 +0,0 @@
1
- import os
2
- import re
3
- import glob
4
- import json
5
- import requests
6
- from datetime import datetime
7
- import platform
8
- import numpy as np
9
- import numpy.testing as npt
10
- from packaging.version import parse as version_parse
11
- import warnings
12
- from bs4 import BeautifulSoup
13
- from netCDF4 import Dataset, stringtochar, chartostring
14
- from copy import deepcopy
15
- import sofar as sf
16
-
17
-
18
- class Sofa():
19
- """Create a new SOFA object.
20
-
21
- Parameters
22
- ----------
23
- convention : str
24
- The name of the convention from which the SOFA file is created. See
25
- :py:func:`~sofar.list_conventions`.
26
- mandatory : bool, optional
27
- If ``True``, only the mandatory data of the convention will be
28
- returned. The default is ``False``, which returns mandatory and
29
- optional data.
30
- version : str, optional
31
- The version of the convention as a string, e.g., ``'2.0'``. The
32
- default is ``'latest'``. Also see :py:func:`~sofar.list_conventions`.
33
- verify : bool, optional
34
- Verify the SOFA object by calling :py:func:`~Sofa.verify`. This helps
35
- to find potential errors in the default values and is thus recommended.
36
- If creating a file does not work, try to call `Sofa` with
37
- ``verify=False``. The default is ``True``.
38
-
39
- Returns
40
- -------
41
- sofa : Sofa
42
- A SOFA object filled with the default values of the convention.
43
-
44
- Examples
45
- --------
46
- Create a new SOFA object with default values
47
-
48
- .. code-block:: python
49
-
50
- import sofar as sf
51
-
52
- # create SOFA object
53
- sofa = sf.Sofa("SimpleFreeFieldHRIR")
54
-
55
- Add data as a list
56
-
57
- .. code-block:: python
58
-
59
- sofa.Data_IR = [1, 1]
60
-
61
- Data can be entered as numbers, numpy arrays or lists. Note the following
62
-
63
- 1. Lists are converted to numpy arrays with at least two dimensions, i.e.,
64
- ``sofa.Data_IR`` is converted to a numpy array of shape (1, 2)
65
- 2. Missing dimensions are appended when writing the SOFA object to disk,
66
- i.e., ``sofa.Data_IR`` is written as an array of shape (1, 2, 1) because
67
- the SOFA standard AES69-2020 defines it as a three dimensional array
68
- with the dimensions (`M: measurements`, `R: receivers`, `N: samples`)
69
- 3. When reading data from a SOFA file, array data is always returned as
70
- numpy arrays and singleton trailing dimensions are discarded (numpy
71
- default). I.e., ``sofa.Data_IR`` will again be an array of shape (1, 2)
72
- after writing and reading to and from disk.
73
- 4. One dimensional arrays with only one element will be converted to scalar
74
- values. E.g. ``sofa.Data_SamplingRate`` is stored as an array of shape
75
- (1, ) inside SOFA files (according to the SOFA standard AES69-2020) but
76
- will be a scalar inside SOFA objects after reading from disk.
77
-
78
-
79
- For more examples refer to the `Quick tour of SOFA and sofar` at
80
- https://sofar.readthedocs.io/en/latest/
81
- """
82
-
83
- # these have to be set here, because they are used in __setattr__ and
84
- # Python checks if they exist upon class creation
85
-
86
- # don't allow adding attributes and deleting/writing read only attributes
87
- _protected = False
88
- # list of read only attributes (filled upon init)
89
- _read_only_attr = []
90
-
91
- def __init__(self, convention, mandatory=False, version="latest",
92
- verify=True):
93
- """See class docstring"""
94
-
95
- # get convention
96
- self._convention = self._load_convention(convention, version)
97
-
98
- # update read only attributes
99
- self._read_only_attr = [
100
- key for key in self._convention.keys()
101
- if self._read_only(self._convention[key]["flags"])]
102
-
103
- # add attributes with default values
104
- self._convention_to_sofa(mandatory)
105
-
106
- # add and update the API
107
- if verify:
108
- self.verify(version)
109
-
110
- self._protected = True
111
-
112
- def __setattr__(self, name: str, value):
113
- # don't allow new attributes to be added outside the class
114
- if self._protected and not hasattr(self, name):
115
- raise TypeError(f"{name} is an invalid attribute")
116
-
117
- # don't allow setting read only attributes
118
- if name in self._read_only_attr and self._protected:
119
- raise TypeError(f"{name} is a read only attribute")
120
-
121
- # convert to numpy array or scalar
122
- if not isinstance(value, (str, dict, np.ndarray)):
123
- value = np.atleast_2d(value)
124
- if value.size == 1:
125
- value = value.flatten()[0]
126
-
127
- super.__setattr__(self, name, value)
128
-
129
- def __delattr__(self, name: str):
130
- # can't delete non existing attributes
131
- if not hasattr(self, name):
132
- raise TypeError(f"{name} is not an attribute")
133
- # delete anything if not frozen, delete non mandatory
134
- if not self._protected or \
135
- not self._mandatory(self._convention[name]["flags"]):
136
- super().__delattr__(name)
137
-
138
- # check if custom field has to be deleted
139
- if hasattr(self, "_custom"):
140
- if name in self._custom:
141
- self._custom.pop(name)
142
- else:
143
- raise TypeError(
144
- f"{name} is a mandatory attribute that can not be deleted")
145
-
146
- def __repr__(self):
147
- return (f"sofar.SOFA object: {self.GLOBAL_SOFAConventions} "
148
- f"{self.GLOBAL_SOFAConventionsVersion}")
149
-
150
- @property
151
- def list_dimensions(self):
152
- """
153
- Print the dimensions of the SOFA object
154
-
155
- The SOFA file standard defines the following dimensions:
156
-
157
- M
158
- number of measurements
159
- N
160
- number of samples, frequencies, SOS coefficients
161
- (depending on self.GLOBAL_DataType)
162
- R
163
- Number of receivers or SH coefficients
164
- (depending on ReceiverPosition_Type)
165
- E
166
- Number of emitters or SH coefficients
167
- (depending on EmitterPosition_Type)
168
- S
169
- Maximum length of a string in a string array
170
- C
171
- Size of the coordinate dimension. This is always three.
172
- I
173
- Single dimension. This is always one.
174
-
175
- see :py:func:`~Sofa.info` to see the shapes of the data inside the SOFA
176
- object.
177
- """
178
-
179
- # Check if the dimensions can be updated
180
- self._update_dimensions()
181
-
182
- # get verbose description for dimension N
183
- if self.GLOBAL_DataType.startswith("FIR"):
184
- N_verbose = "samples"
185
- elif self.GLOBAL_DataType.startswith("TF"):
186
- N_verbose = "frequencies"
187
- elif self.GLOBAL_DataType.startswith("SOS"):
188
- N_verbose = "SOS coefficients"
189
- else:
190
- # This line can not be tested. An invalid DataType would be caught
191
- # in self.verify above. This is to make sure we don't miss something
192
- # in case new DataTypes are added to SOFA in the future.
193
- raise ValueError((
194
- "GLOBAL_DataType start with 'FIR', 'TF', "
195
- f"or 'SOS' but not with {self.GLOBAL_DataType}"))
196
-
197
- # get verbose description for dimensions R and E
198
- R_verbose = "receiver spherical harmonics coefficients" if \
199
- 'harmonic' in self.ReceiverPosition_Type else "receiver"
200
- E_verbose = "emitter spherical harmonics coefficients" if \
201
- 'harmonic' in self.EmitterPosition_Type else "emitter"
202
-
203
- dimensions = {
204
- "M": "measurements",
205
- "N": N_verbose,
206
- "R": R_verbose,
207
- "E": E_verbose,
208
- "S": "maximum string length",
209
- "C": "coordinate dimensions, fixed",
210
- "I": "single dimension, fixed"}
211
-
212
- info_str = ""
213
- for key, value in self._api.items():
214
- dim_info = dimensions[key] if key in dimensions \
215
- else "custom dimension"
216
-
217
- info_str += f"{key} = {value} {dim_info}"
218
-
219
- if dim_info != "custom dimension":
220
- for key2, value2 in self._convention.items():
221
- dim = value2["dimensions"]
222
- if dim is not None and key.lower() in dim:
223
- info_str += \
224
- f" (set by {key2} of dimension {dim.upper()})"
225
- break
226
-
227
- info_str += "\n"
228
-
229
- print(info_str)
230
-
231
- def get_dimension(self, dimension):
232
- """
233
- Get size of a SOFA dimension
234
-
235
- SOFA dimensions specify the shape of the data contained in a SOFA
236
- object. For a list of all dimensions see :py:func:`~list_dimensions`.
237
-
238
- Parameters
239
- ----------
240
- dimension : str
241
- The dimension as a string, e.g., ``'N'``.
242
-
243
- Returns
244
- -------
245
- size : int
246
- the size of the queried dimension.
247
- """
248
-
249
- # Check if the dimensions can be updated
250
- self._update_dimensions()
251
-
252
- if dimension not in self._api:
253
- raise ValueError((
254
- f"{dimension} is not a valid dimension. "
255
- "See Sofa.list_dimensions for a list of valid dimensions."))
256
-
257
- return self._api[dimension]
258
-
259
- def _update_dimensions(self):
260
- """
261
- Call verify and raise an error if the dimensions could not be updated.
262
-
263
- used in Sofa.list_dimensions and Sofa.get_dimension
264
- """
265
-
266
- issues = self.verify(version="match", issue_handling="return")
267
- if issues is not None and ("data of wrong type" in issues or
268
- "variables of wrong shape" in issues or
269
- not hasattr(self, "_api")):
270
- raise ValueError(("Dimensions can not be shown because variables "
271
- "of wrong type or shape were detected. "
272
- "Call Sofa.verify() for more information."))
273
-
274
- def info(self, info):
275
- """
276
- Print information about the convention of a SOFA object.
277
-
278
- Prints the variable type (attribute, double, string), shape, flags
279
- (mandatory, read only) and comment (if any) for all or selected
280
- entries.
281
-
282
- Parameters
283
- ----------
284
- info : str
285
- Specifies the kind of information that is printed:
286
-
287
- ``'all'`` ``'mandatory'`` ``'optional'`` ``'read only'`` ``'data'``
288
- Print the name, type, shape, and flags and comment for all or
289
- selected entries of the SOFA object. ``'data'`` does not show
290
- entries of type attribute.
291
- key
292
- If key is the name of an object attribute, all information for
293
- that attribute will be printed.
294
- """
295
-
296
- # update the private attribute `_convention` to make sure the required
297
- # meta data is in place
298
- self._update_convention(version="match")
299
-
300
- # list of all attributes
301
- keys = [k for k in self.__dict__.keys() if not k.startswith("_")]
302
-
303
- # start printing the information
304
- info_str = (
305
- f"{self.GLOBAL_SOFAConventions} "
306
- F"{self.GLOBAL_SOFAConventionsVersion} "
307
- f"(SOFA version {self.GLOBAL_Version})\n")
308
- info_str += "-" * len(info_str) + "\n"
309
-
310
- if info in ["all", "mandatory", "optional", "read only", "data"]:
311
-
312
- info_str += f"showing {info} entries : type (shape), flags\n\n"
313
-
314
- for key in keys:
315
-
316
- # check if field should be skipped
317
- flags = self._convention[key]["flags"]
318
- if (not self._mandatory(flags) and info == "mandatory") \
319
- or \
320
- (self._mandatory(flags) and info == "optional") \
321
- or \
322
- (not self._read_only(flags) and info == "read only") \
323
- or \
324
- (self._convention[key]['type'] == "attribute" and
325
- info == "data"):
326
- continue
327
-
328
- info_str += f"{key} : {self._convention[key]['type']}"
329
-
330
- if self._convention[key]['dimensions']:
331
- info_str += \
332
- f" ({self._convention[key]['dimensions'].upper()})"
333
-
334
- if self._mandatory(flags):
335
- info_str += ", mandatory"
336
- else:
337
- info_str += ", optional"
338
- if self._read_only(flags):
339
- info_str += ", read only"
340
-
341
- if self._convention[key]['comment']:
342
- info_str += f"\n {self._convention[key]['comment']}\n"
343
- else:
344
- info_str += "\n"
345
-
346
- elif info in keys:
347
-
348
- for key in [k for k in keys if info in k]:
349
- comment = str(self._convention[key]['comment'])
350
-
351
- info_str += (
352
- f"{key}\n"
353
- f" type: {self._convention[key]['type']}\n"
354
- f" mandatory: "
355
- f"{self._mandatory(self._convention[key]['flags'])}\n"
356
- f" read only: "
357
- f"{self._read_only(self._convention[key]['flags'])}\n"
358
- f" default: {self._convention[key]['default']}\n"
359
- f" shape: "
360
- f"{str(self._convention[key]['dimensions']).upper()}\n"
361
- f" comment: {comment}\n")
362
- else:
363
- raise ValueError(f"info='{info}' is invalid")
364
-
365
- print(info_str)
366
-
367
- def inspect(self, file=None, issue_handling="print"):
368
- """
369
- Get information about data inside a SOFA object.
370
-
371
- Prints the values of all attributes and variables with six or fewer
372
- entries and the shapes and type of all numeric and string variables.
373
- When printing the values of arrays, single dimensions are discarded for
374
- ease of display, i.e., an array of shape (1, 3, 2) will be displayed as
375
- an array of shape (3, 2).
376
-
377
- Parameters
378
- ----------
379
- file : str
380
- Full path of a file under which the information is to be stored in
381
- plain text. The default ``None`` only prints the information.
382
- issue_handling : str, optional
383
- Defines how issues detected during verification of the SOFA object
384
- are handled (see :py:func:`~sofar.sofar.Sofa.verify`)
385
-
386
- ``'raise'``
387
- Warnings and errors are raised if issues are detected
388
- ``'print'``
389
- Issues are printed without raising warnings and errors
390
- ``'return'``
391
- Issues are returned as string but neither raised nor printed
392
- ``'ignore'``
393
- Issues are ignored, i.e., not raised, printed, or returned.
394
-
395
- The default is ``'print'``.
396
- """
397
-
398
- # update the private attribute `_convention` to make sure the required
399
- # meta data is in place
400
- self.verify(version="match", issue_handling=issue_handling)
401
-
402
- # list of all attributes
403
- keys = [k for k in self.__dict__.keys() if not k.startswith("_")]
404
-
405
- # start printing the information
406
- info_str = (
407
- f"{self.GLOBAL_SOFAConventions} "
408
- F"{self.GLOBAL_SOFAConventionsVersion} "
409
- f"(SOFA version {self.GLOBAL_Version})\n")
410
- info_str += "-" * len(info_str) + "\n"
411
-
412
- for key in keys:
413
-
414
- info_str += key + " : "
415
- value = getattr(self, key)
416
-
417
- # information for attributes and scalars
418
- if self._convention[key]["type"] == "attribute" or value.size == 1:
419
- info_str += str(value) + "\n"
420
- # information for variables
421
- else:
422
- # get shape and dimension
423
- shape = value.shape
424
- dimension = self._dimensions[key]
425
-
426
- # pad shape if required (trailing single dimensions are
427
- # discarded following the numpy default)
428
- while len(shape) < len(dimension):
429
- shape += (1, )
430
-
431
- # make verbose shape, e.g., '(M=100, R=2, N=128, '
432
- shape_verbose = "("
433
- for s, d in zip(shape, dimension):
434
- shape_verbose += f"{d}={s}, "
435
-
436
- # add shape information
437
- info_str += shape_verbose[:-2] + ")\n"
438
- # add value information if not too much
439
- if value.size < 7:
440
- info_str += " " + \
441
- str(np.squeeze(value)).replace("\n", "\n ") + "\n"
442
-
443
- # write to text file
444
- if file is not None:
445
- with open(file, 'w') as f_id:
446
- f_id.write(info_str + "\n")
447
-
448
- # output to console
449
- print(info_str)
450
-
451
- def add_variable(self, name, value, dtype, dimensions):
452
- """
453
- Add custom variable to the SOFA object, i.e., numeric or string arrays.
454
-
455
- Parameters
456
- ----------
457
- name : str
458
- Name of the new variable.
459
- value : any
460
- value to be added (see `dtype` for restrictions).
461
- dtype : str
462
- Type of the entry to be added in netCDF style:
463
-
464
- ``'double'``
465
- Use this to store numeric data that can be provided as number
466
- list or numpy array.
467
- ``'string'``
468
- Use this to store string variables as numpy string arrays of
469
- type ``'U'`` or ``'S'``.
470
-
471
- dimensions : str
472
- The shape of the new entry as a string. See
473
- ``self.info('dimensions')``.
474
-
475
- Examples
476
- --------
477
- .. code-block:: python
478
-
479
- import sofar as sf
480
- sofa = sf.Sofa("GeneralTF")
481
-
482
- # add numeric data
483
- sofa.add_variable("Temperature", 25.1, "double", "MI")
484
-
485
- # add GLOBAL and Variable attribute
486
- sofa.add_entry(
487
- "GLOBAL_DateMeasured", "8.08.2021", "attribute", None)
488
- sofa.add_entry(
489
- "Temperature_Units", "degree Celsius", "attribute", None)
490
-
491
- # add a string variable
492
- sofa.add_variable(
493
- "Comment", "Measured with wind screen", "string", "MS")
494
- """
495
-
496
- self._add_entry(name, value, dtype, dimensions)
497
-
498
- def add_attribute(self, name, value):
499
- """
500
- Add custom attribute to the SOFA object.
501
-
502
- Parameters
503
- ----------
504
- name : str
505
- Name of the new attribute.
506
- value : str
507
- value to be added.
508
-
509
- Examples
510
- --------
511
- .. code-block:: python
512
-
513
- import sofar as sf
514
- sofa = sf.Sofa("GeneralTF")
515
-
516
- # add GLOBAL and Variable attribute
517
- sofa.add_attribute("GLOBAL_DateMeasured", "8.08.2021")
518
- sofa.add_attribute("Data_Real_Units", "Pascal")
519
-
520
- """
521
-
522
- self._add_entry(name, value, 'attribute', None)
523
-
524
- def delete(self, name):
525
- """
526
- Delete variable or attribute from SOFA object.
527
-
528
- Note that mandatory data can not be deleted. Call
529
- :py:func:`Sofa.info("optional") <sofar.sofar.Sofa.info>` to list all
530
- optional variables and attributes.
531
-
532
- Parameters
533
- ----------
534
- name : str
535
- Name of the variable or attribute to be deleted
536
- """
537
- delattr(self, name)
538
-
539
- def _add_entry(self, name, value, dtype, dimensions):
540
- """
541
- Add custom data to a SOFA object. See add_variable and add_attribute
542
- for more information.
543
- """
544
-
545
- # check input
546
- if hasattr(self, name):
547
- raise ValueError(f"Entry {name} already exists")
548
- if dtype not in ["attribute", "double", "string"]:
549
- raise ValueError(
550
- f"dtype is {dtype} but must be attribute, double, or string")
551
- if "_" in name and dtype != "attribute":
552
- raise ValueError(("underscores '_' in the name are only "
553
- "allowed for attributes"))
554
- if dtype == "attribute":
555
- if name.count("_") != 1 or \
556
- (name.startswith("Data_") and (name.count("_") == 0 or
557
- name.count("_") > 2)):
558
- raise ValueError((f"The name of {name} must have the "
559
- "form VariableName_AttributeName"))
560
- if not name.startswith("GLOBAL_") and \
561
- name[:name.rindex("_")] not in self._convention:
562
- raise ValueError((f"Adding Attribute {name} requires "
563
- f"variable {name[:name.rindex('_')]}"))
564
- if dimensions is None and dtype != "attribute":
565
- raise ValueError(("dimensions must be provided for entries of "
566
- "dtype double and string"))
567
- if dimensions is not None:
568
- dimensions = dimensions.upper()
569
- for dimension in dimensions:
570
- if dimension not in "ERMNCIS":
571
- warnings.warn(
572
- f"Added custom dimension {dimensions} to SOFA object")
573
-
574
- # add attribute to class
575
- _add_custom_api_entry(
576
- self, name, value, None, dimensions, dtype)
577
-
578
- def verify(self, version="latest", issue_handling="raise"):
579
- """
580
- Verify a SOFA object against the SOFA standard.
581
-
582
- This function updates the API, and checks the following
583
-
584
- - Are all mandatory fields contained? If not, mandatory fields are added
585
- with their default value and a warning is raised.
586
- - Are the names of variables and attributes in accordance with the SOFA
587
- standard? If not, a warning is raised.
588
- - Are the data types in accordance with the SOFA standard?
589
- - Are the dimensions of the variables consistent and in accordance
590
- with the SOFA standard?
591
- - Are the values of attributes consistent and in accordance with the
592
- SOFA standard?
593
-
594
- .. note::
595
- :py:func:`~verify` is automatically called when you create a new
596
- SOFA object, read a SOFA file from disk, and write a SOFA file to
597
- disk (using the default parameters).
598
-
599
- The API of a SOFA object consists of four parts that are stored
600
- as dictionaries in private attributes. This is required for writing data
601
- with :py:func:`~sofa.write_sofa` and should usually not be manipulated
602
- outside of :py:func:`~verify`
603
-
604
- self._convention
605
- The SOFA convention with default values, variable dimensions, flags
606
- and comments. These data are read from the official SOFA
607
- conventions contained in the SOFA Matlab/Octave API.
608
- self._dimensions
609
- The detected dimensions of the data inside the SOFA object.
610
- self._api
611
- The size of the dimensions (see :py:func:`~list_dimensions`). This
612
- specifies the dimensions of the data inside the SOFA object.
613
- self._custom
614
- Stores information of custom variables that are not defined by the
615
- convention. The format is the same as in `self._convention`.
616
-
617
- Parameters
618
- ----------
619
- version : str, optional
620
- The version to which the API is updated.
621
-
622
- ``'latest'``
623
- Use the latest API and upgrade the SOFA file if required.
624
- ``'match'``
625
- Match the version of the sofa file.
626
- str
627
- Version string, e.g., ``'1.0'``. Note that this might downgrade
628
- the SOFA object
629
-
630
- The default is ``'latest'``
631
- issue_handling : str, optional
632
- Defines how detected issues are handled
633
-
634
- ``'raise'``
635
- Warnings and errors are raised if issues are detected
636
- ``'print'``
637
- Issues are printed without raising warnings and errors
638
- ``'return'``
639
- Issues are returned as string but neither raised nor printed
640
- ``'ignore'``
641
- Issues are ignored, i.e., not raised, printed, or returned.
642
-
643
- The default is ``'raise'``.
644
-
645
- Returns
646
- -------
647
- issues : str, None
648
- Detected issues as a string. None if no issues were detected. Note
649
- that this is only returned if ``issue_handling='return'`` (see
650
- above)
651
-
652
- """
653
- # NOTE: This function collects warnings and errors and tries to output
654
- # them in a block. This makes the code slightly more complicated but
655
- # is more convenient for the user and with respect to a potential
656
- # future web based tool for verifying SOFA files.
657
-
658
- # initialize warning and error messages
659
- error_msg = "\nERRORS\n------\n"
660
- warning_msg = "\nWARNINGS\n--------\n"
661
-
662
- # ---------------------------------------------------------------------
663
- # 0. update the convention
664
- self._update_convention(version)
665
-
666
- # ---------------------------------------------------------------------
667
- # 1. check if the mandatory attributes are contained
668
- current_warning = ""
669
- keys = [key for key in self.__dict__.keys() if not key.startswith("_")]
670
-
671
- for key in self._convention.keys():
672
- if self._mandatory(self._convention[key]["flags"]) \
673
- and key not in keys:
674
- # add missing data with default value
675
- self._protected = False
676
- setattr(self, key, self._convention[key]["default"])
677
- self._protected = True
678
-
679
- # prepare to raise warning
680
- current_warning += "- " + key + "\n"
681
-
682
- if current_warning:
683
- warning_msg += "Added mandatory data with default values:\n"
684
- warning_msg += current_warning
685
-
686
- # ---------------------------------------------------------------------
687
- # 2. verify data type
688
- current_error = ""
689
- for key in keys:
690
-
691
- # handle dimensions
692
- dimensions = self._convention[key]["dimensions"]
693
- dtype = self._convention[key]["type"]
694
-
695
- # check data type
696
- value = getattr(self, key)
697
-
698
- if dtype == "attribute":
699
- if not isinstance(value, str):
700
- current_error += \
701
- f"- {key} must be string but is {type(value)}\n"
702
-
703
- elif dtype == "double":
704
- # multiple checks needed because sofar does not force the user
705
- # to initially pass data as numpy arrays
706
- if not isinstance(value,
707
- (np.int_, np.float_, np.double, np.ndarray)):
708
- current_error += (f"- {key} must be int, float or numpy "
709
- f"array but is {type(value)}\n")
710
-
711
- if isinstance(value, np.ndarray) and not (
712
- str(value.dtype).startswith('int') or
713
- str(value.dtype).startswith('float')):
714
- current_error += (f"- {key} must be int or float "
715
- f"but is {type(value.dtype)}\n")
716
-
717
- elif dtype == "string":
718
- # multiple checks needed because sofar does not force the user
719
- # to initially pass data as numpy arrays
720
- if not isinstance(value, (str, np.ndarray)):
721
- current_error += (f"- {key} must be string or numpy array "
722
- f"but is {type(value)}\n")
723
-
724
- if isinstance(value, np.ndarray) and not (
725
- str(value.dtype).startswith('<U') or
726
- str(value.dtype).startswith('<S')):
727
- current_error += (f"- {key} must be U or S "
728
- f"but is {type(value.dtype)}\n")
729
-
730
- else:
731
- # Could only be tested by manipulating JSON convention files
732
- # (Could take different data types in the future and convert to
733
- # numpy double arrays.)
734
- current_error += (
735
- f"- {key}: Error in convention. Type must be "
736
- f"double, string, or attribute but is {dtype}\n")
737
-
738
- if current_error:
739
- error_msg += "Detected data of wrong type:\n"
740
- error_msg += current_error
741
-
742
- # if an error occurred up to here, it has to be handled. Otherwise
743
- # detecting the dimensions might fail. Warnings are not reported until
744
- # the end
745
- if error_msg != "\nERRORS\n------\n" and issue_handling != "ignore":
746
- _, issues = self._verify_handle_issues(
747
- "\nWARNINGS\n--------\n", error_msg, issue_handling)
748
-
749
- if issue_handling == "print":
750
- return
751
- else: # (issue_handling == "return"):
752
- return issues
753
-
754
- # ---------------------------------------------------------------------
755
- # 3. Verify names of entries
756
-
757
- # check attributes without variables
758
- current_error = ""
759
- for key in keys:
760
-
761
- if self._convention[key]["type"] != "attribute" or \
762
- key.count("_") == 0:
763
- continue
764
-
765
- if (key[:key.rindex("_")] not in self._convention and
766
- not key.startswith("GLOBAL_")):
767
- current_error += "- " + key + "\n"
768
-
769
- if current_error:
770
- error_msg += "Detected attributes with missing variables:\n"
771
- error_msg += current_error
772
-
773
- # check number of underscores
774
- current_error = ""
775
- for key in keys:
776
-
777
- if self._convention[key]["type"] != "attribute":
778
- continue
779
-
780
- # the case above caught attributes with too many underscores
781
- if key.count("_") == 0:
782
- current_error += "- " + key + "\n"
783
-
784
- if current_error:
785
- error_msg += (
786
- "Detected attribute names with too many or little underscores."
787
- " Names must have the form Variable_Attribute, Data_Attribute "
788
- "(one underscore), or Data_Variable_Attribute (two "
789
- "underscores):\n")
790
- error_msg += current_error
791
-
792
- # check numeric variables
793
- current_error = ""
794
- for key in keys:
795
-
796
- if self._convention[key]["type"] == "attribute":
797
- continue
798
-
799
- if "_" in key.replace("Data_", ""):
800
- current_error += "- " + key + "\n"
801
-
802
- if current_error:
803
- error_msg += (
804
- "Detected variable names with too many underscores."
805
- "Underscores are only allowed for the variable Data:\n")
806
- error_msg += current_error
807
-
808
- # ---------------------------------------------------------------------
809
- # 4. Get dimensions (E, R, M, N, S, c, I, and custom)
810
-
811
- # initialize required API fields
812
- self._protected = False
813
- self._dimensions = {}
814
- self._api = {}
815
- self._protected = True
816
-
817
- # get keys for checking the dimensions (all SOFA variables)
818
- keys = [key for key in self.__dict__.keys()
819
- if key in self._convention
820
- and self._convention[key]["dimensions"] is not None]
821
- if hasattr(self, "_custom"):
822
- keys_custom = [key for key in self._custom.keys()
823
- if not key.startswith("_")
824
- and self._custom[key]["dimensions"] is not None]
825
- keys += keys_custom
826
-
827
- S = 0
828
- for key in keys:
829
-
830
- value = getattr(self, key)
831
- dimensions = self._convention[key]["dimensions"]
832
-
833
- # - dimensions are given as string, e.g., 'mRN', or 'IC, MC'
834
- # - defined by lower case letters in `dimensions`
835
- for id, dim in enumerate(dimensions.split(", ")[0]):
836
- if dim not in "ICS" and dim.islower():
837
- # numeric data
838
- self._api[dim.upper()] = \
839
- _nd_newaxis(value, 4).shape[id]
840
- if dim == "S":
841
- # string data
842
- S = max(S, np.max(self._get_size_and_shape_of_string_var(
843
- value, key)[0]))
844
-
845
- # add fixed sizes
846
- self._api["C"] = 3
847
- self._api["I"] = 1
848
- self._api["S"] = S
849
-
850
- # ---------------------------------------------------------------------
851
- # 5. verify dimensions of data
852
- current_error = ""
853
- for key in keys:
854
-
855
- # handle dimensions
856
- dimensions = self._convention[key]["dimensions"]
857
- dtype = self._convention[key]["type"]
858
-
859
- # get value and actual shape
860
- try:
861
- value = getattr(self, key).copy()
862
- except AttributeError:
863
- value = getattr(self, key)
864
-
865
- if dtype in ["attribute", "string"]:
866
- # string or string array like data
867
- shape_act = self._get_size_and_shape_of_string_var(
868
- value, key)[1]
869
- elif len(dimensions.split(",")[0]) > 1:
870
- # multidimensional array like data
871
- shape_act = _atleast_nd(value, 4).shape
872
- else:
873
- # scalar of single dimensional array like data
874
- shape_act = (np.array(value).size, )
875
-
876
- shape_matched = False
877
- for dim in dimensions.split(", "):
878
-
879
- # get the reference shape ('S' translates to a shape of 1,
880
- # because the strings are stored in an array whose shape does
881
- # not reflect the max. lengths of the actual strings inside it)
882
- shape_ref = tuple(
883
- [self._api[d.upper()] if d != "S" else 1 for d in dim])
884
-
885
- # get shape for comparison to correct length by cropping and
886
- # appending singleton dimensions if required
887
- shape_compare = shape_act[:len(shape_ref)]
888
- for _ in range(len(shape_ref) - len(shape_compare)):
889
- shape_compare += (1, )
890
-
891
- # check if the shapes match and write to API
892
- if shape_compare == shape_ref:
893
- shape_matched = True
894
- self._dimensions[key] = dim.upper()
895
- break
896
-
897
- if not shape_matched:
898
- # get possible dimensions in verbose form, i.e., "(M=2, C=3)"
899
- dimensions_verbose = []
900
- for dim in dimensions.upper().replace(" ", "").split(","):
901
- dimensions_verbose.append(
902
- f"({', '.join([f'{d}={self._api[d]}' for d in dim])})")
903
-
904
- current_error += (
905
- f"- {key} has shape {shape_compare} but must "
906
- f"have {', '.join(dimensions_verbose)}\n")
907
-
908
- if current_error:
909
- error_msg += "Detected variables of wrong shape:\n"
910
- error_msg += current_error
911
-
912
- # ---------------------------------------------------------------------
913
- # 6. check restrictions on the content of SOFA files
914
- data, data_type, api, convention, unit_aliases = _sofa_restrictions()
915
-
916
- # general restrictions on data
917
- current_error = ""
918
- for key in data.keys():
919
-
920
- ref = data[key]["value"]
921
- if hasattr(self, key):
922
-
923
- # test if the value is valid
924
- test = getattr(self, key)
925
- if not self._verify_value(test, ref, unit_aliases):
926
- current_error += \
927
- f"- {key} is {test} but must be {', '.join(ref)}\n"
928
-
929
- # check dependencies
930
- if "dependency" not in data[key]:
931
- continue
932
-
933
- for key_dep, ref_dep in data[key]["dependency"].items():
934
-
935
- # check if dependency is contained in SOFA object
936
- # hard to test, because mandatory fields are added by sofar
937
- # this is more to be future proof
938
- if not hasattr(self, key_dep):
939
- current_error += (f"- {key_dep} must be given if "
940
- f"{key} is in SOFA object\n")
941
- continue
942
-
943
- # check if dependency has the correct value
944
- test_dep = getattr(self, key_dep)
945
- if not (isinstance(ref, list) and
946
- isinstance(ref_dep, list)):
947
- continue
948
-
949
- idx = ref.index(test)
950
- if not self._verify_value(test_dep, ref_dep[idx],
951
- unit_aliases):
952
- current_error += (
953
- f"- {key_dep} is {test_dep} but must be "
954
- f"{ref_dep[idx]} if {key} is {test}\n")
955
-
956
- # restriction posed by GLOBAL_DataType
957
- if self.GLOBAL_DataType.startswith("FIR"):
958
- data_str = "FIR"
959
- elif self.GLOBAL_DataType.startswith("TF"):
960
- data_str = "TF"
961
- elif self.GLOBAL_DataType.startswith("SOS"):
962
- data_str = "SOS"
963
- else:
964
- # the data type was tested before. This is to prevent redundant
965
- # errors in the next for loop
966
- data_str = False
967
-
968
- if data_str:
969
- for key, value in data_type[data_str].items():
970
-
971
- # hard to test. included to detect problems with future
972
- # conventions
973
- if not hasattr(self, key):
974
- current_error += (
975
- f"- {key} must be contained if"
976
- f" GLOBAL_DataType={self.GLOBAL_DataType}\n")
977
-
978
- if value is not None and getattr(self, key) not in value[0]:
979
- current_error += (f"{key} is {getattr(self, key)} but "
980
- f"must be {value[1]}\n")
981
-
982
- # restrictions on the API
983
- for key, value in api.items():
984
- if hasattr(self, key) and getattr(self, key) == value["value"]:
985
- size = getattr(self, "_api")[value["API"][0]]
986
- if size not in value["API"][1]:
987
- current_error += \
988
- (f"- Dimension {value['API'][0]} is of size {size} "
989
- f"but must be {value['API'][2]} if "
990
- f"{key} is {getattr(self, key)}\n")
991
-
992
- # restrictions from the SOFA convention (on the data and API)
993
- if self.GLOBAL_SOFAConventions in convention:
994
- for key, ref in convention[self.GLOBAL_SOFAConventions].items():
995
-
996
- if key == "API":
997
- for dimension, size in ref.items():
998
- if self._api[dimension] != size:
999
- current_error += \
1000
- (f"- Dimension {dimension} is of size " # noqa
1001
- f"{self._api[dimension]} but must be {size} if "
1002
- f"GLOBAL_SOFAConventions is {key}\n")
1003
- else:
1004
- value = getattr(self, key)
1005
- if value not in ref:
1006
- current_error += \
1007
- f"{key} is {value} but must be {ref}\n"
1008
-
1009
- if current_error:
1010
- error_msg += "Detected violations of the SOFA convention:\n"
1011
- error_msg += current_error
1012
-
1013
- # handle warnings and errors
1014
- if issue_handling != "ignore":
1015
- error_occurred, issues = self._verify_handle_issues(
1016
- warning_msg, error_msg, issue_handling)
1017
-
1018
- if error_occurred:
1019
- if issue_handling == "print":
1020
- return
1021
- elif issue_handling == "return":
1022
- return issues
1023
-
1024
- @staticmethod
1025
- def _verify_value(test, ref, unit_aliases):
1026
- """
1027
- Check a value against the SOFA standard for Sofa.verify()
1028
-
1029
- Parameters
1030
- ----------
1031
- test :
1032
- the value under test
1033
- ref :
1034
- the value enforced by the SOFA standard
1035
- unit_aliases :
1036
- dict of aliases for units from _sofa_restrictions()
1037
-
1038
- Returns
1039
- -------
1040
- ``True`` if `test` and `ref` agree, ``False`` otherwise
1041
- """
1042
-
1043
- value_valid = True
1044
-
1045
- # Don't check the value if ref is None or test in ref
1046
- if ref is not None and test not in ref:
1047
-
1048
- # in case test is a string it might be a unit and unit aliases
1049
- # according to the SOFA standard must be checked
1050
-
1051
- # Following the SOFA standard AES69-2020, units may be separated by
1052
- # `, ` (comma and space), `,` (comma only), and ` ` (space only).
1053
- # (regexp ', ?' matches ', ' and ',')
1054
- ref = re.split(', ?| ', ref) if isinstance(ref, str) else ref
1055
- units = re.split(', ?| ', test) if isinstance(test, str) else []
1056
-
1057
- # check if number of units agree
1058
- if not units or len(ref) != len(units):
1059
- value_valid = False
1060
- return value_valid
1061
-
1062
- # check if units are valid
1063
- for unit, unit_ref in zip(units, ref):
1064
- if unit != unit_ref and (unit not in unit_aliases
1065
- or unit_aliases[unit] != unit_ref):
1066
- value_valid = False
1067
- break
1068
-
1069
- return value_valid
1070
-
1071
- @staticmethod
1072
- def _verify_handle_issues(warning_msg, error_msg, issue_handling):
1073
- """Handle warnings and errors from Sofa.verify"""
1074
-
1075
- # handle warnings
1076
- if warning_msg != "\nWARNINGS\n--------\n":
1077
- if issue_handling == "raise":
1078
- warnings.warn(warning_msg)
1079
- elif issue_handling == "print":
1080
- print(warning_msg)
1081
- else:
1082
- warning_msg = None
1083
-
1084
- # handle errors
1085
- if error_msg != "\nERRORS\n------\n":
1086
- if issue_handling == "raise":
1087
- raise ValueError(error_msg)
1088
- elif issue_handling == "print":
1089
- print(error_msg)
1090
- else:
1091
- error_msg = None
1092
-
1093
- # flag indicating if an error occurred
1094
- error_occurred = error_msg is not None
1095
-
1096
- # verbose issue message
1097
- if warning_msg and error_msg:
1098
- issues = error_msg + "\n" + warning_msg
1099
- elif warning_msg:
1100
- issues = warning_msg
1101
- elif error_msg:
1102
- issues = error_msg
1103
- else:
1104
- issues = None
1105
-
1106
- return error_occurred, issues
1107
-
1108
- def copy(self):
1109
- """Return a copy of the SOFA object."""
1110
- return deepcopy(self)
1111
-
1112
- def _update_convention(self, version):
1113
- """
1114
- Add SOFA convention to SOFA object in private attribute `_convention`.
1115
- If the object already contains a convention, it will be overwritten.
1116
-
1117
- Parameters
1118
- ----------
1119
- version : str
1120
- ``'latest'``
1121
- Use the latest API and upgrade the SOFA file if required.
1122
- ``'match'``
1123
- Match the version of the sofa file.
1124
- str
1125
- Version string, e.g., ``'1.0'``.
1126
- """
1127
-
1128
- # verify convention and version
1129
- c_current = self.GLOBAL_SOFAConventions
1130
- v_current = str(self.GLOBAL_SOFAConventionsVersion)
1131
-
1132
- v_new = _verify_convention_and_version(
1133
- version, v_current, c_current)
1134
-
1135
- # load and add convention and version
1136
- convention = self._load_convention(
1137
- c_current, v_new)
1138
- self._convention = convention
1139
-
1140
- if v_current != v_new:
1141
- self._protected = False
1142
- self.GLOBAL_SOFAConventionsVersion = v_new
1143
- self._protected = True
1144
-
1145
- # feedback in case of up/downgrade
1146
- if float(v_current) < float(v_new):
1147
- warnings.warn(("Upgraded SOFA object from "
1148
- f"version {v_current} to {v_new}"))
1149
- elif float(v_current) > float(v_new):
1150
- warnings.warn(("Downgraded SOFA object from "
1151
- f"version {v_current} to {v_new}"))
1152
-
1153
- # check if custom fields can be added
1154
- if hasattr(self, "_custom"):
1155
- for key in self._custom:
1156
- self._convention[key] = self._custom[key]
1157
-
1158
- def _load_convention(self, convention, version):
1159
- """
1160
- Load SOFA convention from json file.
1161
-
1162
- Parameters
1163
- ----------
1164
- convention : str
1165
- The name of the convention from which the SOFA file is created. See
1166
- :py:func:`~sofar.list_conventions`.
1167
- version : str
1168
- ``'latest'``
1169
- Use the latest API and upgrade the SOFA file if required.
1170
- str
1171
- Version string, e.g., ``'1.0'``.
1172
-
1173
- Returns
1174
- -------
1175
- convention : dict
1176
- The SOFA convention as a dictionary
1177
- """
1178
- # check input
1179
- if not isinstance(convention, str):
1180
- raise TypeError(("Convention must be a string "
1181
- f"but is of type {type(convention)}"))
1182
-
1183
- # get and check path to json file
1184
- paths = _get_conventions("path")
1185
- path = [path for path in paths
1186
- if os.path.basename(path).startswith(convention + "_")]
1187
-
1188
- if not len(path):
1189
- raise ValueError(
1190
- (f"Convention '{convention}' not found. See "
1191
- "sofar.list_conventions() for available conventions."))
1192
-
1193
- # get available versions as strings
1194
- versions = [p.split('_')[-1][:-5] for p in path]
1195
-
1196
- # select the correct version
1197
- if version == "latest":
1198
- versions = np.array([float(v) for v in versions])
1199
- path = path[np.argmax(versions)]
1200
- else:
1201
- if version not in versions:
1202
- raise ValueError((
1203
- f"Version {version} not found. "
1204
- f"Available versions are {versions}"))
1205
- path = path[versions.index(version)]
1206
-
1207
- # read convention from json file
1208
- with open(path, "r") as file:
1209
- convention = json.load(file)
1210
-
1211
- # replace ':' and '.' in key names by '_'
1212
- convention = {
1213
- key.replace(':', '_'): value for key, value in convention.items()}
1214
- convention = {
1215
- key.replace('.', '_'): value for key, value in convention.items()}
1216
-
1217
- return convention
1218
-
1219
- def _convention_to_sofa(self, mandatory):
1220
- """
1221
- Use SOFA convention to create attributes with default values.
1222
-
1223
- Parameters
1224
- ----------
1225
- mandatory : bool
1226
- Flag to indicate if only mandatory fields are to be included.
1227
- """
1228
-
1229
- # populate the SOFA file
1230
- for key in self._convention.keys():
1231
-
1232
- # skip optional fields if requested
1233
- if not self._mandatory(self._convention[key]["flags"]) \
1234
- and mandatory:
1235
- continue
1236
-
1237
- # get the default value
1238
- default = self._convention[key]["default"]
1239
- if isinstance(default, list):
1240
- ndim = len(self._convention[key]["dimensions"].split(", ")[0])
1241
- default = _atleast_nd(default, ndim)
1242
-
1243
- # create attribute with default value
1244
- setattr(self, key, default)
1245
-
1246
- # write API and date specific fields (some read only)
1247
- now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
1248
- self._protected = False
1249
- self.GLOBAL_DateCreated = now
1250
- self.GLOBAL_DateModified = now
1251
- self.GLOBAL_APIName = "sofar SOFA API for Python (pyfar.org)"
1252
- self.GLOBAL_APIVersion = sf.__version__
1253
- self.GLOBAL_ApplicationName = "Python"
1254
- self.GLOBAL_ApplicationVersion = (
1255
- f"{platform.python_version()} "
1256
- f"[{platform.python_implementation()} - "
1257
- f"{platform.python_compiler()}]")
1258
- self._protected = True
1259
-
1260
- @staticmethod
1261
- def _get_size_and_shape_of_string_var(value, key):
1262
- """
1263
- String variables can be strings, lists of strings, or numpy arrays of
1264
- strings. This function returns the length of the longest string S
1265
- inside the string variable and the shape of the string variable as
1266
- required by the SOFA definition. Note that the shape is the shape of
1267
- the array that holds the strings. NETCDF stores all string variables in
1268
- arrays.
1269
- """
1270
-
1271
- if isinstance(value, str):
1272
- S = len(value)
1273
- shape = (1, 1)
1274
- elif isinstance(value, list):
1275
- S = len(max(value, key=len))
1276
- shape = np.array(value).shape
1277
- elif isinstance(value, np.ndarray):
1278
- S = max(np.vectorize(len)(value))
1279
- shape = value.shape
1280
- else:
1281
- raise TypeError((f"{key} must be a string, numpy string array, "
1282
- "or list of strings"))
1283
-
1284
- return S, shape
1285
-
1286
- @staticmethod
1287
- def _mandatory(flags):
1288
- """
1289
- Check if a field is mandatory
1290
-
1291
- Parameters
1292
- ----------
1293
- flags : None, str
1294
- The flags from convention[key]["flags"]
1295
-
1296
- Returns
1297
- -------
1298
- is_mandatory : bool
1299
- """
1300
- # a field is mandatory if flags are given and contain 'm'
1301
- if flags is None:
1302
- is_mandatory = False
1303
- elif "m" not in flags:
1304
- is_mandatory = False
1305
- else:
1306
- is_mandatory = True
1307
-
1308
- return is_mandatory
1309
-
1310
- @staticmethod
1311
- def _read_only(flags):
1312
- """
1313
- Check if a field is read only
1314
-
1315
- Parameters
1316
- ----------
1317
- flags : None, str
1318
- The flags from convention[key]["flags"]
1319
-
1320
- Returns
1321
- -------
1322
- is_read_only : bool
1323
- """
1324
- # a field is read only if flags are given and contain 'r'
1325
- if flags is None:
1326
- is_read_only = False
1327
- elif "r" not in flags:
1328
- is_read_only = False
1329
- else:
1330
- is_read_only = True
1331
-
1332
- return is_read_only
1333
-
1334
-
1335
- def _update_conventions(conventions_path=None):
1336
- """
1337
- Update SOFA conventions.
1338
-
1339
- A SOFA convention defines the kind of data and the data format that is
1340
- stored in a SOFA file. Updating the conventions is done in two steps:
1341
-
1342
- 1.
1343
- Download official SOFA conventions as csv files from
1344
- https://github.com/sofacoustics/API_MO/tree/master/API_MO/conventions.
1345
- 2.
1346
- Convert csv files to json files for easier handling
1347
-
1348
- The csv and json files are stored at sofar/conventions. Sofar works only on
1349
- the json files. To get a list of all currently available SOFA conventions
1350
- and their paths see :py:func:`~sofar.list_conventions`.
1351
-
1352
- .. note::
1353
- If the official conventions contain errors, calling this function might
1354
- break sofar. Be sure that you want to do this.
1355
-
1356
- Parameters
1357
- ----------
1358
- conventions_path : str, optional
1359
- Path to the folder where the conventions are saved. The default is
1360
- ``None``, which saves the conventions inside the sofar package.
1361
- Conventions saved under a different path can not be used by sofar.
1362
- """
1363
-
1364
- # url for parsing and downloading the convention files
1365
- url = ("https://github.com/sofacoustics/API_MO/tree/"
1366
- "master/API_MO/conventions")
1367
- url_raw = ("https://raw.githubusercontent.com/sofacoustics/API_MO/"
1368
- "master/API_MO/conventions")
1369
- ext = 'csv'
1370
-
1371
- print(f"Reading SOFA conventions from {url} ...")
1372
-
1373
- # get file names of conventions from the SOFA Matlab/Octave API
1374
- page = requests.get(url).text
1375
- soup = BeautifulSoup(page, 'html.parser')
1376
- conventions = [os.path.split(node.get('href'))[1]
1377
- for node in soup.find_all('a')
1378
- if node.get('href').endswith(ext)]
1379
-
1380
- # directory handling
1381
- if conventions_path is None:
1382
- conventions_path = os.path.join(os.path.dirname(__file__),
1383
- "conventions")
1384
- if not os.path.isdir(os.path.join(conventions_path, "source")):
1385
- os.mkdir(os.path.join(conventions_path, "source"))
1386
-
1387
- # Loop and download conventions if they changed
1388
- updated = False
1389
- for convention in conventions:
1390
-
1391
- # exclude these conventions
1392
- if convention.startswith(("General_", "GeneralString_")):
1393
- continue
1394
-
1395
- filename_csv = os.path.join(conventions_path, "source", convention)
1396
-
1397
- # download SOFA convention definitions to package directory
1398
- data = requests.get(url_raw + "/" + convention)
1399
- # remove trailing tabs
1400
- data = data.content.replace(b"\t\n", b"\n").replace(b"\r\n", b"\n")
1401
-
1402
- # check if convention needs to be added or updated
1403
- update = False
1404
- if not os.path.isfile(filename_csv):
1405
- update = True
1406
- updated = f"- added new convention: {convention}"
1407
- else:
1408
- with open(filename_csv, "rb") as file:
1409
- data_current = b"".join(file.readlines())
1410
- data_current = data_current.replace(b"\r\n", b"\n")
1411
- if data_current != data:
1412
- update = True
1413
- updated = f"- updated existing convention: {convention}"
1414
-
1415
- # update convention
1416
- if update:
1417
- with open(filename_csv, "wb") as file:
1418
- file.write(data)
1419
- print(updated)
1420
-
1421
- # compile json files from csv file
1422
- # (this is also done if nothing changed. It won't affect the content of
1423
- # the json files but the time-stamp will be updated)
1424
- _compile_conventions()
1425
-
1426
- if updated:
1427
- print("... done.")
1428
- else:
1429
- print("... conventions already up to date.")
1430
-
1431
-
1432
- def _compile_conventions(conventions_path=None):
1433
- """
1434
- Compile SOFA conventions (json files) from source conventions (csv files
1435
- from SOFA API_MO), i.e., only do step 2 from `_update_conventions`. This is
1436
- a helper function for debugging and developing and might break sofar.
1437
-
1438
- Parameters
1439
- ----------
1440
- conventions_path : str
1441
- Path to the folder containing the conventions as json files (might be
1442
- empty) and the source convention as csv files in the subfolder `source`
1443
- (must not be empty). The default is ``None``, which uses the
1444
- default location inside the sofar package.
1445
- """
1446
- # directory handling
1447
- if conventions_path is None:
1448
- conventions_path = os.path.join(os.path.dirname(__file__),
1449
- "conventions")
1450
- if not os.path.isdir(os.path.join(conventions_path, "source")):
1451
- raise ValueError("conventions_path must contain the folder 'source'")
1452
-
1453
- # get list of source conventions
1454
- csv_files = glob.glob(os.path.join(
1455
- conventions_path, "source", "*.csv"))
1456
- csv_files = [os.path.split(csv_file)[1] for csv_file in csv_files]
1457
-
1458
- for csv_file in csv_files:
1459
-
1460
- # directories for reading and writing
1461
- json_file = os.path.join(conventions_path, csv_file[:-3] + "json")
1462
- csv_file = os.path.join(conventions_path, "source", csv_file)
1463
-
1464
- # convert SOFA conventions from csv to json
1465
- convention_dict = sf.sofar._convention_csv2dict(csv_file)
1466
- with open(json_file, 'w') as file:
1467
- json.dump(convention_dict, file, indent=4)
1468
-
1469
-
1470
- def list_conventions():
1471
- """
1472
- List available SOFA conventions by printing to the console.
1473
- """
1474
- print(_get_conventions("string"))
1475
-
1476
-
1477
- def _get_conventions(return_type):
1478
- """
1479
- Get available SOFA conventions.
1480
-
1481
- Parameters
1482
- ----------
1483
- return_type : string
1484
- ``'path'``
1485
- Return a list with the full paths and filenames of the convention
1486
- files (json files)
1487
- ``'path_source'``
1488
- Return a list with the full paths and filenames of the source
1489
- convention files from API_MO (csv files)
1490
- ``'name'``
1491
- Return a list of the convention names without version
1492
- ``'name_version'``
1493
- Return a list of tuples containing the convention name and version.
1494
- ``'string'``
1495
- Returns a string that lists the names and versions of all
1496
- conventions.
1497
-
1498
- Returns
1499
- -------
1500
- See parameter `return_type`.
1501
- """
1502
- # directory containing the SOFA conventions
1503
- if return_type == "path_source":
1504
- directory = os.path.join(
1505
- os.path.dirname(__file__), "conventions", "source")
1506
- reg_str = "*.csv"
1507
- else:
1508
- directory = os.path.join(os.path.dirname(__file__), "conventions")
1509
- reg_str = "*.json"
1510
-
1511
- # SOFA convention files
1512
- paths = [file for file in glob.glob(os.path.join(directory, reg_str))]
1513
-
1514
- conventions_str = "Available SOFA conventions:\n"
1515
-
1516
- conventions = []
1517
- versions = []
1518
- for path in paths:
1519
- fileparts = os.path.basename(path).split(sep="_")
1520
- conventions += [fileparts[0]]
1521
- versions += [fileparts[1][:-5]]
1522
- conventions_str += f"{conventions[-1]} (Version {versions[-1]})\n"
1523
-
1524
- if return_type is None:
1525
- return
1526
- elif return_type.startswith("path"):
1527
- return paths
1528
- elif return_type == "name":
1529
- return conventions
1530
- elif return_type == "name_version":
1531
- return [(n, v) for n, v in zip(conventions, versions)]
1532
- elif return_type == "string":
1533
- return conventions_str
1534
- else:
1535
- raise ValueError(f"return_type {return_type} is invalid")
1536
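A short sketch of `list_conventions` and the `return_type` options of `_get_conventions`; the returned names and versions depend on the convention files shipped with the package, and the `sf.sofar` module path is an assumption:

```python
import sofar as sf

sf.list_conventions()                              # prints "Available SOFA conventions: ..."

names = sf.sofar._get_conventions("name")          # e.g., ["GeneralFIR", "SimpleFreeFieldHRIR", ...]
pairs = sf.sofar._get_conventions("name_version")  # e.g., [("GeneralFIR", "1.0"), ...]
paths = sf.sofar._get_conventions("path")          # full paths to the json convention files
```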
-
1537
-
1538
- def read_sofa(filename, verify=True, version="latest", verbose=True):
1539
- """
1540
- Read SOFA file from disk and convert it to SOFA object.
1541
-
1542
- Numeric data is returned as floats or numpy float arrays unless they have
1543
- missing data, in which case they are returned as numpy masked arrays.
1544
-
1545
- Parameters
1546
- ----------
1547
- filename : str
1548
- The filename. Must end with '.sofa'; a ValueError is raised
1549
- otherwise.
1550
- verify : bool, optional
1551
- Verify and update the SOFA object by calling :py:func:`~Sofa.verify`.
1552
- This helps to find potential errors in the default values and is thus
1553
- recommended. If reading a file does not work, try to call ``read_sofa`` with
1554
- ``verify=False``. The default is ``True``.
1555
- version : str, optional
1556
- Control whether the convention version of the SOFA file is changed.
1557
-
1558
- ``'latest'``
1559
- Update the conventions to the latest version
1560
- ``'match'``
1561
- Do not change the conventions version, i.e. match the version
1562
- of the SOFA file that is being read.
1563
- str
1564
- Force specific version, e.g., ``'1.0'``. Note that this might
1565
- downgrade the SOFA object.
1566
-
1567
- The default is ``'latest'``
1568
- verbose : bool, optional
1569
- Print the names of detected custom variables and attributes. The
1570
- default is ``True``
1571
-
1572
- Returns
1573
- -------
1574
- sofa : Sofa
1575
- The SOFA object filled with the data from the SOFA file.
1576
-
1577
- Notes
1578
- -----
1579
-
1580
- 1. Missing dimensions are appended when writing the SOFA object to disk.
1581
- E.g., if ``sofa.Data_IR`` is of shape (1, 2) it is written as an array
1582
- of shape (1, 2, 1) because the SOFA standard AES69-2020 defines it as a
1583
- three dimensional array with the dimensions (`M: measurements`,
1584
- `R: receivers`, `N: samples`)
1585
- 2. When reading data from a SOFA file, array data is always returned as
1586
- numpy arrays and singleton trailing dimensions are discarded (numpy
1587
- default). I.e., ``sofa.Data_IR`` will again be an array of shape (1, 2)
1588
- after writing and reading to and from disk.
1589
- 3. One dimensional arrays with only one element will be converted to scalar
1590
- values. E.g. ``sofa.Data_SamplingRate`` is stored as an array of shape
1591
- (1, ) inside SOFA files (according to the SOFA standard AES69-2020) but
1592
- will be a scalar inside SOFA objects after reading from disk.
1593
- """
1594
-
1595
- # check the filename
1596
- if not filename.endswith('.sofa'):
1597
- raise ValueError("Filename must end with .sofa")
1598
- if not os.path.isfile(filename):
1599
- raise ValueError(f"{filename} does not exist")
1600
-
1601
- # attributes that are skipped
1602
- skip = ["_Encoding"]
1603
-
1604
- # init list of all and custom attributes
1605
- all_attr = []
1606
- custom = []
1607
-
1608
- # open new NETCDF4 file for reading
1609
- with Dataset(filename, "r", format="NETCDF4") as file:
1610
-
1611
- # get convention name and version
1612
- convention = getattr(file, "SOFAConventions")
1613
- all_attr.append("GLOBAL_SOFAConventions")
1614
- version_in = getattr(file, "SOFAConventionsVersion")
1615
- all_attr.append("GLOBAL_SOFAConventionsVersion")
1616
-
1617
- # check if convention and version exist
1618
- version_out = _verify_convention_and_version(
1619
- version, version_in, convention)
1620
-
1621
- # get SOFA object with default values
1622
- sofa = sf.Sofa(convention, version=version_out, verify=verify)
1623
-
1624
- # allow writing read only attributes
1625
- sofa._protected = False
1626
-
1627
- # load global attributes
1628
- for attr in file.ncattrs():
1629
-
1630
- if attr in ["SOFAConventionsVersion", "SOFAConventions"]:
1631
- # convention and version were already set above
1632
- continue
1633
-
1634
- value = getattr(file, attr)
1635
- all_attr.append("GLOBAL_" + attr)
1636
-
1637
- if not hasattr(sofa, "GLOBAL_" + attr):
1638
- _add_custom_api_entry(sofa, "GLOBAL_" + attr, value, None,
1639
- None, "attribute")
1640
- custom.append("GLOBAL_" + attr)
1641
- sofa._protected = False
1642
- else:
1643
- setattr(sofa, "GLOBAL_" + attr, value)
1644
-
1645
- # load data
1646
- for var in file.variables.keys():
1647
-
1648
- value = _format_value_from_netcdf(file[var][:], var)
1649
- all_attr.append(var.replace(".", "_"))
1650
-
1651
- if hasattr(sofa, var.replace(".", "_")):
1652
- setattr(sofa, var.replace(".", "_"), value)
1653
- else:
1654
- dimensions = "".join([d for d in file[var].dimensions])
1655
- # SOFA only uses dtypes 'double' and 'S1' but netCDF has more
1656
- dtype = "string" if file[var].datatype == "S1" else "double"
1657
- _add_custom_api_entry(sofa, var.replace(".", "_"), value, None,
1658
- dimensions, dtype)
1659
- custom.append(var.replace(".", "_"))
1660
- sofa._protected = False
1661
-
1662
- # load variable attributes
1663
- for attr in [a for a in file[var].ncattrs() if a not in skip]:
1664
-
1665
- value = getattr(file[var], attr)
1666
- all_attr.append(var.replace(".", "_") + "_" + attr)
1667
-
1668
- if not hasattr(sofa, var.replace(".", "_") + "_" + attr):
1669
- _add_custom_api_entry(
1670
- sofa, var.replace(".", "_") + "_" + attr, value, None,
1671
- None, "attribute")
1672
- custom.append(var.replace(".", "_") + "_" + attr)
1673
- sofa._protected = False
1674
- else:
1675
- setattr(sofa, var.replace(".", "_") + "_" + attr, value)
1676
-
1677
- # remove fields from initial Sofa object that were not contained in NetCDF
1678
- # file (initial Sofa object contained mandatory and optional fields)
1679
- attrs = [attr for attr in sofa.__dict__.keys() if not attr.startswith("_")]
1680
- for attr in attrs:
1681
- if attr not in all_attr:
1682
- delattr(sofa, attr)
1683
-
1684
- # do not allow writing read only attributes any more
1685
- sofa._protected = True
1686
-
1687
- # notice about custom entries
1688
- if custom and verbose:
1689
- print(("SOFA file contained custom entries\n"
1690
- "----------------------------------\n"
1691
- f"{', '.join(custom)}"))
1692
-
1693
- # update api
1694
- if verify:
1695
- try:
1696
- sofa.verify(version)
1697
- except: # noqa (No error handling - just improved verbosity)
1698
- raise ValueError((
1699
- "The SOFA object could not be verified, maybe due to errornous"
1700
- " data. Call sofa=sofar.read_sofa(filename, verify=False) and "
1701
- "than sofa.verify() to get more information"))
1702
-
1703
- return sofa
1704
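A hedged usage sketch for `read_sofa`; the file name is hypothetical and the attribute access assumes an HRIR-type convention:

```python
import sofar as sf

# read with verification and upgrade to the latest convention version
sofa = sf.read_sofa("my_hrirs.sofa")

# read as-is: keep the file's convention version and skip the verification
sofa_raw = sf.read_sofa("my_hrirs.sofa", version="match", verify=False)

print(sofa.GLOBAL_SOFAConventions, sofa.Data_SamplingRate)
```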
-
1705
-
1706
- def write_sofa(filename: str, sofa: Sofa, version="latest", compression=4):
1707
- """
1708
- Write a SOFA object to disk as a SOFA file.
1709
-
1710
- Parameters
1711
- ----------
1712
- filename : str
1713
- The filename. Must end with '.sofa'; a ValueError is raised
1714
- otherwise.
1715
- sofa : object
1716
- The SOFA object that is written to disk
1717
- version : str
1718
- The SOFA object is verified and updated with :py:func:`~Sofa.verify`
1719
- before writing to disk. Version specifies, which version of the
1720
- convention is used:
1721
-
1722
- ``'latest'``
1723
- Use the latest version and upgrade the SOFA file if required.
1724
- ``'match'``
1725
- Match the version of the SOFA object.
1726
- str
1727
- Version string, e.g., ``'1.0'``.
1728
-
1729
- The default is ``'latest'``.
1730
- compression : int
1731
- The level of compression with ``0`` being no compression and ``9``
1732
- being the best compression. The default is ``4``, which trades off file
1733
- size against the time for writing files to disk.
1734
-
1735
- Notes
1736
- -----
1737
-
1738
- 1. Missing dimensions are appended when writing the SOFA object to disk.
1739
- E.g., if ``sofa.Data_IR`` is of shape (1, 2) it is written as an array
1740
- of shape (1, 2, 1) because the SOFA standard AES69-2020 defines it as a
1741
- three dimensional array with the dimensions (`M: measurements`,
1742
- `R: receivers`, `N: samples`)
1743
- 2. When reading data from a SOFA file, array data is always returned as
1744
- numpy arrays and singleton trailing dimensions are discarded (numpy
1745
- default). I.e., ``sofa.Data_IR`` will again be an array of shape (1, 2)
1746
- after writing and reading to and from disk.
1747
- 3. One dimensional arrays with only one element will be converted to scalar
1748
- values. E.g. ``sofa.Data_SamplingRate`` is stored as an array of shape
1749
- (1, ) inside SOFA files (according to the SOFA standard AES69-2020) but
1750
- will be a scalar inside SOFA objects after reading from disk.
1751
- """
1752
-
1753
- # check the filename
1754
- if not filename.endswith('.sofa'):
1755
- raise ValueError("Filename must end with .sofa")
1756
-
1757
- # setting the netCDF compression parameter
1758
- zlib = False if compression == 0 else True
1759
-
1760
- # update the dimensions
1761
- sofa.verify(version)
1762
-
1763
- # list of all attribute names
1764
- all_keys = [key for key in sofa.__dict__.keys() if not key.startswith("_")]
1765
-
1766
- # open new NETCDF4 file for writing
1767
- with Dataset(filename, "w", format="NETCDF4") as file:
1768
-
1769
- # write dimensions
1770
- for dim in sofa._api:
1771
- file.createDimension(dim, sofa._api[dim])
1772
-
1773
- # write global attributes
1774
- keys = [key for key in all_keys if key.startswith("GLOBAL_")]
1775
- for key in keys:
1776
- setattr(file, key[7:], str(getattr(sofa, key)))
1777
-
1778
- # write data
1779
- for key in all_keys:
1780
-
1781
- # skip attributes
1782
- # Note: This definition of attribute is blurry:
1783
- # lax definition:
1784
- # sofa._convention[key]["type"] == "attribute":
1785
- # strict definition:
1786
- # ("_" in key and not key.startswith("Data_")) or \
1787
- # key.count("_") > 1
1788
- #
1789
- # The strict definition is implicitly included in the SOFA standard
1790
- # since underscores only occur for variables starting with Data_
1791
- if sofa._convention[key]["type"] == "attribute":
1792
- continue
1793
-
1794
- # get the data and type and shape
1795
- value, dtype = _format_value_for_netcdf(
1796
- getattr(sofa, key), key, sofa._convention[key]["type"],
1797
- sofa._dimensions[key], sofa._api["S"])
1798
-
1799
- # create variable and write data
1800
- shape = tuple([dim for dim in sofa._dimensions[key]])
1801
- tmp_var = file.createVariable(
1802
- key.replace("Data_", "Data."), dtype, shape,
1803
- zlib=zlib, complevel=compression)
1804
- if dtype == "f8":
1805
- tmp_var[:] = value
1806
- else:
1807
- tmp_var[:] = stringtochar(value)
1808
- tmp_var._Encoding = "ascii"
1809
-
1810
- # write variable attributes
1811
- sub_keys = [k for k in all_keys if k.startswith(key + "_")]
1812
- for sub_key in sub_keys:
1813
- setattr(tmp_var, sub_key[len(key)+1:],
1814
- str(getattr(sofa, sub_key)))
1815
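A hedged usage sketch for `write_sofa`; the file names are hypothetical:

```python
import sofar as sf

sofa = sf.Sofa("SimpleFreeFieldHRIR")      # object with default values

sf.write_sofa("default_hrir.sofa", sofa)   # verifies, then writes with compression=4
sf.write_sofa("default_hrir_fast.sofa", sofa, compression=0)  # no compression: larger file, faster write
```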
-
1816
-
1817
- def equals(sofa_a, sofa_b, verbose=True, exclude=None):
1818
- """
1819
- Compare two SOFA objects against each other.
1820
-
1821
- Parameters
1822
- ----------
1823
- sofa_a : Sofa
1824
- SOFA object
1825
- sofa_b : Sofa
1826
- SOFA object
1827
- verbose : bool, optional
1828
- Print differences to the console. The default is True.
1829
- exclude : str, optional
1830
- Specify what fields should be excluded from the comparison
1831
-
1832
- ``'GLOBAL'``
1833
- Exclude all global attributes, i.e., fields starting with 'GLOBAL:'
1834
- ``'DATE'``
1835
- Exclude date attributes, i.e., fields that contain 'Date'
1836
- ``'ATTR'``
1837
- Exclude all attributes, i.e., fields that contain ':'
1838
-
1839
- The default is None, which does not exclude anything.
1840
-
1841
- Returns
1842
- -------
1843
- is_identical : bool
1844
- ``True`` if sofa_a and sofa_b are identical, ``False`` otherwise.
1845
- """
1846
-
1847
- is_identical = True
1848
-
1849
- # get and filter keys
1850
- # ('_*' are SOFA object private variables, '__' are netCDF attributes)
1851
- keys_a = [k for k in sofa_a.__dict__.keys() if not k.startswith("_")]
1852
- keys_b = [k for k in sofa_b.__dict__.keys() if not k.startswith("_")]
1853
-
1854
- if exclude is not None:
1855
- if exclude.upper() == "GLOBAL":
1856
- keys_a = [k for k in keys_a if not k.startswith("GLOBAL_")]
1857
- keys_b = [k for k in keys_b if not k.startswith("GLOBAL_")]
1858
- elif exclude.upper() == "ATTR":
1859
- keys_a = [k for k in keys_a if
1860
- sofa_a._convention[k]["type"] != "attribute"]
1861
- keys_b = [k for k in keys_b if
1862
- sofa_b._convention[k]["type"] != "attribute"]
1863
- elif exclude.upper() == "DATE":
1864
- keys_a = [k for k in keys_a if "Date" not in k]
1865
- keys_b = [k for k in keys_b if "Date" not in k]
1866
- else:
1867
- raise ValueError(
1868
- f"exclude is {exclude} but must be GLOBAL, DATE, or ATTR")
1869
-
1870
- # check for equal length
1871
- if len(keys_a) != len(keys_b):
1872
- is_identical = _equals_raise_warning((
1873
- f"not identical: sofa_a has {len(keys_a)} attributes for "
1874
- f"comparison and sofa_b has {len(keys_b)}."), verbose)
1875
-
1876
- return is_identical
1877
-
1878
- # check if the keys match
1879
- if set(keys_a) != set(keys_b):
1880
- is_identical = _equals_raise_warning(
1881
- "not identical: sofa_a and sofa_b do not have the ame attributes",
1882
- verbose)
1883
-
1884
- return is_identical
1885
-
1886
- # compare the data inside the SOFA object
1887
- for key in keys_a:
1888
-
1889
- # get data and types
1890
- a = getattr(sofa_a, key)
1891
- b = getattr(sofa_b, key)
1892
- type_a = sofa_a._convention[key]["type"]
1893
- type_b = sofa_b._convention[key]["type"]
1894
-
1895
- # compare attributes
1896
- if type_a == "attribute" and type_b == "attribute":
1897
-
1898
- # handling versions (might be integer, float, or string)
1899
- if not isinstance(a, str) or not isinstance(b, str):
1900
- a = str(float(a))
1901
- b = str(float(b))
1902
-
1903
- # compare
1904
- if a != b:
1905
- is_identical = _equals_raise_warning(
1906
- f"not identical: different values for {key}", verbose)
1907
-
1908
- # compare double variables
1909
- elif type_a == "double" and type_b == "double":
1910
-
1911
- try:
1912
- npt.assert_allclose(np.squeeze(a), np.squeeze(b))
1913
- except AssertionError:
1914
- is_identical = _equals_raise_warning(
1915
- "not identical: different values for {key}", verbose)
1916
-
1917
- # compare string variables
1918
- elif type_a == "string" and type_b == "string":
1919
- try:
1920
- assert np.all(
1921
- np.squeeze(a).astype("S") == np.squeeze(b).astype("S"))
1922
- except AssertionError:
1923
- is_identical = _equals_raise_warning(
1924
- "not identical: different values for {key}", verbose)
1925
- else:
1926
- is_identical = _equals_raise_warning(
1927
- (f"not identical: {key} has different data types "
1928
- f"({type_a}, {type_b})"), verbose)
1929
-
1930
- return is_identical
1931
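A hedged usage sketch for `equals`; note that freshly created objects may differ in their date attributes, which the `exclude` parameter can ignore:

```python
import sofar as sf

sofa_a = sf.Sofa("SimpleFreeFieldHRIR")
sofa_b = sf.Sofa("SimpleFreeFieldHRIR")

sf.equals(sofa_a, sofa_b, exclude="DATE")   # True if everything but the date fields matches
sf.equals(sofa_a, sofa_b, verbose=False)    # silent comparison, only returns the bool
```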
-
1932
-
1933
- def _equals_raise_warning(message, verbose):
1934
- if verbose:
1935
- warnings.warn(message)
1936
- return False
1937
-
1938
-
1939
- def _convention_csv2dict(file: str):
1940
- """
1941
- Read SOFA convention from csv file and convert to a dictionary. The csv files
1942
- are taken from the official Matlab/Octave SOFA API.
1943
-
1944
- Parameters
1945
- ----------
1946
- file : str
1947
- filename of the SOFA convention
1948
-
1949
- Returns
1950
- -------
1951
- convention : dict
1952
- SOFA convention as nested dictionary. Each attribute is a sub
1953
- dictionary with the keys `default`, `flags`, `dimensions`, `type`, and
1954
- `comment`.
1955
- """
1956
-
1957
- # read the file
1958
- # (encoding should be changed to utf-8 after the SOFA conventions repo is
1959
- # clean.)
1960
- # TODO: add explicit test for this function that checks the output
1961
- with open(file, 'r', encoding="windows-1252") as fid:
1962
- lines = fid.readlines()
1963
-
1964
- # write into dict
1965
- convention = {}
1966
- for idl, line in enumerate(lines):
1967
-
1968
- try:
1969
- # separate by tabs
1970
- line = line.strip().split("\t")
1971
- # parse the line entry by entry
1972
- for idc, cell in enumerate(line):
1973
- # detect empty cells and strip leading/trailing white space
1974
- cell = None if cell.replace(' ', '') == '' else cell.strip()
1975
- # nothing to do for empty cells
1976
- if cell is None:
1977
- line[idc] = cell
1978
- continue
1979
- # parse text cells that do not contain arrays
1980
- if cell[0] != '[':
1981
- # check for numbers
1982
- try:
1983
- cell = float(cell) if '.' in cell else int(cell)
1984
- except ValueError:
1985
- pass
1986
-
1987
- line[idc] = cell
1988
- continue
1989
-
1990
- # parse array cell
1991
- # remove brackets
1992
- cell = cell[1:-1]
1993
-
1994
- if ';' not in cell:
1995
- # get rid of white spaces
1996
- cell = cell.strip()
1997
- cell = cell.replace(' ', ',')
1998
- cell = cell.replace(' ', '')
1999
- # create flat list of integers and floats
2000
- numbers = cell.split(',')
2001
- cell = [float(n) if '.' in n else int(n) for n in numbers]
2002
- else:
2003
- # create a nested list of integers and floats
2004
- # separate multidimensional arrays
2005
- cell = cell.split(';')
2006
- cell_nd = [None] * len(cell)
2007
- for idx, cc in enumerate(cell):
2008
- # get rid of white spaces
2009
- cc = cc.strip()
2010
- cc = cc.replace(' ', ',')
2011
- cc = cc.replace(' ', '')
2012
- numbers = cc.split(',')
2013
- cell_nd[idx] = [float(n) if '.' in n else int(n)
2014
- for n in numbers]
2015
-
2016
- cell = cell_nd
2017
-
2018
- # write parsed cell to line
2019
- line[idc] = cell
2020
-
2021
- # first line contains field names
2022
- if idl == 0:
2023
- fields = line[1:]
2024
- continue
2025
-
2026
- # add blank comment if it does not exist
2027
- if len(line) == 5:
2028
- line.append("")
2029
- # convert empty defaults from None to ""
2030
- if line[1] is None:
2031
- line[1] = ""
2032
-
2033
- # make sure some unusual default values are converted for json
2034
- if line[1] == "permute([0 0 0 1 0 0; 0 0 0 1 0 0], [3 1 2]);":
2035
- # Field Data.SOS in SimpleFreeFieldHRSOS and SimpleFreeFieldSOS
2036
- line[1] = [[[0, 0, 0, 1, 0, 0], [0, 0, 0, 1, 0, 0]]]
2037
- if line[1] == "{''}":
2038
- line[1] = ['']
2039
- # convert versions to strings
2040
- if "Version" in line[0] and not isinstance(line[1], str):
2041
- line[1] = str(float(line[1]))
2042
-
2043
- # write second to last line
2044
- convention[line[0]] = {}
2045
- for ff, field in enumerate(fields):
2046
- convention[line[0]][field.lower()] = line[ff + 1]
2047
-
2048
- except: # noqa
2049
- raise ValueError((f"Failed to parse line {idl}, entry {idc} in: "
2050
- f"{file}: \n{line}\n"))
2051
-
2052
- # reorder the fields to be nicer to read and understand
2053
- # 1. Move everything that is not GLOBAL to the end
2054
- keys = [key for key in convention.keys()]
2055
- for key in keys:
2056
- if "GLOBAL" not in key:
2057
- convention[key] = convention.pop(key)
2058
- # 2. Move Data entries to the end
2059
- for key in keys:
2060
- if key.startswith("Data"):
2061
- convention[key] = convention.pop(key)
2062
-
2063
- return convention
2064
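For orientation, the nested dictionary returned by `_convention_csv2dict` has roughly the following shape; the entries and values below are illustrative, not taken from a real convention file:

```python
# Hypothetical excerpt of a parsed convention (illustrative only)
convention = {
    "GLOBAL:SOFAConventions": {
        "default": "SimpleFreeFieldHRIR",
        "flags": "rm",
        "dimensions": None,
        "type": "attribute",
        "comment": "",
    },
    "Data.IR": {
        "default": 0,
        "flags": "m",
        "dimensions": "mRn",
        "type": "double",
        "comment": "",
    },
}
```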
-
2065
-
2066
- def _format_value_for_netcdf(value, key, dtype, dimensions, S):
2067
- """
2068
- Format value from SOFA object for saving in a NETCDF4 file.
2069
-
2070
- Parameters
2071
- ----------
2072
- value : str, array like
2073
- The value to be formatted
2074
- key : str
2075
- The name of the current attribute. Needed for verbose errors.
2076
- dtype : str
2077
- The data type of value
2078
- dimensions : str
2079
- The intended dimensions from ``sofa.dimensions``
2080
- S : int
2081
- Length of the string array.
2082
-
2083
- Returns
2084
- -------
2085
- value : str, numpy array
2086
- The formatted value.
2087
- netcdf_dtype : str
2088
- The data type as a string for writing to a NETCDF4 file ('attribute',
2089
- 'f8', or 'S1').
2090
- """
2091
- # copy value
2092
- try:
2093
- value = value.copy()
2094
- except AttributeError:
2095
- pass
2096
-
2097
- # parse data
2098
- if dtype == "attribute":
2099
- value = str(value)
2100
- netcdf_dtype = "attribute"
2101
- elif dtype == "double":
2102
- value = _atleast_nd(value, len(dimensions))
2103
- netcdf_dtype = "f8"
2104
- elif dtype == "string":
2105
- value = np.array(value, dtype="S" + str(S))
2106
- value = _atleast_nd(value, len(dimensions))
2107
- netcdf_dtype = 'S1'
2108
- else:
2109
- raise ValueError(f"Unknown type {dtype} for {key}")
2110
-
2111
- return value, netcdf_dtype
2112
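A minimal sketch of the formatting behaviour; the shapes follow from `_atleast_nd`, and the key is only used for error messages:

```python
import numpy as np

# doubles are padded to the number of target dimensions
value, dtype = _format_value_for_netcdf(
    np.zeros((1, 2)), "Data_IR", "double", "MRN", 12)
# value.shape == (1, 2, 1), dtype == "f8"

# attributes are converted to plain strings
value, dtype = _format_value_for_netcdf(
    48000.0, "Data_SamplingRate_Units", "attribute", "", 12)
# value == "48000.0", dtype == "attribute"
```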
-
2113
-
2114
- def _format_value_from_netcdf(value, key):
2115
- """
2116
- Format value from NETCDF4 file for saving in a SOFA object
2117
-
2118
- Parameters
2119
- ----------
2120
- value : np.array of dtype float or S
2121
- The value to be formatted
2122
- key : str
2123
- The variable name of the current value. Needed for verbose errors.
2124
-
2125
- Returns
2126
- -------
2127
- value : str, number, numpy array
2128
- The formatted value.
2129
- """
2130
-
2131
- if "float" in str(value.dtype) or "int" in str(value.dtype):
2132
- if np.ma.is_masked(value):
2133
- warnings.warn(f"Entry {key} contains missing data")
2134
- else:
2135
- # Convert to numpy array or scalar
2136
- value = np.asarray(value)
2137
- elif str(value.dtype)[1] in ["S", "U"]:
2138
- # string arrays are stored in masked arrays with empty strings '' being
2139
- # masked. Convert to regular arrays with unmasked empty strings
2140
- if str(value.dtype)[1] == "S":
2141
- value = chartostring(value, encoding="ascii")
2142
- value = np.atleast_1d(value).astype("U")
2143
- else:
2144
- raise TypeError(
2145
- f"{key}: value.dtype is {value.dtype} but must be float, S or, U")
2146
-
2147
- # convert arrays to scalars if they do not store data that is usually used
2148
- # as scalar metadata, e.g., the SamplingRate
2149
- data_keys = ["Data_IR", "Data_Real", "Data_Imag", "Data_SOS", "Data_Delay"]
2150
- if value.size == 1 and key not in data_keys:
2151
- value = value[0]
2152
-
2153
- return value
2154
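A minimal sketch of the unwrapping behaviour for numeric data:

```python
import numpy as np

# single numeric values are converted to scalars ...
_format_value_from_netcdf(np.array([44100.0]), "Data_SamplingRate")   # 44100.0

# ... unless they belong to one of the data arrays listed in data_keys
_format_value_from_netcdf(np.array([[0.5]]), "Data_IR")               # array([[0.5]])
```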
-
2155
-
2156
- def _add_custom_api_entry(sofa, key, value, flags, dimensions, dtype):
2157
- """
2158
- Add custom entry to the sofa._convention and permanently save it in
2159
- sofa._custom
2160
-
2161
- Parameters
2162
- ----------
2163
- sofa : Sofa
2164
- key : str
2165
- name of the entry
2166
- flags, dimensions, dtype : any
2167
- as in sofa._convention
2168
- """
2169
- # create custom API if it does not exist
2170
- sofa._protected = False
2171
- if not hasattr(sofa, "_custom"):
2172
- sofa._custom = {}
2173
-
2174
- # lower case letters to indicate custom dimensions
2175
- if dimensions is not None:
2176
- dimensions = [d.upper() if d.upper() in "ERMNCIS" else d.lower()
2177
- for d in dimensions]
2178
- dimensions = "".join(dimensions)
2179
-
2180
- # add user entry to custom API
2181
- sofa._custom[key] = {
2182
- "flags": flags,
2183
- "dimensions": dimensions,
2184
- "type": dtype,
2185
- "default": None,
2186
- "comment": ""}
2187
- sofa._update_convention(version="match")
2188
-
2189
- # add attribute to object
2190
- setattr(sofa, key, value)
2191
- sofa._protected = True
2192
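A hedged sketch of adding a custom global attribute; the attribute name and value are hypothetical:

```python
import sofar as sf

sofa = sf.Sofa("SimpleFreeFieldHRIR")

# attach a custom global attribute and register it in sofa._custom
_add_custom_api_entry(sofa, "GLOBAL_Temperature", "21 degree Celsius",
                      None, None, "attribute")
sofa.GLOBAL_Temperature   # "21 degree Celsius"
```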
-
2193
-
2194
- def _verify_convention_and_version(version, version_in, convention):
2195
- """
2196
- Verify if convention and version exist and return version
2197
-
2198
- Parameters
2199
- ----------
2200
- version : str
2201
- 'latest', 'match', version string (e.g., '1.0')
2202
- version_in : str
2203
- The version to be checked against
2204
- convention : str
2205
- The name of the convention to be checked
2206
-
2207
- Returns
2208
- -------
2209
- version_out : str
2210
- The version to be used depending on `version`, and `version_in`
2211
- """
2212
-
2213
- # check if the convention exists in sofar
2214
- if convention not in _get_conventions("name"):
2215
- raise ValueError(
2216
- f"Convention '{convention}' does not exist")
2217
-
2218
- name_version = _get_conventions("name_version")
2219
-
2220
- if version == "latest":
2221
- # get list of versions as floats
2222
- version_out = [float(versions[1]) for versions in name_version
2223
- if versions[0] == convention]
2224
- # get latest version as string
2225
- version_out = str(version_out[np.argmax(version_out)])
2226
-
2227
- if version_parse(version_out) > version_parse(version_in):
2228
- print(("Updated conventions version from "
2229
- f"{version_in} to {version_out}"))
2230
- else:
2231
- # check which version is wanted
2232
- if version == "match":
2233
- match = version_in
2234
- else:
2235
- match = version
2236
-
2237
- version_out = None
2238
- for versions in name_version:
2239
- # check if convention and version match
2240
- if versions[0] == convention \
2241
- and str(float(versions[1])) == match:
2242
- version_out = str(float(versions[1]))
2243
-
2244
- if version_out is None:
2245
- raise ValueError((
2246
- f"Version {match} does not exist for convention {convention}. "
2247
- "Try to access the data with version='latest'"))
2248
-
2249
- return version_out
2250
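A short sketch of the version resolution; the returned strings depend on the convention versions shipped with the package:

```python
# keep the version of the file that is being read
_verify_convention_and_version("match", "1.0", "SimpleFreeFieldHRIR")    # "1.0"

# upgrade to the latest available version (prints a note if it is newer)
_verify_convention_and_version("latest", "1.0", "SimpleFreeFieldHRIR")   # e.g., "1.0"

# a version that does not exist for the convention raises a ValueError
# _verify_convention_and_version("9.9", "1.0", "SimpleFreeFieldHRIR")
```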
-
2251
-
2252
- def _atleast_nd(array, ndim):
2253
- """
2254
- Get numpy array with specified number of dimensions. Dimensions are
2255
- appended at the end if ndim > 3.
2256
- """
2257
- try:
2258
- array = array.copy()
2259
- except AttributeError:
2260
- array = array
2261
-
2262
- if ndim == 1:
2263
- array = np.atleast_1d(array)
2264
- if ndim == 2:
2265
- array = np.atleast_2d(array)
2266
- if ndim >= 3:
2267
- array = np.atleast_3d(array)
2268
- for _ in range(ndim - array.ndim):
2269
- array = array[..., np.newaxis]
2270
- return array
2271
-
2272
-
2273
- def _nd_newaxis(array, ndim):
2274
- """Append dimensions to the end of an array until array.ndim == ndim"""
2275
- array = np.array(array)
2276
-
2277
- for _ in range(ndim - array.ndim):
2278
- array = array[..., np.newaxis]
2279
- return array
2280
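For clarity, the shape handling of the two helpers (missing dimensions are appended at the end):

```python
import numpy as np

_atleast_nd(1.0, 2).shape               # (1, 1)
_atleast_nd(np.zeros((2, 3)), 4).shape  # (2, 3, 1, 1)
_nd_newaxis([1, 2, 3], 2).shape         # (3, 1)
```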
-
2281
-
2282
- def _sofa_restrictions():
2283
- """
2284
- Return dictionaries to check restrictions on the data posed by SOFA.
2285
-
2286
- The check is done in Sofa.verify(). This is not a private class method,
2287
- to save the additional indentation that would make the code harder to read and
2288
- write.
2289
-
2290
- Returns:
2291
- data : dict
2292
- General restrictions on the data of any SOFA convention
2293
- data_type : dict
2294
- Restriction depending on GLOBAL_DataType
2295
- api : dict
2296
- Restrictions on the API depending on specific fields of a SOFA file
2297
- """
2298
-
2299
- # definition of valid coordinate systems and units
2300
- coords_min = ["cartesian", "spherical"]
2301
- coords_full = coords_min + ["spherical harmonics"]
2302
- units_min = ["metre", "degree, degree, metre"]
2303
- units_full = units_min + [units_min[1]]
2304
- unit_aliases = {
2305
- "metres": "metre",
2306
- "meter": "metre",
2307
- "meters": "metre",
2308
- "cubic metres": "cubic metre",
2309
- "cubic meter": "cubic metre",
2310
- "cubic meters": "cubic metre",
2311
- "degrees": "degree",
2312
- "seconds": "second"
2313
- }
2314
- # possible values for restricted dimensions in the API
2315
- sh_dimension = ([(N+1)**2 for N in range(200)],
2316
- "(N+1)**2 where N is the spherical harmonics order")
2317
- sos_dimension = ([6 * (N + 1) for N in range(1000)],
2318
- "an integer multiple of 6 greater 0")
2319
-
2320
- # restrictions on the data
2321
- # - if `value` is None it is only checked if the SOFA object has the attr
2322
- # - if `value` is a list, it is also checked if the actual value is in
2323
- # `value`
2324
- # - if there is a list of values for a dependency the value of the SOFA
2325
- # object has to match the value of the list at a certain index. The index
2326
- # is determined by the value of the parent.
2327
- data = {
2328
- # Global --------------------------------------------------------------
2329
- # GLOBAL_SOFAConventions?
2330
- # Check value of GLOBAL_DataType
2331
- # (FIRE and TFE are legacy data types from SOFA version 1.0)
2332
- "GLOBAL_DataType": {
2333
- "value": ["FIR", "FIR-E", "FIRE", "TF", "TF-E", "TFE", "SOS"]},
2334
- "GLOBAL_RoomType": {
2335
- "value": ["free field", "reverberant", "shoebox", "dae"]},
2336
- "GLOBAL_SOFAConventions": {
2337
- "value": _get_conventions(return_type="name")},
2338
- # check N_LongName
2339
- "N_LongName": {
2340
- "value": ["frequency"]},
2341
- # Listener ------------------------------------------------------------
2342
- # Check values and consistency of ListenerPosition Type and Units
2343
- "ListenerPosition_Type": {
2344
- "value": coords_min,
2345
- "dependency": {
2346
- "ListenerPosition_Units": units_min}},
2347
- # Check if dependencies of ListenerView are contained
2348
- "ListenerView": {
2349
- "value": None,
2350
- "dependency": {
2351
- "ListenerView_Type": None,
2352
- "ListenerView_Units": None}},
2353
- # Check values and consistency of ListenerView Type and Units
2354
- "ListenerView_Type": {
2355
- "value": coords_min,
2356
- "dependency": {
2357
- "ListenerView_Units": units_min}},
2358
- # Check if dependencies of ListenerUp are contained
2359
- "ListenerUp": {
2360
- "value": None,
2361
- "dependency": {
2362
- "ListenerView": None}},
2363
- # Receiver ------------------------------------------------------------
2364
- # Check values and consistency of ReceiverPosition Type and Units
2365
- "ReceiverPosition_Type": {
2366
- "value": coords_full,
2367
- "dependency": {
2368
- "ReceiverPosition_Units": units_full}},
2369
- # Check if dependencies of ReceiverView are contained
2370
- "ReceiverView": {
2371
- "value": None,
2372
- "dependency": {
2373
- "ReceiverView_Type": None,
2374
- "ReceiverView_Units": None}},
2375
- # Check values and consistency of ReceiverView Type and Units
2376
- "ReceiverView_Type": {
2377
- "value": coords_min,
2378
- "dependency": {
2379
- "ReceiverView_Units": units_min}},
2380
- # Check if dependencies of ReceiverUp are contained
2381
- "ReceiverUp": {
2382
- "value": None,
2383
- "dependency": {
2384
- "ReceiverView": None}},
2385
- # Source --------------------------------------------------------------
2386
- # Check values and consistency of SourcePosition Type and Units
2387
- "SourcePosition_Type": {
2388
- "value": coords_min,
2389
- "dependency": {
2390
- "SourcePosition_Units": units_min}},
2391
- # Check if dependencies of SourceView are contained
2392
- "SourceView": {
2393
- "value": None,
2394
- "dependency": {
2395
- "SourceView_Type": None,
2396
- "SourceView_Units": None}},
2397
- # Check values and consistency of SourceView Type and Units
2398
- "SourceView_Type": {
2399
- "value": coords_min,
2400
- "dependency": {
2401
- "SourceView_Units": units_min}},
2402
- # Check if dependencies of SourceUp are contained
2403
- "SourceUp": {
2404
- "value": None,
2405
- "dependency": {
2406
- "SourceView": None}},
2407
- # Emitter -------------------------------------------------------------
2408
- # Check values and consistency of EmitterPosition Type and Units
2409
- "EmitterPosition_Type": {
2410
- "value": coords_full,
2411
- "dependency": {
2412
- "EmitterPosition_Units": units_full}},
2413
- # Check if dependencies of EmitterView are contained
2414
- "EmitterView": {
2415
- "value": None,
2416
- "dependency": {
2417
- "EmitterView_Type": None,
2418
- "EmitterView_Units": None}},
2419
- # Check values and consistency of EmitterView Type and Units
2420
- "EmitterView_Type": {
2421
- "value": coords_min,
2422
- "dependency": {
2423
- "EmitterView_Units": units_min}},
2424
- # Check if dependencies of EmitterUp are contained
2425
- "EmitterUp": {
2426
- "value": None,
2427
- "dependency": {
2428
- "EmitterView": None}},
2429
- # Room ----------------------------------------------------------------
2430
- "RoomVolume": {
2431
- "value": None,
2432
- "dependency": {
2433
- "RoomVolume_Units": None}},
2434
- "RoomTemperature": {
2435
- "value": None,
2436
- "dependency": {
2437
- "RoomTemperature_Units": None}},
2438
- "RoomVolume_Units": {
2439
- "value": ["cubic metre"]},
2440
- "RoomTemperature_Units": {
2441
- "value": ["Kelvin"]}
2442
- }
2443
-
2444
- # restrictions arising from GLOBAL_DataType
2445
- # - if `value` is None it is only checked if the SOFA object has the attr
2446
- # - if `value` is a list, it is also checked if the actual value is in
2447
- # `value`
2448
- data_type = {
2449
- "FIR": {
2450
- "Data_IR": None,
2451
- "Data_Delay": None,
2452
- "Data_SamplingRate": None,
2453
- "Data_SamplingRate_Units": (["hertz"], "hertz")},
2454
- "TF": {
2455
- "Data_Real": None,
2456
- "Data_Imag": None,
2457
- "N": None,
2458
- # "N_LongName": (["frequency"], "frequency"), # optional parameter
2459
- "N_Units": (["hertz"], "hertz")},
2460
- "SOS": {
2461
- "Data_SOS": None,
2462
- "Data_Delay": None,
2463
- "Data_SamplingRate": None,
2464
- "Data_SamplingRate_Units": (["hertz"], "hertz")}
2465
- }
2466
-
2467
- # restrictions on the API
2468
- api = {
2469
- # Check dimension R if using spherical harmonics for the Receiver
2470
- # (assuming SH orders < 200)
2471
- "ReceiverPosition_Type": {
2472
- "value": "spherical harmonics",
2473
- "API": ("R", ) + sh_dimension},
2474
- # Check dimension E if using spherical harmonics for the Emitter
2475
- # (assuming SH orders < 200)
2476
- "EmitterPosition_Type": {
2477
- "value": "spherical harmonics",
2478
- "API": ("E", ) + sh_dimension},
2479
- # Checking the dimension of N if having SOS data
2480
- # (assuming up to 1000 second order sections)
2481
- "GLOBAL_DataType": {
2482
- "value": "SOS",
2483
- "API": ("N", ) + sos_dimension}
2484
- }
2485
-
2486
- # restrictions from the convention. Values of fields will be checked.
2487
- # Must contain testing the API. If this would be tested under api={}, the
2488
- # entry GLOBAL_SOFAConventions would be repeated.
2489
- convention = {
2490
- "GeneralFIR": {
2491
- "GLOBAL_DataType": ["FIR"]},
2492
- "GeneralFIR-E": {
2493
- "GLOBAL_DataType": ["FIR-E"]},
2494
- "GeneralFIRE": { # SOFA version 1.0 legacy
2495
- "GLOBAL_DataType": ["FIRE"]},
2496
- "GeneralTF": {
2497
- "GLOBAL_DataType": ["TF"]},
2498
- "GeneralTF-E": {
2499
- "GLOBAL_DataType": ["TF-E"]},
2500
- "SimpleFreeFieldHRIR": {
2501
- "GLOBAL_DataType": ["FIR"],
2502
- "GLOBAL_RoomType": ["free field"],
2503
- "EmitterPosition_Type": coords_min,
2504
- "API": {"E": 1}},
2505
- "SimpleFreeFieldHRTF": {
2506
- "GLOBAL_DataType": ["TF"],
2507
- "GLOBAL_RoomType": ["free field"],
2508
- "EmitterPosition_Type": coords_min,
2509
- "API": {"E": 1}},
2510
- "SimpleFreeFieldHRSOS": {
2511
- "GLOBAL_DataType": ["SOS"],
2512
- "GLOBAL_RoomType": ["free field"],
2513
- "EmitterPosition_Type": coords_min,
2514
- "API": {"E": 1}},
2515
- "FreeFieldHRIR": {
2516
- "GLOBAL_DataType": ["FIR-E"],
2517
- "GLOBAL_RoomType": ["free field"]},
2518
- "FreeFieldHRTF": {
2519
- "GLOBAL_DataType": ["TF-E"],
2520
- "GLOBAL_RoomType": ["free field"]},
2521
- "SimpleHeadphoneIR": {
2522
- "GLOBAL_DataType": ["FIR"]},
2523
- "SingleRoomSRIR": {
2524
- "GLOBAL_DataType": ["FIR"]},
2525
- "SingleRoomMIMOSRIR": {
2526
- "GLOBAL_DataType": ["FIR-E"]},
2527
- "FreeFieldDirectivityTF": {
2528
- "GLOBAL_DataType": ["TF"]}
2529
- }
2530
-
2531
- return data, data_type, api, convention, unit_aliases
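A brief sketch of how the returned tables can be queried; the actual checks are performed in `Sofa.verify`:

```python
data, data_type, api, convention, unit_aliases = _sofa_restrictions()

data["GLOBAL_RoomType"]["value"]          # ["free field", "reverberant", "shoebox", "dae"]
sorted(data_type["FIR"])                  # fields required for FIR data
convention["SimpleFreeFieldHRIR"]["API"]  # {"E": 1}
unit_aliases["meter"]                     # "metre"
```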