das2numpy 1.0.tar.gz → 1.1.tar.gz

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
Files changed (27)
  1. das2numpy-1.1/PKG-INFO +102 -0
  2. das2numpy-1.1/README.md +83 -0
  3. {das2numpy-1.0 → das2numpy-1.1}/pyproject.toml +4 -2
  4. {das2numpy-1.0 → das2numpy-1.1}/src/das2numpy/__init__.py +12 -4
  5. {das2numpy-1.0 → das2numpy-1.1}/src/das2numpy/__main__.py +10 -7
  6. {das2numpy-1.0 → das2numpy-1.1}/src/das2numpy/chunk.py +7 -38
  7. {das2numpy-1.0 → das2numpy-1.1}/src/das2numpy/setups/light_tdms_reader.py +8 -35
  8. das2numpy-1.1/src/das2numpy.egg-info/PKG-INFO +102 -0
  9. {das2numpy-1.0 → das2numpy-1.1}/src/das2numpy.egg-info/SOURCES.txt +1 -2
  10. das2numpy-1.1/src/das2numpy.egg-info/requires.txt +5 -0
  11. {das2numpy-1.0 → das2numpy-1.1}/src/das2numpy.egg-info/top_level.txt +0 -1
  12. {das2numpy-1.0 → das2numpy-1.1}/src/example.py +8 -5
  13. das2numpy-1.0/PKG-INFO +0 -93
  14. das2numpy-1.0/README.md +0 -79
  15. das2numpy-1.0/src/das2numpy/test.py +0 -158
  16. das2numpy-1.0/src/das2numpy.egg-info/PKG-INFO +0 -93
  17. das2numpy-1.0/src/test_downsampled.py +0 -54
  18. {das2numpy-1.0 → das2numpy-1.1}/LICENSE +0 -0
  19. {das2numpy-1.0 → das2numpy-1.1}/setup.cfg +0 -0
  20. {das2numpy-1.0 → das2numpy-1.1}/src/das2numpy/filefinder.py +0 -0
  21. {das2numpy-1.0 → das2numpy-1.1}/src/das2numpy/setups/flac_200hz.py +0 -0
  22. {das2numpy-1.0 → das2numpy-1.1}/src/das2numpy/setups/optasense_b35idefix.py +0 -0
  23. {das2numpy-1.0 → das2numpy-1.1}/src/das2numpy/setups/optasense_b35idefix_fast.py +0 -0
  24. {das2numpy-1.0 → das2numpy-1.1}/src/das2numpy/setups/silixa.py +0 -0
  25. {das2numpy-1.0 → das2numpy-1.1}/src/das2numpy/setups/silixa_200hz.py +0 -0
  26. {das2numpy-1.0 → das2numpy-1.1}/src/das2numpy/utils.py +0 -0
  27. {das2numpy-1.0 → das2numpy-1.1}/src/das2numpy.egg-info/dependency_links.txt +0 -0
das2numpy-1.1/PKG-INFO ADDED
@@ -0,0 +1,102 @@
+ Metadata-Version: 2.4
+ Name: das2numpy
+ Version: 1.1
+ Summary: A simple and universal package for loading large amounts of distributed acoustic sensing (DAS) data.
+ Author-email: Erik Genthe <erik.genthe@desy.de>
+ Project-URL: Homepage, https://git.physnet.uni-hamburg.de/wave/das2numpy
+ Classifier: Programming Language :: Python :: 3
+ Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
+ Classifier: Operating System :: OS Independent
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: numpy
+ Requires-Dist: ffmpeg-python
+ Requires-Dist: h5py
+ Requires-Dist: scipy
+ Requires-Dist: numba
+ Dynamic: license-file
+
+ # Module for loading Distributed Acoustic Sensing (DAS) data. SILIXA / OPTASENSE
+
+
+
+ ## Install
+
+ You can install via PIP.
+ ```
+ python -m pip install das2numpy
+ ```
+
+ To load data from flac files, ffmpeg (https://ffmpeg.org) needs to be installed. It is not possible to install ffmpeg with pip.
+
+ On DESY's Maxwell cluster ffmpeg is available as a module. Before using das2numpy execute:
+ ```
+ module load maxwell ffmpeg
+ ```
+
+
+
+
+ ## Python API
+
+ Example: If you want to get started quickly, have a look at the [example.py](src/example.py).
+
+ Create an instance with:
+
+ ```python
+ def loader(root_path:str, predefined_setup:str, num_worker_threads):
+ ```
+ ```
+ Loads data and returns it as a numpy array.
+ Args:
+ root_path (str): Path to directory that contains the files to be loaded from. Subdirectories are (recursively) also searched.
+ predefined_setup (str): One of ["SILIXA", "FLAC_200HZ", "OPTASENSE"]
+ num_worker_threads (int): The number of worker threads used for loading files in parallel.
+ Returns:
+ A loader instance to load data. Call instance.load_array(...).
+ ```
+
+ Use one of the load_array(..) functions of that instance.
+
+ ```python
+ def load_array(t_start:datetime, t_end:datetime, channel_start:int, channel_end:int) -> NP.ndarray:
+ ```
+ ```
+ Loading data into numpy array.
+ Returns nothing, the data can be accessed by accessing the data field of this instance.
+ Warning: using a different value then 1 for t_step or channel_step can result in a high cpu-usage.
+ Consider using multithreaded=True in the constructor and a high amount of workers if needed.
+ Args:
+ t_start (datetime): datetime object which defines the start of the data to load.
+ t_end (datetime): datetime object which defines the end of the data to load.
+ channel_start (int): The starting index of the sensor position in the data (inclusive).
+ channel_end (int): The ending index of the sensors position in the data (exclusive).
+ t_step (int): Reduces the data on the time axis by factor t_step. Uses mean averaging. Default is 1.
+ channel_step (int): Like t_step, but for the sensor position.
+ Returns:
+ A 2d-numpy-array containing the data.
+ The first axis corresponds to the time, the second to the channel (sensor position)
+ ```
+
+ For more details have a look at the inline documentation of [chunk.py](src/das2numpy/chunk.py)
+
+
+ ## Command Line Interface
+
+ Creates a numpy file from the requested data. Optionally, the binary data can be printed to stdout.
+
+ Example call:
+ ```
+ python -m das2numpy "SILIXA" /pnfs/desy.de/m/project/iDAS/raw/2024-DESY/2024-07-23-desy 2024-07-23T10:01:00 2024-07-23T10:02:00 10 0 1000 10 default
+ ```
+
+ For more information:
+ ```
+ python -m das2numpy -h
+ ```
+
+
+ ## Issues
+
+ - Loading from OPTASENSE may not work anymore. I haven't tested it for a long time.
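To make the documented API concrete, here is a minimal usage sketch assembled from the docstrings above; the data path is a placeholder and the channel range and steps are illustrative:

```python
from datetime import datetime
from das2numpy import loader

# Placeholder directory; "SILIXA" is one of the documented setups.
ldr = loader("/path/to/das/data", "SILIXA", num_worker_threads=4)
data = ldr.load_array(
    datetime(2024, 7, 23, 10, 1, 0),  # t_start
    datetime(2024, 7, 23, 10, 2, 0),  # t_end
    0, 1000,                          # channel_start (incl.), channel_end (excl.)
    t_step=10, channel_step=10,       # mean-averaged decimation, default 1
)
print(data.shape)  # first axis: time, second axis: channel
```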
das2numpy-1.1/README.md ADDED
@@ -0,0 +1,83 @@
+ # Module for loading Distributed Acoustic Sensing (DAS) data. SILIXA / OPTASENSE
+
+
+
+ ## Install
+
+ You can install via PIP.
+ ```
+ python -m pip install das2numpy
+ ```
+
+ To load data from flac files, ffmpeg (https://ffmpeg.org) needs to be installed. It is not possible to install ffmpeg with pip.
+
+ On DESY's Maxwell cluster ffmpeg is available as a module. Before using das2numpy execute:
+ ```
+ module load maxwell ffmpeg
+ ```
+
+
+
+
+ ## Python API
+
+ Example: If you want to get started quickly, have a look at the [example.py](src/example.py).
+
+ Create an instance with:
+
+ ```python
+ def loader(root_path:str, predefined_setup:str, num_worker_threads):
+ ```
+ ```
+ Loads data and returns it as a numpy array.
+ Args:
+ root_path (str): Path to directory that contains the files to be loaded from. Subdirectories are (recursively) also searched.
+ predefined_setup (str): One of ["SILIXA", "FLAC_200HZ", "OPTASENSE"]
+ num_worker_threads (int): The number of worker threads used for loading files in parallel.
+ Returns:
+ A loader instance to load data. Call instance.load_array(...).
+ ```
+
+ Use one of the load_array(..) functions of that instance.
+
+ ```python
+ def load_array(t_start:datetime, t_end:datetime, channel_start:int, channel_end:int) -> NP.ndarray:
+ ```
+ ```
+ Loading data into numpy array.
+ Returns nothing, the data can be accessed by accessing the data field of this instance.
+ Warning: using a different value then 1 for t_step or channel_step can result in a high cpu-usage.
+ Consider using multithreaded=True in the constructor and a high amount of workers if needed.
+ Args:
+ t_start (datetime): datetime object which defines the start of the data to load.
+ t_end (datetime): datetime object which defines the end of the data to load.
+ channel_start (int): The starting index of the sensor position in the data (inclusive).
+ channel_end (int): The ending index of the sensors position in the data (exclusive).
+ t_step (int): Reduces the data on the time axis by factor t_step. Uses mean averaging. Default is 1.
+ channel_step (int): Like t_step, but for the sensor position.
+ Returns:
+ A 2d-numpy-array containing the data.
+ The first axis corresponds to the time, the second to the channel (sensor position)
+ ```
+
+ For more details have a look at the inline documentation of [chunk.py](src/das2numpy/chunk.py)
+
+
+ ## Command Line Interface
+
+ Creates a numpy file from the requested data. Optionally, the binary data can be printed to stdout.
+
+ Example call:
+ ```
+ python -m das2numpy "SILIXA" /pnfs/desy.de/m/project/iDAS/raw/2024-DESY/2024-07-23-desy 2024-07-23T10:01:00 2024-07-23T10:02:00 10 0 1000 10 default
+ ```
+
+ For more information:
+ ```
+ python -m das2numpy -h
+ ```
+
+
+ ## Issues
+
+ - Loading from OPTASENSE may not work anymore. I haven't tested it for a long time.
{das2numpy-1.0 → das2numpy-1.1}/pyproject.toml CHANGED
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
  [project]
  name = "das2numpy"
- version = "1.0"
+ version = "1.1"
  authors = [
  { name="Erik Genthe", email="erik.genthe@desy.de" },
  ]
@@ -16,6 +16,8 @@ classifiers = [
  "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
  "Operating System :: OS Independent",
  ]
-
+ dependencies = [
+ "numpy", "ffmpeg-python", "h5py", "scipy", "numba",
+ ]
  [project.urls]
  Homepage = "https://git.physnet.uni-hamburg.de/wave/das2numpy"
{das2numpy-1.0 → das2numpy-1.1}/src/das2numpy/__init__.py CHANGED
@@ -8,13 +8,21 @@ from . import utils
 
 
  def loader(root_path:str, predefined_setup:str, num_worker_threads):
-
+ """
+ Loads data and returns it as a numpy array.
+ Args:
+ root_path (str): Path to directory that contains the files to be loaded from. Subdirectories are (recursively) also searched.
+ predefined_setup (str): One of ["SILIXA", "FLAC_200HZ", "OPTASENSE"]
+ num_worker_threads (int): The number of worker threads used for loading files in parallel.
+ Returns:
+ A loader instance to load data. Call instance.load_array(...).
+ """
  if predefined_setup.upper() == "SILIXA":
  from .setups import silixa
  chunk = silixa.init(root_path, num_worker_threads)
- elif predefined_setup.upper() == "SILIXA_200HZ":
- from .setups import silixa_200hz
- chunk = silixa_200hz.init(root_path, num_worker_threads)
+ #elif predefined_setup.upper() == "SILIXA_200HZ":
+ # from .setups import silixa_200hz
+ # chunk = silixa_200hz.init(root_path, num_worker_threads)
  elif predefined_setup.upper() == "FLAC_200HZ":
  from .setups import flac_200hz
  chunk = flac_200hz.init(root_path, num_worker_threads)
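Note that 1.1 comments out the SILIXA_200HZ branch, so that setup name is no longer dispatched. A sketch of the migration implied by this change and by the updated example.py below (paths are placeholders):

```python
from das2numpy import loader

# Worked in 1.0; in 1.1 the SILIXA_200HZ branch above is commented out,
# so this setup name is no longer handled:
# ldr = loader("/path/to/downsampled-data", "SILIXA_200HZ", 1)

# The updated example.py uses the FLAC-based 200 Hz setup instead:
ldr = loader("/path/to/downsampled-data", "FLAC_200HZ", 1)
```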
{das2numpy-1.0 → das2numpy-1.1}/src/das2numpy/__main__.py CHANGED
@@ -13,8 +13,12 @@ def parse_arguments():
  default=False,
  help="Print more information to stdout"
  )
-
-
+ parser.add_argument(
+ "--workers",
+ type=int,
+ default=1,
+ help="Number of worker threads used for loading files in parallel."
+ )
  parser.add_argument(
  "device",
  type=str,
@@ -38,7 +42,7 @@ def parse_arguments():
  parser.add_argument(
  "time_step",
  type=int,
- help="Time step as an integer."
+ help="Time step as an integer. Uses mean averaging."
  )
  parser.add_argument(
  "channel_start",
@@ -53,7 +57,7 @@ def parse_arguments():
  parser.add_argument(
  "channel_step",
  type=int,
- help="Channel step as an integer."
+ help="Channel step as an integer. Uses mean averaging."
  )
  parser.add_argument(
  "output",
@@ -81,9 +85,8 @@ def main():
 
  print("Load...")
  start = time()
- loaderinstance = loader(args.root_path, args.device, num_worker_threads=1)
- data = loaderinstance.load_array(args.start, args.end, args.time_step,
- args.channel_start, args.channel_end, args.channel_step)
+ loaderinstance = loader(args.root_path, args.device, num_worker_threads=args.workers)
+ data = loaderinstance.load_array(args.start, args.end, args.channel_start, args.channel_end, args.time_step, args.channel_step)
  if args.verbosity:
  end = time()
  print("Duration", end-start)
{das2numpy-1.0 → das2numpy-1.1}/src/das2numpy/chunk.py CHANGED
@@ -13,7 +13,6 @@ from typing import Callable
  from math import floor
  from datetime import datetime
  from random import shuffle
- from multipledispatch import dispatch
  import concurrent.futures as CF
  from concurrent.futures import ThreadPoolExecutor
  from threading import Lock
@@ -59,8 +58,8 @@ class Chunk():
  assert type(sample_rate) == int
  if multithreaded:
  self.__executor = ThreadPoolExecutor(workers)
- if not self.__multithreaded:
- print("Warning: Chunk is not in multiprocessing or multithreading mode!")
+ #if not self.__multithreaded:
+ # print("Warning: Chunk is not in multiprocessing or multithreading mode!")
 
 
 
@@ -114,7 +113,6 @@ class Chunk():
  n_channels = min(data.shape[1], self.data.shape[1])
  self.data[start_index : start_index + data.shape[0], 0:n_channels] = data[:,:n_channels]
 
- @dispatch(int, int, int, int, int, int)
  def load_array_posix_ms(self, t_start: int, t_end: int, t_step: int, channel_start: int, channel_end: int, channel_step: int) -> NP.ndarray:
  """ Loading data
  Warning: using a different value then 1 for t_step or channel_step can result in a high cpu-usage.
@@ -197,50 +195,21 @@
 
 
 
- @dispatch(datetime, datetime, int, int)
- def load_array(self, t_start:datetime, t_end:datetime, channel_start:int, channel_end:int) -> NP.ndarray:
- """ Loads data and returns it as a numpy array.
- Constraints:
- t_start has to be less or equal t_end,
- same for channel_start and channel_end.
- Args:
- t_start (datetime): datetime object which defines the start of the data to load.
- t_end (datetime): datetime object which defines the end of the data to load.
- channel_start (int): The starting index of sensor in the data (inclusive).
- channel_end (int): The ending index of sensors in the data (exclusive).
- Returns:
- A 2d-numpy-array containing the data.
- The first axis corresponds to the time, the second to the channel
- """
- return self.load_array(t_start, t_end, 1, channel_start, channel_end, 1)
-
-
- @dispatch(datetime, datetime, int, int, int, int)
- def load_array(self, t_start:datetime, t_end:datetime, t_step:int, channel_start:int, channel_end:int, channel_step:int) -> NP.ndarray:
+ def load_array(self, t_start:datetime, t_end:datetime, channel_start:int, channel_end:int, t_step=1, channel_step=1) -> NP.ndarray:
  """ Loading data into numpy array.
  Returns nothing, the data can be accessed by accessing the data field of this instance.
  Warning: using a different value then 1 for t_step or channel_step can result in a high cpu-usage.
  Consider using multithreaded=True in the constructor and a high amount of workers if needed.
- Constraints:
- t_start has to be less or equal t_end,
- same for channel_start and channel_end.
- t_step and channel_step have to be greater then 0
  Args:
  t_start (datetime): datetime object which defines the start of the data to load.
  t_end (datetime): datetime object which defines the end of the data to load.
- t_step (int): If you, for example only want to load the data of every fourth timestep use t_end=4
- channel_start (int): The starting index of sensor in the data (inclusive).
- channel_end (int): The ending index of sensors in the data (exclusive).
+ channel_start (int): The starting index of the sensor position in the data (inclusive).
+ channel_end (int): The ending index of the sensors position in the data (exclusive).
+ t_step (int): Reduces the data on the time axis by factor t_step. Uses mean averaging. Default is 1.
  channel_step (int): Like t_step, but for the sensor position.
  Returns:
  A 2d-numpy-array containing the data.
- The first axis corresponds to the time, the second to the channel
+ The first axis corresponds to the time, the second to the channel (sensor position)
  """
  return self.load_array_posix_ms(to_posix_timestamp_ms(t_start), to_posix_timestamp_ms(t_end), t_step, channel_start, channel_end, channel_step)
 
-
- @dispatch(int, int, int, int)
- def load_array_posix_ms(self, t_start:int, t_end:int, channel_start:int, channel_end:int) -> NP.ndarray:
- return self.load_array_posix_ms(t_start, t_end, 1, channel_start, channel_end, 1)
-
-
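The multipledispatch overloads are gone; load_array now takes the channel bounds before the optional steps. A sketch of how 1.0 call sites map onto the 1.1 signature (loader path, times, and indices are illustrative):

```python
from datetime import datetime
from das2numpy import loader

ldr = loader("/path/to/das/data", "SILIXA", num_worker_threads=1)  # placeholder path
t0 = datetime(2024, 7, 23, 10, 1, 0)
t1 = datetime(2024, 7, 23, 10, 2, 0)

# 1.0 (dispatch-based): load_array(t_start, t_end, t_step, channel_start, channel_end, channel_step)
# data = ldr.load_array(t0, t1, 10, 0, 1000, 10)

# 1.1: the steps follow the channel bounds and default to 1
data = ldr.load_array(t0, t1, 0, 1000, t_step=10, channel_step=10)
full_rate = ldr.load_array(t0, t1, 0, 1000)  # replaces the old 4-argument overload
```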
{das2numpy-1.0 → das2numpy-1.1}/src/das2numpy/setups/light_tdms_reader.py CHANGED
@@ -24,19 +24,10 @@ Changed by Erik Genthe, erik.genthe@desy.de
  """
 
  import os, struct, datetime
- import pandas as pd
  import numpy as np
  import mmap
-
- import matplotlib.pyplot as plt
  from copy import deepcopy
 
- #%%
- def load_property_map(xls_file):
- prop_map = pd.read_excel(xls_file, sheetname='Sheet1')
- return prop_map[['CurrentTag', 'CorrectTag']].applymap(lambda x: x.replace(" ", "")).set_index('CurrentTag').to_dict()['CorrectTag']
-
- #prop_map = load_property_map('MetaDataTable_iDAS_TDMS_CFG_Tags.xlsx')
 
  def write_property_dict(prop_dict, out_file):
  from pprint import pformat
@@ -180,34 +171,19 @@ class TdmsReader(object):
 
  channel_length = property(_get_channel_length)
 
- def get_properties(self, mapped=False):
+ def get_properties(self):
  """
  Return a dictionary of properties. Read from file only if necessary.
  """
  # Check if already hold properties in memory
  if self._properties is None:
  self._properties = self._read_properties()
- if mapped:
- props = self._properties.copy()
- tmp = [prop_map.get(col.replace(" ", ""),col.replace(" ", "")) for col in self._properties.index]
- tmp1 = []
- def addToList(ls, val, cnt=0):
- if val not in ls:
- ls.append(val)
- else:
- newVal = val + '_' + str(cnt+1)
- if newVal not in ls:
- ls.append(newVal)
- else:
- addToList(ls, val, cnt+1)
-
- for col in tmp:
- addToList(tmp1, col)
-
- props.index = tmp1
- return props.loc[:,'Value'].to_dict()
- else:
- return self._properties.loc[:,'Value'].to_dict()
+ print(self._properties)
+ dict = {}
+ for key, _, value in self._properties:
+ dict[key] = value
+ return dict
+
 
  def _read_property(self):
  """
@@ -242,15 +218,12 @@ class TdmsReader(object):
 
  # loop through and read each property
  properties = [self._read_property() for _ in range(var)]
- df = pd.DataFrame(properties)
- df.columns = ['Property', 'Type', 'Value']
- df.set_index('Property', inplace=True)
 
  self._end_of_properties_offset = self._tdms_file.tell()
 
  self._read_chunk_size()
  #TODO: Add number of channels to properties
- return df
+ return properties
 
  def _read_chunk_size(self):
  """ Read the data chunk size from the TDMS file header."""
das2numpy-1.1/src/das2numpy.egg-info/PKG-INFO ADDED
@@ -0,0 +1,102 @@
+ Metadata-Version: 2.4
+ Name: das2numpy
+ Version: 1.1
+ Summary: A simple and universal package for loading large amounts of distributed acoustic sensing (DAS) data.
+ Author-email: Erik Genthe <erik.genthe@desy.de>
+ Project-URL: Homepage, https://git.physnet.uni-hamburg.de/wave/das2numpy
+ Classifier: Programming Language :: Python :: 3
+ Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
+ Classifier: Operating System :: OS Independent
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: numpy
+ Requires-Dist: ffmpeg-python
+ Requires-Dist: h5py
+ Requires-Dist: scipy
+ Requires-Dist: numba
+ Dynamic: license-file
+
+ # Module for loading Distributed Acoustic Sensing (DAS) data. SILIXA / OPTASENSE
+
+
+
+ ## Install
+
+ You can install via PIP.
+ ```
+ python -m pip install das2numpy
+ ```
+
+ To load data from flac files, ffmpeg (https://ffmpeg.org) needs to be installed. It is not possible to install ffmpeg with pip.
+
+ On DESY's Maxwell cluster ffmpeg is available as a module. Before using das2numpy execute:
+ ```
+ module load maxwell ffmpeg
+ ```
+
+
+
+
+ ## Python API
+
+ Example: If you want to get started quickly, have a look at the [example.py](src/example.py).
+
+ Create an instance with:
+
+ ```python
+ def loader(root_path:str, predefined_setup:str, num_worker_threads):
+ ```
+ ```
+ Loads data and returns it as a numpy array.
+ Args:
+ root_path (str): Path to directory that contains the files to be loaded from. Subdirectories are (recursively) also searched.
+ predefined_setup (str): One of ["SILIXA", "FLAC_200HZ", "OPTASENSE"]
+ num_worker_threads (int): The number of worker threads used for loading files in parallel.
+ Returns:
+ A loader instance to load data. Call instance.load_array(...).
+ ```
+
+ Use one of the load_array(..) functions of that instance.
+
+ ```python
+ def load_array(t_start:datetime, t_end:datetime, channel_start:int, channel_end:int) -> NP.ndarray:
+ ```
+ ```
+ Loading data into numpy array.
+ Returns nothing, the data can be accessed by accessing the data field of this instance.
+ Warning: using a different value then 1 for t_step or channel_step can result in a high cpu-usage.
+ Consider using multithreaded=True in the constructor and a high amount of workers if needed.
+ Args:
+ t_start (datetime): datetime object which defines the start of the data to load.
+ t_end (datetime): datetime object which defines the end of the data to load.
+ channel_start (int): The starting index of the sensor position in the data (inclusive).
+ channel_end (int): The ending index of the sensors position in the data (exclusive).
+ t_step (int): Reduces the data on the time axis by factor t_step. Uses mean averaging. Default is 1.
+ channel_step (int): Like t_step, but for the sensor position.
+ Returns:
+ A 2d-numpy-array containing the data.
+ The first axis corresponds to the time, the second to the channel (sensor position)
+ ```
+
+ For more details have a look at the inline documentation of [chunk.py](src/das2numpy/chunk.py)
+
+
+ ## Command Line Interface
+
+ Creates a numpy file from the requested data. Optionally, the binary data can be printed to stdout.
+
+ Example call:
+ ```
+ python -m das2numpy "SILIXA" /pnfs/desy.de/m/project/iDAS/raw/2024-DESY/2024-07-23-desy 2024-07-23T10:01:00 2024-07-23T10:02:00 10 0 1000 10 default
+ ```
+
+ For more information:
+ ```
+ python -m das2numpy -h
+ ```
+
+
+ ## Issues
+
+ - Loading from OPTASENSE may not work anymore. I haven't tested it for a long time.
{das2numpy-1.0 → das2numpy-1.1}/src/das2numpy.egg-info/SOURCES.txt CHANGED
@@ -2,16 +2,15 @@ LICENSE
  README.md
  pyproject.toml
  src/example.py
- src/test_downsampled.py
  src/das2numpy/__init__.py
  src/das2numpy/__main__.py
  src/das2numpy/chunk.py
  src/das2numpy/filefinder.py
- src/das2numpy/test.py
  src/das2numpy/utils.py
  src/das2numpy.egg-info/PKG-INFO
  src/das2numpy.egg-info/SOURCES.txt
  src/das2numpy.egg-info/dependency_links.txt
+ src/das2numpy.egg-info/requires.txt
  src/das2numpy.egg-info/top_level.txt
  src/das2numpy/setups/flac_200hz.py
  src/das2numpy/setups/light_tdms_reader.py
das2numpy-1.1/src/das2numpy.egg-info/requires.txt ADDED
@@ -0,0 +1,5 @@
+ numpy
+ ffmpeg-python
+ h5py
+ scipy
+ numba
{das2numpy-1.0 → das2numpy-1.1}/src/das2numpy.egg-info/top_level.txt CHANGED
@@ -1,3 +1,2 @@
  das2numpy
  example
- test_downsampled
{das2numpy-1.0 → das2numpy-1.1}/src/example.py CHANGED
@@ -6,21 +6,24 @@ from das2numpy import loader, utils
 
 
  print("Load data to numpy-array")
- t_start = datetime(2025, 3, 25, 1, 0, 0)
- t_end = datetime(2025, 3, 25, 1, 1, 0)
+ t_start = datetime(2024, 7, 23, 1, 0, 0)
+ t_end = datetime(2024, 7, 23, 1, 1, 0)
  channel_start = 0
  channel_end = -1
- loader = loader("/pnfs/desy.de/m/project/iDAS/raw/2025-DESY/2025-03-25-desy", "SILIXA", 1)
+ #loader = loader("/pnfs/desy.de/m/project/iDAS/raw/2024-DESY/2024-07-23-desy", "SILIXA", 1) # 1000 Hz
+ loader = loader("/pnfs/desy.de/m/project/iDAS/work/IDAS_200HZ/", "FLAC_200HZ", 1) # 200 Hz
  data = loader.load_array(t_start, t_end, channel_start, channel_end)
 
  print("Reduce data by binning (mean averaging)")
  bin_factors = (100, 10)
  data = utils.bin(data, bin_factors) # Reduce time sampling and spatial sampling by averaging.
- sampling_hz = 1000.0 / bin_factors[0]
+ sampling_hz = 200.0 / bin_factors[0]
  channel_spacing = 1.0 * bin_factors[1]
 
- NP.save("data.npy", data)
+ # Saving loaded data to numpy file
+ NP.save("data.npy", data)
 
+ # Creating a waterfall plot
  print("Create plot with pyplot")
  PP.title(f"{t_start.isoformat()}")
  PP.imshow(
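utils.bin itself is not part of this diff; a plausible mean-binning equivalent, assuming the (time_factor, channel_factor) semantics used in example.py, looks like this:

```python
import numpy as np

def mean_bin(data, factors):
    """Assumed equivalent of utils.bin: mean-average blocks of
    factors[0] time samples by factors[1] channels (not the real
    implementation, which this diff does not show)."""
    ft, fc = factors
    nt = (data.shape[0] // ft) * ft  # trim ragged edges to whole bins
    nc = (data.shape[1] // fc) * fc
    return data[:nt, :nc].reshape(nt // ft, ft, nc // fc, fc).mean(axis=(1, 3))

# With bin_factors = (100, 10) on 200 Hz / 1 m data this gives
# 200/100 = 2 Hz sampling and 10 m channel spacing, matching example.py.
demo = mean_bin(np.arange(24.0).reshape(6, 4), (3, 2))
print(demo.shape)  # (2, 2)
```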
das2numpy-1.0/PKG-INFO DELETED
@@ -1,93 +0,0 @@
- Metadata-Version: 2.4
- Name: das2numpy
- Version: 1.0
- Summary: A simple and universal package for loading large amounts of distributed acoustic sensing (DAS) data.
- Author-email: Erik Genthe <erik.genthe@desy.de>
- Project-URL: Homepage, https://git.physnet.uni-hamburg.de/wave/das2numpy
- Classifier: Programming Language :: Python :: 3
- Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
- Classifier: Operating System :: OS Independent
- Requires-Python: >=3.8
- Description-Content-Type: text/markdown
- License-File: LICENSE
- Dynamic: license-file
-
- # Module for loading Distributed Acoustic Sensing (DAS) data. SILIXA / OPTASENSE
-
- Python: If you want to get started quickly, have a look at the [example.py](src/example.py).
-
-
- ## Install
-
- You can install via PIP.
- ```
- python -m pip install das2numpy
- ```
-
- If you want to run the source have a look at *install_dependencies.sh*.
-
-
- ## Use as python module
- ### API
-
-
- #### Recommended: simplest interface
- ```python
- def load_array(t_start:datetime, t_end:datetime, channel_start:int, channel_end:int) -> NP.ndarray:
- ```
-
- ```
- Loads data and returns it as a numpy array.
- Args:
- t_start (datetime): datetime object which defines the start of the data to load.
- t_end (datetime): datetime object which defines the end of the data to load.
- channel_start (int): The starting index of sensor in the data (inclusive).
- channel_end (int): The ending index of sensors in the data (exclusive).
- Returns:
- A 2d-numpy-array containing the data.
- The first axis corresponds to the time, the second, to the channel
- ```
-
-
-
- #### More detailed interface
- ```python
- def load_array(t_start:datetime, t_end:datetime, t_step:int, channel_start:int, channel_end:int, channel_step:int) -> NP.ndarray:
- ```
-
- ``` Loading data into numpy array.
- Returns nothing, the data can be accessed by accessing the data field of this instance.
- Warning: using a different value then 1 for t_step or channel_step can result in a high cpu-usage.
- Consider using multithreaded=True in the constructor and a high amount of workers if needed.
- Constraints:
- t_start has to be less or equal t_end,
- same for channel_start and channel_end.
- t_step and channel_step have to be greater then 0
- Args:
- t_start (datetime): datetime object which defines the start of the data to load.
- t_end (datetime): datetime object which defines the end of the data to load.
- t_step (int): If you, for example only want to load the data of every fourth timestep use t_end=4
- channel_start (int): The starting index of sensor in the data (inclusive).
- channel_end (int): The ending index of sensors in the data (exclusive).
- channel_step (int): Like t_step, but for the sensor position.
- Returns:
- A 2d-numpy-array containing the data.
- The first axis corresponds to the time, the second, to the channel
- ```
-
- ### Lower level interfaces
- There are also lower level interfaces in the module.
- For example, the above interfaces also exist with POSIX timestamps in milliseconds instead of datetime objects. These timestamps have exactly the same resolution as the time axis of the resulting array.
-
-
- ## Use as command line interface
-
- Example call:
- ```
- python -m das2numpy "SILIXA" /pnfs/desy.de/m/project/iDAS/raw/2025-DESY/2025-03-25-desy 2025-03-25T10:01:00 2025-03-25T10:02:00 10 0 1000 10 default
- ```
-
- For more information:
- ```
- python -m das2numpy -h
- ```
das2numpy-1.0/README.md DELETED
@@ -1,79 +0,0 @@
- # Module for loading Distributed Acoustic Sensing (DAS) data. SILIXA / OPTASENSE
-
- Python: If you want to get started quickly, have a look at the [example.py](src/example.py).
-
-
- ## Install
-
- You can install via PIP.
- ```
- python -m pip install das2numpy
- ```
-
- If you want to run the source have a look at *install_dependencies.sh*.
-
-
- ## Use as python module
- ### API
-
-
- #### Recommended: simplest interface
- ```python
- def load_array(t_start:datetime, t_end:datetime, channel_start:int, channel_end:int) -> NP.ndarray:
- ```
-
- ```
- Loads data and returns it as a numpy array.
- Args:
- t_start (datetime): datetime object which defines the start of the data to load.
- t_end (datetime): datetime object which defines the end of the data to load.
- channel_start (int): The starting index of sensor in the data (inclusive).
- channel_end (int): The ending index of sensors in the data (exclusive).
- Returns:
- A 2d-numpy-array containing the data.
- The first axis corresponds to the time, the second, to the channel
- ```
-
-
-
- #### More detailed interface
- ```python
- def load_array(t_start:datetime, t_end:datetime, t_step:int, channel_start:int, channel_end:int, channel_step:int) -> NP.ndarray:
- ```
-
- ``` Loading data into numpy array.
- Returns nothing, the data can be accessed by accessing the data field of this instance.
- Warning: using a different value then 1 for t_step or channel_step can result in a high cpu-usage.
- Consider using multithreaded=True in the constructor and a high amount of workers if needed.
- Constraints:
- t_start has to be less or equal t_end,
- same for channel_start and channel_end.
- t_step and channel_step have to be greater then 0
- Args:
- t_start (datetime): datetime object which defines the start of the data to load.
- t_end (datetime): datetime object which defines the end of the data to load.
- t_step (int): If you, for example only want to load the data of every fourth timestep use t_end=4
- channel_start (int): The starting index of sensor in the data (inclusive).
- channel_end (int): The ending index of sensors in the data (exclusive).
- channel_step (int): Like t_step, but for the sensor position.
- Returns:
- A 2d-numpy-array containing the data.
- The first axis corresponds to the time, the second, to the channel
- ```
-
- ### Lower level interfaces
- There are also lower level interfaces in the module.
- For example, the above interfaces also exist with POSIX timestamps in milliseconds instead of datetime objects. These timestamps have exactly the same resolution as the time axis of the resulting array.
-
-
- ## Use as command line interface
-
- Example call:
- ```
- python -m das2numpy "SILIXA" /pnfs/desy.de/m/project/iDAS/raw/2025-DESY/2025-03-25-desy 2025-03-25T10:01:00 2025-03-25T10:02:00 10 0 1000 10 default
- ```
-
- For more information:
- ```
- python -m das2numpy -h
- ```
das2numpy-1.0/src/das2numpy/test.py DELETED
@@ -1,158 +0,0 @@
- """
- Deprecated
-
- Unittests for this dataloader-module
- by Erik Genthe
- 05.01.2022
- """
- from math import ceil, floor
- import sys as SYS
- from os import path as P
- import datetime as DT
- import h5py as H5PY
- import numpy as NP
-
- try:
- import dataloader as D
- except ModuleNotFoundError as e:
- raise RuntimeError("TO RUN THIS TEST, MOVE IT INTO THE PARENT DIR FIRST!") from e
- from dataloader.filefinder import to_posix_timestamp_ms
-
-
-
- def test_silixa_filefinder():
- #file_path = '/wave/seismic-rawdata/desy_12km_1m_P7gauss/desy_UTC_20210522_155121.950.tdms'
- #ls /wave/seismic-rawdata/desy_12km_1m_P7gauss -l | grep -n --invert-match 504946688
-
- # Find one specific file...
- time = DT.datetime(2021, 5, 30, 14, 00, 00)
- filelist = D.silixa.FILE_FINDER.get_range(time, time)
- assert len(filelist) == 1
- assert filelist[0][1].endswith('/desy_UTC_20210530_135950.619.tdms')
-
- # Find all files...
- filelist = D.silixa.FILE_FINDER.get_range_posix(0, D.to_posix_timestamp_ms(DT.datetime.now()))
- assert len(filelist) > 9000
-
-
- def test_optasense_filefinder():
- # Find one specific file...
- time = DT.datetime(2021, 5, 30, 14, 00, 00)
- filelist = D.optasense.FILE_FINDER.get_range(time, time)
- assert len(filelist) == 1
- assert filelist[0][1].endswith('2021-05-30T135924Z.h5')
-
- # Find all files...
- filelist = D.optasense.FILE_FINDER.get_range_posix(0, D.to_posix_timestamp_ms(DT.datetime.now()))
- assert len(filelist) > 9000
-
-
- def test_fast_optasense_filefinder():
- # Find one specific file...
- time = DT.datetime(2021, 5, 30, 14, 00, 00)
- filelist = D.fast_optasense.FILE_FINDER.get_range(time, time)
- assert len(filelist) == 1
- assert filelist[0][1].endswith('2021-05-30T135924Z.h5.bin')
-
- # Find all files...
- filelist = D.optasense.FILE_FINDER.get_range_posix(0, D.to_posix_timestamp_ms(DT.datetime.now()))
- assert len(filelist) > 9000
-
-
-
- def test_chunk(chunk, MAX_CHANNEL):
- import time as TIME
- #MAX_CHANNEL = 12608
- #chunk = D.silixa.create_chunk()
- t_start: int = to_posix_timestamp_ms(DT.datetime(2021, 5, 30, 14, 00, 00))
- t_end1: int = to_posix_timestamp_ms(DT.datetime(2021, 5, 30, 14, 00, 1))
- t_end2: int = to_posix_timestamp_ms(DT.datetime(2021, 5, 30, 14, 1, 30))
- t_end3: int = to_posix_timestamp_ms(DT.datetime(2021, 5, 30, 14, 10, 00))
- t_end_one_hour: int = to_posix_timestamp_ms(DT.datetime(2021, 5, 30, 15, 00, 00))
- print()
-
- chunk.load(t_start, t_end1, 1, 0, MAX_CHANNEL, 1)
- assert chunk.data.shape == (1000, MAX_CHANNEL)
- print()
-
- chunk.load(t_start, t_end2, 3, 0, MAX_CHANNEL, 9)
- assert chunk.data.shape == (30000, ceil(MAX_CHANNEL / 9))
- print()
-
- # Now some benchmarks...
- #bench_start = TIME.time()
- #file_handle = open("/wave/seismic-rawdata/OPTA/Disk2/DESY-Rec-11-GL8m-Chan10000_2021-05-30T07_55_42+0100/DESY-Rec-11-GL8m-Chan10000_2021-05-30T135924Z.h5", 'rb')
- #file:H5PY.File = H5PY.File(file_handle, 'r')
- #data = file['Acquisition']['Raw[0]']['RawData'] # Data is not loaded into memory at this point! (Lazy evaluation)
- #data = NP.array(data)
- #print("TIME for loading one whole file using h5py:", TIME.time() - bench_start, "\n")
-
- bench_start = TIME.time()
- chunk.load(t_start, t_end3, 1, 0, 1000, 1)
- print("Time for loading the first 1000 sensors of one hour of data: %4f\n" % (TIME.time() - bench_start))
- assert chunk.data.shape == (600000, 1000)
-
- bench_start = TIME.time()
- chunk.load(t_start, t_end_one_hour, 1, 0, MAX_CHANNEL, 10)
- print("Time for loading one hour of data with with sensor_step=10: %4f\n" % (TIME.time() - bench_start))
- assert chunk.data.shape == (1000*60*60, ceil(MAX_CHANNEL/10))
-
- bench_start = TIME.time()
- chunk.load(t_start, t_end_one_hour, 1, 0, 100, 1)
- print("Time for loading 100 sensors with 1 hour of data: %4f\n" % (TIME.time() - bench_start))
-
- bench_start = TIME.time()
- chunk.load(t_start, t_end_one_hour, 1, 0, 1000, 1)
- print("Time for loading 1000 sensors with 1 hour of data: %4f\n" % (TIME.time() - bench_start))
-
- bench_start = TIME.time()
- chunk.load(t_start, t_end_one_hour, 1, 0, MAX_CHANNEL, 1)
- print("Time for loading 1 hour completely: %4f\n" % (TIME.time() - bench_start))
-
-
-
- def test_equalness_of_fast_opta_simple():
- t_start: int = to_posix_timestamp_ms(DT.datetime(2021, 5, 30, 14, 00, 00))
- t_end: int = to_posix_timestamp_ms(DT.datetime(2021, 5, 30, 14, 00, 1))
-
- chunk_fast = D.fast_optasense.create_chunk()
- chunk_fast.load(t_start, t_end, 1, 0, 10, 1)
-
- chunk_normal = D.optasense.create_chunk()
- chunk_normal.load(t_start, t_end, 1, 0, 10, 1)
-
- assert chunk_fast.data.shape == chunk_normal.data.shape
- assert NP.array_equiv(chunk_fast.data, chunk_normal.data)
-
-
- def test_equalness_of_fast_opta():
- t_start: int = to_posix_timestamp_ms(DT.datetime(2021, 5, 30, 14, 00, 00))
- t_end: int = to_posix_timestamp_ms(DT.datetime(2021, 5, 30, 14, 00, 1))
-
- chunk_fast = D.fast_optasense.create_chunk()
- chunk_fast.load(t_start, t_end, 3, 2000, 7000, 9)
-
- chunk_normal = D.optasense.create_chunk()
- chunk_normal.load(t_start, t_end, 3, 2000, 7000, 9)
-
- assert chunk_fast.data.shape == chunk_normal.data.shape
- assert NP.array_equiv(chunk_fast.data, chunk_normal.data)
-
-
- if __name__ == '__main__':
- #test_equalness_of_fast_opta_simple()
- #test_equalness_of_fast_opta()
- #test_fast_optasense_filefinder()
- #test_silixa_filefinder()
- #test_optasense_filefinder()
-
-
-
- print("\nSilixa benchmark:")
- test_chunk(D.silixa.create_chunk(), 12608)
-
- print("\nFast Optasense benchmark:")
- test_chunk(D.fast_optasense.create_chunk(), 10000)
-
- #print("\nOptasense benchmark:")
- #test_chunk(D.optasense.create_chunk(), 10000)
das2numpy-1.0/src/das2numpy.egg-info/PKG-INFO DELETED
@@ -1,93 +0,0 @@
- Metadata-Version: 2.4
- Name: das2numpy
- Version: 1.0
- Summary: A simple and universal package for loading large amounts of distributed acoustic sensing (DAS) data.
- Author-email: Erik Genthe <erik.genthe@desy.de>
- Project-URL: Homepage, https://git.physnet.uni-hamburg.de/wave/das2numpy
- Classifier: Programming Language :: Python :: 3
- Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
- Classifier: Operating System :: OS Independent
- Requires-Python: >=3.8
- Description-Content-Type: text/markdown
- License-File: LICENSE
- Dynamic: license-file
-
- # Module for loading Distributed Acoustic Sensing (DAS) data. SILIXA / OPTASENSE
-
- Python: If you want to get started quickly, have a look at the [example.py](src/example.py).
-
-
- ## Install
-
- You can install via PIP.
- ```
- python -m pip install das2numpy
- ```
-
- If you want to run the source have a look at *install_dependencies.sh*.
-
-
- ## Use as python module
- ### API
-
-
- #### Recommended: simplest interface
- ```python
- def load_array(t_start:datetime, t_end:datetime, channel_start:int, channel_end:int) -> NP.ndarray:
- ```
-
- ```
- Loads data and returns it as a numpy array.
- Args:
- t_start (datetime): datetime object which defines the start of the data to load.
- t_end (datetime): datetime object which defines the end of the data to load.
- channel_start (int): The starting index of sensor in the data (inclusive).
- channel_end (int): The ending index of sensors in the data (exclusive).
- Returns:
- A 2d-numpy-array containing the data.
- The first axis corresponds to the time, the second, to the channel
- ```
-
-
-
- #### More detailed interface
- ```python
- def load_array(t_start:datetime, t_end:datetime, t_step:int, channel_start:int, channel_end:int, channel_step:int) -> NP.ndarray:
- ```
-
- ``` Loading data into numpy array.
- Returns nothing, the data can be accessed by accessing the data field of this instance.
- Warning: using a different value then 1 for t_step or channel_step can result in a high cpu-usage.
- Consider using multithreaded=True in the constructor and a high amount of workers if needed.
- Constraints:
- t_start has to be less or equal t_end,
- same for channel_start and channel_end.
- t_step and channel_step have to be greater then 0
- Args:
- t_start (datetime): datetime object which defines the start of the data to load.
- t_end (datetime): datetime object which defines the end of the data to load.
- t_step (int): If you, for example only want to load the data of every fourth timestep use t_end=4
- channel_start (int): The starting index of sensor in the data (inclusive).
- channel_end (int): The ending index of sensors in the data (exclusive).
- channel_step (int): Like t_step, but for the sensor position.
- Returns:
- A 2d-numpy-array containing the data.
- The first axis corresponds to the time, the second, to the channel
- ```
-
- ### Lower level interfaces
- There are also lower level interfaces in the module.
- For example, the above interfaces also exist with POSIX timestamps in milliseconds instead of datetime objects. These timestamps have exactly the same resolution as the time axis of the resulting array.
-
-
- ## Use as command line interface
-
- Example call:
- ```
- python -m das2numpy "SILIXA" /pnfs/desy.de/m/project/iDAS/raw/2025-DESY/2025-03-25-desy 2025-03-25T10:01:00 2025-03-25T10:02:00 10 0 1000 10 default
- ```
-
- For more information:
- ```
- python -m das2numpy -h
- ```
das2numpy-1.0/src/test_downsampled.py DELETED
@@ -1,54 +0,0 @@
- import numpy as NP
- import sys
- from datetime import datetime
- import matplotlib.pyplot as PP
- from das2numpy import loader, utils
-
- USE_DOWNSAMPLED = False
-
- print("Load data to numpy-array")
- t_start = datetime(2025, 10, 14, 2, 58, 59)
- t_end = datetime(2025, 10, 14, 2, 59, 1)
- channel_start = 1000
- channel_end = 3000
-
- if USE_DOWNSAMPLED:
- loader = loader("/pnfs/desy.de/m/project/iDAS/work/derived-data/DOWNSAMPLED_200HZ/2025-10/", "SILIXA_200HZ", 1)
- else:
- loader = loader("/pnfs/desy.de/m/project/iDAS/raw/2025-DESY/2025-10-14-desy", "SILIXA", 1)
- data = loader.load_array(t_start, t_end, channel_start, channel_end)
-
- print("Reduce data by binning (mean averaging)")
- if USE_DOWNSAMPLED:
- bin_factors = (1, 1)
- data = utils.bin(data, bin_factors) # Reduce time sampling and spatial sampling by averaging.
- sampling_hz = 200.0 / bin_factors[0]
- else:
- bin_factors = (5, 1)
- data = utils.bin(data, bin_factors) # Reduce time sampling and spatial sampling by averaging.
- sampling_hz = 1000.0 / bin_factors[0]
- channel_spacing = 1.0 * bin_factors[1]
-
- NP.save("data.npy", data)
-
- print("Create plot with pyplot")
- PP.title(f"{t_start.isoformat()}")
- PP.imshow(
- data,
- cmap = "seismic",
- aspect = "auto",
- interpolation = "nearest",
- vmin = -1e-7,
- vmax = +1e-7,
- extent = (
- channel_start, channel_start + (data.shape[1] * channel_spacing),
- data.shape[0] / sampling_hz, 0
- )
- )
- PP.xlabel("Position [m]")
- PP.ylabel("Time [s]")
- PP.colorbar(label="Strain-rate [$\\frac{m}{m \\cdot s}$]")
- if USE_DOWNSAMPLED:
- PP.savefig("waterfall_downsampled.png")
- else:
- PP.savefig("waterfall.png")