shepherd-data 2023.12.1__py3-none-any.whl → 2024.4.1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package versions exactly as they appear in their respective public registries.
shepherd_data/__init__.py CHANGED
@@ -4,11 +4,12 @@ Provides classes for storing and retrieving sampled IV data to/from
4
4
  HDF5 files.
5
5
 
6
6
  """
7
+
7
8
  from shepherd_core import Writer
8
9
 
9
10
  from .reader import Reader
10
11
 
11
- __version__ = "2023.12.1"
12
+ __version__ = "2024.04.1"
12
13
 
13
14
  __all__ = [
14
15
  "Reader",
shepherd_data/cli.py CHANGED
@@ -1,5 +1,5 @@
1
- """Command definitions for CLI
2
- """
1
+ """Command definitions for CLI"""
2
+
3
3
  import logging
4
4
  import os
5
5
  import sys
@@ -365,7 +365,7 @@ def plot(
365
365
  else:
366
366
  shpr.plot_to_file(start, end, width, height)
367
367
  except TypeError as _xpc:
368
- logger.error("ERROR: will skip file, caught exception: %s", _xpc)
368
+ logger.exception("ERROR: will skip file, caught exception: %s", _xpc)
369
369
  if multiplot:
370
370
  logger.info("Got %d datasets to plot", len(data))
371
371
  mpl_path = Reader.multiplot_to_file(data, in_data, width, height)
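The functional change in this hunk is the switch from `logger.error()` to `logger.exception()` when a `TypeError` interrupts plotting. A minimal standalone sketch of the difference (the logger name is illustrative, not taken from the package): `logger.exception()` logs at ERROR level just like before, but additionally attaches the traceback of the exception currently being handled.

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("demo")

try:
    raise TypeError("plot received incompatible arguments")
except TypeError as _xpc:
    # old behaviour: only the formatted message is emitted
    logger.error("ERROR: will skip file, caught exception: %s", _xpc)
    # new behaviour: same message, plus the full traceback of the active exception
    logger.exception("ERROR: will skip file, caught exception: %s", _xpc)
```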
shepherd_data/ivonne.py CHANGED
@@ -2,6 +2,7 @@
2
2
  to generate valid shepherd-data for emulation
3
3
 
4
4
  """
5
+
5
6
  import errno
6
7
  import logging
7
8
  import math
@@ -34,7 +35,7 @@ def get_isc(coeffs: pd.DataFrame): # noqa: ANN201
34
35
 
35
36
 
36
37
  class Reader:
37
- """container for converters to shepherd-data"""
38
+ """Container for converters that bridge the gap to shepherds data-files."""
38
39
 
39
40
  _logger: logging.Logger = logging.getLogger("SHPData.IVonne.Reader")
40
41
 
@@ -102,8 +103,9 @@ class Reader:
102
103
  pts_per_curve: int = 1000,
103
104
  duration_s: Optional[float] = None,
104
105
  ) -> None:
105
- """Transforms previously recorded parameters to shepherd hdf database with IV curves.
106
- Shepherd should work with IV 'surfaces', where we have a stream of IV curves
106
+ """Transform recorded parameters to shepherd hdf database with IV curves.
107
+
108
+ Shepherd works with IV 'surfaces', which is a stream of IV curves.
107
109
 
108
110
  :param shp_output: Path where the resulting hdf file shall be stored
109
111
  :param v_max: Maximum voltage supported by shepherd
@@ -134,7 +136,7 @@ class Reader:
134
136
  for idx in job_iter:
135
137
  idx_top = min(idx + max_elements, df_elements_n)
136
138
  df_slice = self._df.iloc[idx : idx_top + 1].copy()
137
- df_slice["timestamp"] = pd.TimedeltaIndex(data=df_slice["time"], unit="s")
139
+ df_slice["timestamp"] = pd.to_timedelta(df_slice["time"], unit="s")
138
140
  df_slice = df_slice.set_index("timestamp")
139
141
  # warning: .interpolate does crash in debug-mode with typeError
140
142
  df_slice = (
@@ -163,7 +165,7 @@ class Reader:
163
165
  duration_s: Optional[float] = None,
164
166
  tracker: Optional[MPPTracker] = None,
165
167
  ) -> None:
166
- """Transforms shepherd IV curves to shepherd IV traces.
168
+ """Transform shepherd IV curves to shepherd IV samples / traces.
167
169
 
168
170
  For the 'buck' and 'buck-boost' modes, shepherd takes voltage and current traces.
169
171
  These can be recorded with shepherd or generated from existing IV curves by, for
@@ -180,6 +182,7 @@ class Reader:
180
182
  :param v_max: Maximum voltage supported by shepherd
181
183
  :param duration_s: time to stop in seconds, counted from beginning
182
184
  :param tracker: VOC or OPT
185
+
183
186
  """
184
187
  if self._df is None:
185
188
  raise RuntimeError("IVonne Context was not entered - file not open!")
@@ -213,7 +216,7 @@ class Reader:
213
216
  df_slice.loc[:, "voc"] = get_voc(df_slice)
214
217
  df_slice.loc[df_slice["voc"] >= v_max, "voc"] = v_max
215
218
  df_slice = tracker.process(df_slice)
216
- df_slice["timestamp"] = pd.TimedeltaIndex(data=df_slice["time"], unit="s")
219
+ df_slice["timestamp"] = pd.to_timedelta(df_slice["time"], unit="s")
217
220
  df_slice = df_slice[["time", "v", "i", "timestamp"]].set_index("timestamp")
218
221
  # warning: .interpolate does crash in debug-mode with typeError
219
222
  df_slice = (
@@ -231,7 +234,7 @@ class Reader:
231
234
  v_max: float = 5.0,
232
235
  duration_s: Optional[float] = None,
233
236
  ) -> None:
234
- """Transforms ivonne-parameters to upsampled version for shepherd
237
+ """Transform ivonne-parameters to up-sampled versions for shepherd.
235
238
 
236
239
  :param shp_output: Path where the resulting hdf file shall be stored
237
240
  :param v_max: Maximum voltage supported by shepherd
@@ -264,7 +267,7 @@ class Reader:
264
267
  df_slice.loc[:, "voc"] = get_voc(df_slice)
265
268
  df_slice.loc[df_slice["voc"] >= v_max, "voc"] = v_max
266
269
  df_slice.loc[:, "isc"] = get_isc(df_slice)
267
- df_slice["timestamp"] = pd.TimedeltaIndex(data=df_slice["time"], unit="s")
270
+ df_slice["timestamp"] = pd.to_timedelta(df_slice["time"], unit="s")
268
271
  df_slice = df_slice[["time", "voc", "isc", "timestamp"]].set_index("timestamp")
269
272
  # warning: .interpolate does crash in debug-mode with typeError
270
273
  df_slice = (
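All three converters in `ivonne.py` build their timestamp index the same way, and each occurrence of `pd.TimedeltaIndex(data=..., unit="s")` is replaced by `pd.to_timedelta(..., unit="s")`. A minimal sketch of the pattern with made-up values (only the column names follow the diff); `pd.to_timedelta()` yields an equivalent timedelta column while avoiding the `unit` keyword of the `TimedeltaIndex` constructor, which newer pandas releases deprecate in favour of `pd.to_timedelta`:

```python
import pandas as pd

# stand-in for one slice of IVonne data: a float "time" column in seconds
df_slice = pd.DataFrame(
    {"time": [0.00, 0.02, 0.04, 0.06], "voc": [3.1, 3.2, 3.0, 2.9]}
)

# build the timestamp column the way the updated ivonne.py does
df_slice["timestamp"] = pd.to_timedelta(df_slice["time"], unit="s")

# downstream usage stays the same: index by timestamp, then resample/interpolate
df_slice = df_slice.set_index("timestamp").resample("10ms").interpolate()
print(df_slice.head())
```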
shepherd_data/mppt.py CHANGED
@@ -1,6 +1,8 @@
1
1
  """Harvesters, simple and fast approach.
2
- Might be exchanged by shepherds py-model of pru-harvesters
2
+
3
+ Might be exchanged by shepherds py-model of pru-harvesters.
3
4
  """
5
+
4
6
  import numpy as np
5
7
  import pandas as pd
6
8
 
@@ -8,16 +10,11 @@ from shepherd_core import Calc_t
8
10
 
9
11
 
10
12
  def iv_model(voltages: Calc_t, coeffs: pd.Series) -> Calc_t:
11
- """Simple diode based model of a solar panel IV curve.
12
-
13
- Args:
14
- ----
15
- :param voltages: Load voltage of the solar panel
16
- :param coeffs: three generic coefficients
13
+ """Calculate simple diode based model (equivalent circuit diagram) of a solar panel IV curve.
17
14
 
18
- Returns:
19
- -------
20
- Solar current at given load voltage
15
+ :param voltages: Load voltage of the solar panel
16
+ :param coeffs: three generic coefficients
17
+ :return: Solar current at given load voltage
21
18
  """
22
19
  currents = float(coeffs["a"]) - float(coeffs["b"]) * (
23
20
  np.exp(float(coeffs["c"]) * voltages) - 1.0
@@ -40,7 +37,7 @@ def find_oc(v_arr: np.ndarray, i_arr: np.ndarray, ratio: float = 0.05) -> np.nda
40
37
 
41
38
 
42
39
  class MPPTracker:
43
- """Prototype
40
+ """Prototype for a MPPT-class.
44
41
 
45
42
  :param v_max: Maximum voltage supported by shepherd
46
43
  :param pts_per_curve: resolution of internal ivcurve
@@ -52,15 +49,15 @@ class MPPTracker:
52
49
  self.v_proto: np.ndarray = np.linspace(0, v_max, pts_per_curve)
53
50
 
54
51
  def process(self, coeffs: pd.DataFrame) -> pd.DataFrame:
55
- """Apply harvesting model to input data
52
+ """Apply harvesting model to input data.
56
53
 
57
54
  :param coeffs: ivonne coefficients
58
- :return:
55
+ :return: ivsample-data
59
56
  """
60
57
 
61
58
 
62
59
  class OpenCircuitTracker(MPPTracker):
63
- """Open-circuit based MPPT
60
+ """Open-circuit (-voltage) based MPPT.
64
61
 
65
62
  :param v_max: Maximum voltage supported by shepherd
66
63
  :param pts_per_curve: resolution of internal ivcurve
@@ -72,6 +69,11 @@ class OpenCircuitTracker(MPPTracker):
72
69
  self.ratio = ratio
73
70
 
74
71
  def process(self, coeffs: pd.DataFrame) -> pd.DataFrame:
72
+ """Apply harvesting model to input data.
73
+
74
+ :param coeffs: ivonne coefficients
75
+ :return: ivsample-data
76
+ """
75
77
  coeffs["icurve"] = coeffs.apply(lambda x: iv_model(self.v_proto, x), axis=1)
76
78
  if "voc" not in coeffs.columns:
77
79
  coeffs["voc"] = coeffs.apply(lambda x: find_oc(self.v_proto, x["ivcurve"]), axis=1)
@@ -85,7 +87,7 @@ class OpenCircuitTracker(MPPTracker):
85
87
 
86
88
 
87
89
  class OptimalTracker(MPPTracker):
88
- """Optimal MPPT
90
+ """Optimal MPPT by looking at the whole curve.
89
91
 
90
92
  Calculates optimal harvesting voltage for every time and corresponding IV curve.
91
93
 
@@ -97,6 +99,11 @@ class OptimalTracker(MPPTracker):
97
99
  super().__init__(v_max, pts_per_curve)
98
100
 
99
101
  def process(self, coeffs: pd.DataFrame) -> pd.DataFrame:
102
+ """Apply harvesting model to input data.
103
+
104
+ :param coeffs: ivonne coefficients
105
+ :return: ivsample-data
106
+ """
100
107
  coeffs["icurve"] = coeffs.apply(lambda x: iv_model(self.v_proto, x), axis=1)
101
108
  coeffs["pcurve"] = coeffs.apply(lambda x: self.v_proto * x["icurve"], axis=1)
102
109
  coeffs["max_pos"] = coeffs.apply(lambda x: np.argmax(x["pcurve"]), axis=1)
shepherd_data/reader.py CHANGED
@@ -1,5 +1,5 @@
1
- """Reader-Baseclass
2
- """
1
+ """Reader-Baseclass for opening shepherds hdf5-files."""
2
+
3
3
  import math
4
4
  from datetime import datetime
5
5
  from pathlib import Path
@@ -26,6 +26,7 @@ class Reader(CoreReader):
26
26
  ----
27
27
  file_path: Path of hdf5 file containing shepherd data with iv-samples, iv-curves or isc&voc
28
28
  verbose: more info during usage, 'None' skips the setter
29
+
29
30
  """
30
31
 
31
32
  def __init__(
@@ -37,7 +38,7 @@ class Reader(CoreReader):
37
38
  super().__init__(file_path, verbose=verbose)
38
39
 
39
40
  def save_csv(self, h5_group: h5py.Group, separator: str = ";") -> int:
40
- """Extract numerical data via csv
41
+ """Extract numerical data from group and store it into csv.
41
42
 
42
43
  :param h5_group: can be external and should probably be downsampled
43
44
  :param separator: used between columns
@@ -73,7 +74,7 @@ class Reader(CoreReader):
73
74
  return h5_group["time"][:].shape[0]
74
75
 
75
76
  def save_log(self, h5_group: h5py.Group, *, add_timestamp: bool = True) -> int:
76
- """Save dataset in group as log, optimal for logged dmesg and exceptions
77
+ """Save dataset from group as log, optimal for logged 'dmesg' and console-output.
77
78
 
78
79
  :param h5_group: can be external
79
80
  :param add_timestamp: can be external
@@ -114,6 +115,7 @@ class Reader(CoreReader):
114
115
  *,
115
116
  show: bool = True,
116
117
  ) -> int:
118
+ """Print warning messages from log in data-group."""
117
119
  _count = self.count_errors_in_log(group_name, min_level)
118
120
  if _count < 1:
119
121
  return 0
@@ -145,7 +147,7 @@ class Reader(CoreReader):
145
147
 
146
148
  def downsample(
147
149
  self,
148
- data_src: h5py.Dataset,
150
+ data_src: Union[h5py.Dataset, np.ndarray],
149
151
  data_dst: Union[None, h5py.Dataset, np.ndarray],
150
152
  start_n: int = 0,
151
153
  end_n: Optional[int] = None,
@@ -153,15 +155,17 @@ class Reader(CoreReader):
153
155
  *,
154
156
  is_time: bool = False,
155
157
  ) -> Union[h5py.Dataset, np.ndarray]:
156
- """Warning: only valid for IV-Stream, not IV-Curves
158
+ """Sample down iv-data.
159
+
160
+ Warning: only valid for IV-Stream, not IV-Curves
157
161
 
158
162
  :param data_src: a h5-dataset to digest, can be external
159
163
  :param data_dst: can be a dataset, numpy-array or None (will be created internally then)
160
164
  :param start_n: start-sample
161
165
  :param end_n: ending-sample (not included)
162
- :param ds_factor: downsampling-factor
163
- :param is_time: time is not really downsamples, but just decimated
164
- :return: downsampled h5-dataset or numpy-array
166
+ :param ds_factor: sampling-factor
167
+ :param is_time: time is not really down-sampled, but decimated
168
+ :return: resampled h5-dataset or numpy-array
165
169
  """
166
170
  from scipy import signal # here due to massive delay
167
171
 
@@ -222,7 +226,7 @@ class Reader(CoreReader):
222
226
 
223
227
  def resample(
224
228
  self,
225
- data_src: h5py.Dataset,
229
+ data_src: Union[h5py.Dataset, np.ndarray],
226
230
  data_dst: Union[None, h5py.Dataset, np.ndarray],
227
231
  start_n: int = 0,
228
232
  end_n: Optional[int] = None,
@@ -230,13 +234,15 @@ class Reader(CoreReader):
230
234
  *,
231
235
  is_time: bool = False,
232
236
  ) -> Union[h5py.Dataset, np.ndarray]:
233
- """:param data_src:
234
- :param data_dst:
235
- :param start_n:
236
- :param end_n:
237
- :param samplerate_dst:
238
- :param is_time:
239
- :return:
237
+ """Up- or down-sample the original trace-data.
238
+
239
+ :param data_src: original iv-data
240
+ :param data_dst: resampled iv-traces
241
+ :param start_n: start index of the source
242
+ :param end_n: end index of the source
243
+ :param samplerate_dst: desired sampling rate
244
+ :param is_time: time-array is handled differently than IV-Samples
245
+ :return: resampled iv-data
240
246
  """
241
247
  self._logger.error("Resampling is still under construction - do not use for now!")
242
248
  if self.get_datatype() == "ivcurve":
@@ -327,7 +333,7 @@ class Reader(CoreReader):
327
333
  *,
328
334
  relative_timestamp: bool = True,
329
335
  ) -> Dict:
330
- """Provides down-sampled iv-data that can be feed into plot_to_file()
336
+ """Provide down-sampled iv-data that can be fed into plot_to_file().
331
337
 
332
338
  :param start_s: time in seconds, relative to start of recording
333
339
  :param end_s: time in seconds, relative to start of recording
@@ -371,14 +377,15 @@ class Reader(CoreReader):
371
377
 
372
378
  @staticmethod
373
379
  def assemble_plot(data: Union[dict, list], width: int = 20, height: int = 10) -> plt.Figure:
374
- """TODO: add power (if wanted)
380
+ """Create the actual figure.
375
381
 
376
382
  :param data: plottable / down-sampled iv-data with some meta-data
377
383
  -> created with generate_plot_data()
378
384
  :param width: plot-width
379
385
  :param height: plot-height
380
- :return:
386
+ :return: figure
381
387
  """
388
+ # TODO: add power (if wanted)
382
389
  if isinstance(data, dict):
383
390
  data = [data]
384
391
  fig, axes = plt.subplots(2, 1, sharex="all")
@@ -403,8 +410,9 @@ class Reader(CoreReader):
403
410
  width: int = 20,
404
411
  height: int = 10,
405
412
  ) -> None:
406
- """Creates (down-sampled) IV-Plot
407
- -> omitting start- and end-time will use the whole duration
413
+ """Create (down-sampled) IV-Plots.
414
+
415
+ Omitting start- and end-time will use the whole trace (full duration).
408
416
 
409
417
  :param start_s: time in seconds, relative to start of recording, optional
410
418
  :param end_s: time in seconds, relative to start of recording, optional
@@ -432,7 +440,7 @@ class Reader(CoreReader):
432
440
  def multiplot_to_file(
433
441
  data: list, plot_path: Path, width: int = 20, height: int = 10
434
442
  ) -> Optional[Path]:
435
- """Creates (down-sampled) IV-Multi-Plot
443
+ """Create (down-sampled) IV-Multi-Plots (of more than one trace).
436
444
 
437
445
  :param data: plottable / down-sampled iv-data with some meta-data
438
446
  -> created with generate_plot_data()
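Taken together, the reworked docstrings describe the typical flow through `shepherd_data.Reader`: open a recording, dump or down-sample a dataset, then plot. A hedged usage sketch built only from the signatures and docstrings above; the file name and the `data/current` dataset path are assumptions, not something this diff confirms:

```python
from pathlib import Path

from shepherd_data import Reader

# placeholder file name; any shepherd hdf5-recording with iv-samples should do
with Reader(Path("./hrv_sawtooth_1h.h5")) as shpr:
    # extract the numerical data of a group to csv
    shpr.save_csv(shpr["data"], separator=";")

    # down-sample one trace into a numpy array (data_dst=None lets it be created internally)
    current_ds = shpr.downsample(shpr["data"]["current"], None, ds_factor=1000)

    # create a (down-sampled) IV-plot of the first 60 s
    shpr.plot_to_file(start_s=0, end_s=60)
```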
shepherd_data-2024.4.1.dist-info/METADATA ADDED
@@ -0,0 +1,88 @@
1
+ Metadata-Version: 2.1
2
+ Name: shepherd_data
3
+ Version: 2024.4.1
4
+ Summary: Programming- and CLI-Interface for the h5-dataformat of the Shepherd-Testbed
5
+ Author-email: Ingmar Splitt <ingmar.splitt@tu-dresden.de>
6
+ Maintainer-email: Ingmar Splitt <ingmar.splitt@tu-dresden.de>
7
+ Project-URL: Documentation, https://github.com/orgua/shepherd-datalib/blob/main/README.md
8
+ Project-URL: Issues, https://pypi.org/project/shepherd-data/issues
9
+ Project-URL: Source, https://pypi.org/project/shepherd-data/
10
+ Keywords: testbed,beaglebone,pru,batteryless,energyharvesting,solar
11
+ Platform: unix
12
+ Platform: linux
13
+ Platform: osx
14
+ Platform: cygwin
15
+ Platform: win32
16
+ Platform: win64
17
+ Classifier: Development Status :: 5 - Production/Stable
18
+ Classifier: Intended Audience :: Developers
19
+ Classifier: Intended Audience :: Information Technology
20
+ Classifier: Intended Audience :: Science/Research
21
+ Classifier: Programming Language :: Python :: 3.8
22
+ Classifier: Programming Language :: Python :: 3.9
23
+ Classifier: Programming Language :: Python :: 3.10
24
+ Classifier: Programming Language :: Python :: 3.11
25
+ Classifier: Programming Language :: Python :: 3.12
26
+ Classifier: License :: OSI Approved :: MIT License
27
+ Classifier: Operating System :: OS Independent
28
+ Classifier: Natural Language :: English
29
+ Requires-Python: >=3.8
30
+ Description-Content-Type: text/markdown
31
+ Requires-Dist: click
32
+ Requires-Dist: h5py
33
+ Requires-Dist: matplotlib
34
+ Requires-Dist: numpy
35
+ Requires-Dist: pandas >=2.0.0
36
+ Requires-Dist: pyYAML
37
+ Requires-Dist: scipy
38
+ Requires-Dist: shepherd-core[inventory] >=2024.04.1
39
+ Requires-Dist: tqdm
40
+ Provides-Extra: dev
41
+ Requires-Dist: shepherd-core[dev] ; extra == 'dev'
42
+ Requires-Dist: pandas-stubs ; extra == 'dev'
43
+ Provides-Extra: elf
44
+ Requires-Dist: shepherd-core[elf] ; extra == 'elf'
45
+ Provides-Extra: test
46
+ Requires-Dist: shepherd-core[test] ; extra == 'test'
47
+ Requires-Dist: pytest ; extra == 'test'
48
+ Requires-Dist: pytest-click ; extra == 'test'
49
+ Requires-Dist: coverage ; extra == 'test'
50
+
51
+ # Shepherd-Data-Tool
52
+
53
+ [![PyPiVersion](https://img.shields.io/pypi/v/shepherd_data.svg)](https://pypi.org/project/shepherd_data)
54
+ [![image](https://img.shields.io/pypi/pyversions/shepherd_data.svg)](https://pypi.python.org/pypi/shepherd-data)
55
+ [![Pytest](https://github.com/orgua/shepherd-datalib/actions/workflows/py_unittest.yml/badge.svg)](https://github.com/orgua/shepherd-datalib/actions/workflows/py_unittest.yml)
56
+ [![CodeStyle](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
57
+
58
+ **Main Documentation**: <https://orgua.github.io/shepherd>
59
+
60
+ **Source Code**: <https://github.com/orgua/shepherd-datalib>
61
+
62
+ **Main Project**: <https://github.com/orgua/shepherd>
63
+
64
+ ---
65
+
66
+ `shepherd-data` eases the handling of hdf5-recordings used by the [shepherd](https://github.com/orgua/shepherd)-testbed. Users can read, validate and create files and also extract, down-sample and plot information.
67
+
68
+ ## Installation
69
+
70
+ ### PIP - Online
71
+
72
+ ```shell
73
+ pip3 install shepherd-data -U
74
+ ```
75
+
76
+ For bleeding-edge-features or dev-work it is possible to install directly from GitHub-Sources (here `dev`-branch):
77
+
78
+ ```Shell
79
+ pip install git+https://github.com/orgua/shepherd-datalib.git@dev#subdirectory=shepherd_data -U
80
+ ```
81
+
82
+ ## More
83
+
84
+ Please consult the [official documentation](https://orgua.github.io/shepherd) for more, it covers:
85
+
86
+ - general context
87
+ - command-line interface
88
+ - programming interface
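The new README keeps only pointers to the main documentation. For a concrete feel of the API this wheel ships, here is a condensed sketch of how a recording is generated with `Writer`, shortened from the test fixture `tests/conftest.py` that is removed further down in this diff:

```python
from pathlib import Path

import numpy as np

from shepherd_core import Compression
from shepherd_data import Writer

store_path = Path("./harvest_example.h5")
duration_s = 2

with Writer(store_path, compression=Compression.gzip1) as file:
    file.store_hostname("artificial")
    # values in SI units: a falling voltage ramp with a rising current
    timestamps = np.arange(0.0, duration_s, file.sample_interval_ns / 1e9)
    voltages = np.linspace(3.60, 1.90, int(file.samplerate_sps * duration_s))
    currents = np.linspace(100e-6, 2000e-6, int(file.samplerate_sps * duration_s))
    file.append_iv_data_si(timestamps, voltages, currents)
```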
shepherd_data-2024.4.1.dist-info/RECORD ADDED
@@ -0,0 +1,12 @@
1
+ shepherd_data/__init__.py,sha256=EZ_RzvcBkL2fMTCCWtKcbzMcrmmwgDb7SD-GZhvqUnA,243
2
+ shepherd_data/cli.py,sha256=2GDpTgd9cHJQS-GZJSvn2bhyeh6oG_srtITfc3FGAmg,15017
3
+ shepherd_data/debug_resampler.py,sha256=0VNGuUqOeKii6fvkRJUpv-27uv9p1VFsvShU3tvxwYQ,961
4
+ shepherd_data/ivonne.py,sha256=ArojlpWWYwaetkDtBhi-RdFidt79GHoMLZLRR6KljRo,11600
5
+ shepherd_data/mppt.py,sha256=588KSrLuJfNRKKnnL6ewePLi3zrwaO_PAZypikACrks,3925
6
+ shepherd_data/reader.py,sha256=JKTVemjH_6678MCNf2cDzRIsCyfD2B6CTUzBiG110QY,18531
7
+ shepherd_data-2024.4.1.dist-info/METADATA,sha256=ZuZcImaE9O7tvzyibfM4wVcToX_p7nImzS4lq8DEUoA,3388
8
+ shepherd_data-2024.4.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
9
+ shepherd_data-2024.4.1.dist-info/entry_points.txt,sha256=6PBfY36A1xNOdzLiz-Qoukya_UzFZAwOapwmRNnPeZ8,56
10
+ shepherd_data-2024.4.1.dist-info/top_level.txt,sha256=7-SCTY-TG1mLY72OVKCaqte1hy-X8woxknIUAD3OIxs,14
11
+ shepherd_data-2024.4.1.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
12
+ shepherd_data-2024.4.1.dist-info/RECORD,,
shepherd_data-2024.4.1.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: bdist_wheel (0.42.0)
2
+ Generator: bdist_wheel (0.43.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
shepherd_data-2023.12.1.dist-info/METADATA DELETED
@@ -1,274 +0,0 @@
1
- Metadata-Version: 2.1
2
- Name: shepherd-data
3
- Version: 2023.12.1
4
- Summary: Programming- and CLI-Interface for the h5-dataformat of the Shepherd-Testbed
5
- Home-page: https://pypi.org/project/shepherd-data/
6
- Author: Ingmar Splitt, Kai Geissdoerfer
7
- Author-email: ingmar.splitt@tu-dresden.de
8
- Maintainer-email: ingmar.splitt@tu-dresden.de
9
- License: MIT
10
- Project-URL: Tracker, https://github.com/orgua/shepherd-datalib/issues
11
- Project-URL: Source, https://github.com/orgua/shepherd-datalib
12
- Keywords: testbed,beaglebone,pru,batteryless,energyharvesting,solar
13
- Platform: unix
14
- Platform: linux
15
- Platform: osx
16
- Platform: cygwin
17
- Platform: win32
18
- Platform: win64
19
- Classifier: Development Status :: 5 - Production/Stable
20
- Classifier: Intended Audience :: Developers
21
- Classifier: Intended Audience :: Information Technology
22
- Classifier: Intended Audience :: Science/Research
23
- Classifier: Programming Language :: Python :: 3.8
24
- Classifier: Programming Language :: Python :: 3.9
25
- Classifier: Programming Language :: Python :: 3.10
26
- Classifier: Programming Language :: Python :: 3.11
27
- Classifier: Programming Language :: Python :: 3.12
28
- Classifier: License :: OSI Approved :: MIT License
29
- Classifier: Operating System :: OS Independent
30
- Classifier: Natural Language :: English
31
- Requires-Python: >=3.8
32
- Description-Content-Type: text/markdown
33
- Requires-Dist: h5py
34
- Requires-Dist: numpy
35
- Requires-Dist: pyYAML
36
- Requires-Dist: shepherd-core[inventory] >=2023.12.1
37
- Requires-Dist: click
38
- Requires-Dist: matplotlib
39
- Requires-Dist: pandas
40
- Requires-Dist: scipy
41
- Requires-Dist: tqdm
42
- Provides-Extra: dev
43
- Requires-Dist: shepherd-core[dev] ; extra == 'dev'
44
- Requires-Dist: pandas-stubs ; extra == 'dev'
45
- Provides-Extra: elf
46
- Requires-Dist: shepherd-core[elf] ; extra == 'elf'
47
- Provides-Extra: test
48
- Requires-Dist: shepherd-core[test] ; extra == 'test'
49
- Requires-Dist: pytest-click ; extra == 'test'
50
-
51
- # Data Module
52
-
53
- [![PyPiVersion](https://img.shields.io/pypi/v/shepherd_data.svg)](https://pypi.org/project/shepherd_data)
54
- [![image](https://img.shields.io/pypi/pyversions/shepherd_data.svg)](https://pypi.python.org/pypi/shepherd-data)
55
- [![Pytest](https://github.com/orgua/shepherd-datalib/actions/workflows/py_unittest.yml/badge.svg)](https://github.com/orgua/shepherd-datalib/actions/workflows/py_unittest.yml)
56
- [![CodeStyle](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
57
-
58
- **Documentation**: <https://orgua.github.io/shepherd/external/shepherd_data.html>
59
-
60
- **Source Code**: <https://github.com/orgua/shepherd-datalib>
61
-
62
- **Main Project**: <https://github.com/orgua/shepherd>
63
-
64
- ---
65
-
66
- This Python Module eases the handling of hdf5-recordings used by the [shepherd](https://github.com/orgua/shepherd)-testbed. Users can read, validate and create files and also extract, down-sample and plot information.
67
-
68
- ## Installation
69
-
70
- ### PIP - Online
71
-
72
- ```shell
73
- pip3 install shepherd-data -U
74
- ```
75
-
76
- For bleeding-edge-features or dev-work it is possible to install directly from GitHub-Sources (here `dev`-branch):
77
-
78
- ```Shell
79
- pip install git+https://github.com/orgua/shepherd-datalib.git@dev#subdirectory=shepherd_data -U
80
- ```
81
-
82
- ## Programming Interface
83
-
84
- ### Basic Usage (recommendation)
85
-
86
- ```python
87
- import shepherd_data as sd
88
-
89
- with sd.Reader("./hrv_sawtooth_1h.h5") as db:
90
- print(f"Mode: {db.get_mode()}")
91
- print(f"Window: {db.get_window_samples()}")
92
- print(f"Config: {db.get_config()}")
93
- ```
94
-
95
- ### Available Functionality
96
-
97
- - `Reader()`
98
- - file can be checked for plausibility and validity (`is_valid()`)
99
- - internal structure of h5file (`get_metadata()` or `save_metadata()` ... to yaml) with lots of additional data
100
- - access data and various converters, calculators
101
- - `read_buffers()` -> generator that provides one buffer per call, can be configured on first call
102
- - `get_calibration_data()`
103
- - `get_windows_samples()`
104
- - `get_mode()`
105
- - `get_config()`
106
- - direct access to root h5-structure via `reader['element']`
107
- - converters for raw / physical units: `si_to_raw()` & `raw_to_si()`
108
- - `energy()` sums up recorded power over time
109
- - `downsample()` (if needed) visualize recording (`plot_to_file()`)
110
- - `Writer()`
111
- - inherits all functionality from Reader
112
- - `append_iv_data_raw()`
113
- - `append_iv_data_si()`
114
- - `set_config()`
115
- - `set_windows_samples()`
116
- - IVonne Reader
117
- - `convert_2_ivcurves()` converts ivonne-recording into a shepherd ivcurve
118
- - `upsample_2_isc_voc()` TODO: for now a upsampled but unusable version of samples of short-circuit-current and open-circuit-voltage
119
- - `convert_2_ivsamples()` already applies a simple harvesting-algo and creates ivsamples
120
- - `./examples/`
121
- - `example_convert_ivonne.py` converts IVonne recording (`jogging_10m.iv`) to shepherd ivcurves, NOTE: slow implementation
122
- - `example_extract_logs.py` is analyzing all files in directory, saves logging-data and calculates cpu-load and data-rate
123
- - `example_generate_sawtooth.py` is using Writer to generate a 60s ramp with 1h repetition and uses Reader to dump metadata of that file
124
- - `example_plot_traces.py` demos some mpl-plots with various zoom levels
125
- - `example_repair_recordings.py` makes old recordings from shepherd 1.x fit for v2
126
- - `jogging_10m.iv`
127
- - 50 Hz measurement with Short-Circuit-Current and two other parameters
128
- - recorded with "IVonne"
129
-
130
- ### Functionality Update (WIP)
131
-
132
- - Core.`Reader`
133
- - `__repr__()`
134
- - `read_buffers`
135
- - `get_calibration_data`
136
- - `get_window_samples`
137
- - `get_mode`
138
- - `get_config`
139
- - `get_hostname`
140
- - `get_datatype`
141
- - `get_hrv_config`
142
- - `is_valid`
143
- - `energy()`
144
- - `check_timediffs()`
145
- - `get_metadata()`
146
- - `save_metadata()`
147
-
148
- - `Writer(Reader)` (core, data are the same)
149
- - `append_iv_data_raw`
150
- - `append_iv_data_si`
151
- - `store_config`
152
- - `store_hostname`
153
-
154
- - data.`Reader(CoreReader)`
155
- - `save_csv()`
156
- - `save_log()`
157
- - `downsample()`
158
- - `resample()`
159
- - `generate_plot_data()`
160
- - `assemble_plot()`
161
- - `plot_to_file()`
162
- - `multiplot_to_file()`
163
-
164
- ## CLI-Interface
165
-
166
- After installing the module the datalib offers some often needed functionality on the command line:
167
-
168
- **Validate Recordings**
169
-
170
- - takes a file or directory as an argument
171
-
172
- ```shell
173
- shepherd-data validate dir_or_file
174
-
175
- # examples:
176
- shepherd-data validate ./
177
- shepherd-data validate hrv_saw_1h.h5
178
- ```
179
-
180
- **Extract IV-Samples to csv**
181
-
182
- - takes a file or directory as an argument
183
- - can take down-sample-factor as an argument
184
-
185
- ```shell
186
- shepherd-data extract [-f ds-factor] [-s separator_symbol] dir_or_file
187
-
188
- # examples:
189
- shepherd-data extract ./
190
- shepherd-data extract -f 1000 -s ; hrv_saw_1h.h5
191
- ```
192
-
193
- **Extract meta-data and sys-logs**
194
-
195
- - takes a file or directory as an argument
196
-
197
- ```shell
198
- shepherd-data extract-meta dir_or_file
199
-
200
- # examples:
201
- shepherd-data extract-meta ./
202
- shepherd-data extract-meta hrv_saw_1h.h5
203
- ```
204
-
205
- **Plot IVSamples**
206
-
207
- - takes a file or directory as an argument
208
- - can take start- and end-time as an argument
209
- - can take image-width and -height as an argument
210
-
211
- ```shell
212
- shepherd-data plot [-s start_time] [-e end_time] [-w plot_width] [-h plot_height] [--multiplot] dir_or_file
213
-
214
- # examples:
215
- shepherd-data plot --multiplot ./
216
- shepherd-data plot -s10 -e20 hrv_saw_1h.h5
217
- ```
218
-
219
- **Downsample IVSamples (for later GUI-usage, TODO)**
220
-
221
- - generates a set of downsamplings (20 kHz to 0.1 Hz in x4 to x5 Steps)
222
- - takes a file or directory as an argument
223
- - can take down-sample-factor as an argument
224
-
225
- ```shell
226
- shepherd-data downsample [-f ds-factor] [-r sample-rate] dir_or_file
227
-
228
- # examples:
229
- shepherd-data downsample ./
230
- shepherd-data downsample -f 1000 hrv_saw_1h.h5
231
- shepherd-data downsample -r 100 hrv_saw_1h.h5
232
- ```
233
-
234
- ## Data-Layout and Design choices
235
-
236
- Details about the file-structure can be found in the [main-project](https://github.com/orgua/shepherd/blob/main/docs/user/data_format.rst).
237
-
238
- TODO:
239
- - update design of file
240
- - data dtype, mode, ...
241
-
242
- ### Modes and Datatypes
243
-
244
- - Mode `harvester` recorded a harvesting-source like solar with one of various algorithms
245
- - Datatype `ivsample` is directly usable by shepherd, input for virtual source / converter
246
- - Datatype `ivcurve` is directly usable by shepherd, input for a virtual harvester (output are ivsamples)
247
- - Datatype `isc_voc` is specially for solar-cells and needs to be (at least) transformed into ivcurves later
248
- - Mode `emulator` replayed a harvester-recording through a virtual converter and supplied a target while recording the power-consumption
249
- - Datatype `ivsample` is the only output of this mode
250
-
251
- ### Compression & Beaglebone
252
-
253
- - supported are uncompressed, lzf and gzip with level 1 (order of recommendation)
254
- - lzf seems better-suited due to lower load, or if space isn't a constraint: uncompressed (None as argument)
255
- - note: lzf seems to cause trouble with some third party hdf5-tools
256
- - compression is a heavy load for the beaglebone, but it got more performant with recent python-versions
257
- - size-experiment A: 24 h of ramping / sawtooth (data is repetitive with 1 minute ramp)
258
- - gzip-1: 49'646 MiB -> 588 KiB/s
259
- - lzf: 106'445 MiB -> 1262 KiB/s
260
- - uncompressed: 131'928 MiB -> 1564 KiB/s
261
- - cpu-load-experiments (input is 24h sawtooth, python 3.10 with most recent libs as of 2022-04)
262
- - warning: gpio-traffic and other logging-data can cause lots of load
263
-
264
- ```
265
- emu_120s_gz1_to_gz1.h5 -> emulator, cpu_util [%] = 65.59, data-rate = 352.0 KiB/s
266
- emu_120s_gz1_to_lzf.h5 -> emulator, cpu_util [%] = 57.37, data-rate = 686.0 KiB/s
267
- emu_120s_gz1_to_unc.h5 -> emulator, cpu_util [%] = 53.63, data-rate = 1564.0 KiB/s
268
- emu_120s_lzf_to_gz1.h5 -> emulator, cpu_util [%] = 63.18, data-rate = 352.0 KiB/s
269
- emu_120s_lzf_to_lzf.h5 -> emulator, cpu_util [%] = 58.60, data-rate = 686.0 KiB/s
270
- emu_120s_lzf_to_unc.h5 -> emulator, cpu_util [%] = 55.75, data-rate = 1564.0 KiB/s
271
- emu_120s_unc_to_gz1.h5 -> emulator, cpu_util [%] = 63.84, data-rate = 351.0 KiB/s
272
- emu_120s_unc_to_lzf.h5 -> emulator, cpu_util [%] = 57.28, data-rate = 686.0 KiB/s
273
- emu_120s_unc_to_unc.h5 -> emulator, cpu_util [%] = 51.69, data-rate = 1564.0 KiB/s
274
- ```
shepherd_data-2023.12.1.dist-info/RECORD DELETED
@@ -1,22 +0,0 @@
1
- shepherd_data/__init__.py,sha256=ysHj9J6TMflWnfscRnZ0Z4I622XD0mmyeVQ_NimE95E,242
2
- shepherd_data/cli.py,sha256=XnW-SGEUZpFS0IkrhC0wAB_t3eafB7MD2Z1-iJHJW6Y,15013
3
- shepherd_data/debug_resampler.py,sha256=0VNGuUqOeKii6fvkRJUpv-27uv9p1VFsvShU3tvxwYQ,961
4
- shepherd_data/ivonne.py,sha256=wkEciyxXuNMFtA6HEm9zR9CIrukwR0CDiuJMtt6_wEA,11601
5
- shepherd_data/mppt.py,sha256=Ifvx7NdsBoyBJGbc5PEEpHFja20SuouXmRpaCHoODWs,3583
6
- shepherd_data/reader.py,sha256=hAxvMUt86c3EJif_Q4oYb4zhhCr3nS6Rjk-fWmMrDSY,18035
7
- tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
8
- tests/conftest.py,sha256=BCtm2TbfVf85_AsGOUZLXd6TquicWRMgBq2-q5UA2UM,985
9
- tests/test_cli.py,sha256=PPbRwgOQbWjNcgR7mIXTnGdboCQ4QDC4Kh74kSaEFhc,181
10
- tests/test_cli_downsample.py,sha256=i8AKw-doGlw5xtg9YG3kFGShV9we6zv7F48D-t-l8us,1775
11
- tests/test_cli_extract.py,sha256=-8HdLL1sDF-ygAGpbUeA46fKu1mAP8aZN5xde9tMP3M,2504
12
- tests/test_cli_plot.py,sha256=jDDoDtvyChQyS4CX2cPq1jDLJNfzDjtJXOftBxL-meM,3242
13
- tests/test_cli_validate.py,sha256=DOfaRq6r05eu8x4SnZOPrPfbbMNu0PTus7lSV8Gkzwo,405
14
- tests/test_examples.py,sha256=e_uc5h_I4wwJnIWuh-PjpSSTICkYemI831iqhxIy_R0,576
15
- tests/test_ivonne.py,sha256=cIMtpx7tjRFxL9hpr5OPPgroGLzRZxGC4x9Y7vWgze8,1367
16
- tests/test_reader.py,sha256=Xt__Khgldv8klZJ3OQcMZevC6_3gPH2_TpOorq2ytNI,90
17
- shepherd_data-2023.12.1.dist-info/METADATA,sha256=GnWaKvPEEr5JbBXZIeUiCCeSEIVA845WhBOb1dwLLN4,9999
18
- shepherd_data-2023.12.1.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
19
- shepherd_data-2023.12.1.dist-info/entry_points.txt,sha256=6PBfY36A1xNOdzLiz-Qoukya_UzFZAwOapwmRNnPeZ8,56
20
- shepherd_data-2023.12.1.dist-info/top_level.txt,sha256=Bk61YO7iYS43TSWtwdSKb2moGBvJqJIYfGaZsQsv09M,20
21
- shepherd_data-2023.12.1.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
22
- shepherd_data-2023.12.1.dist-info/RECORD,,
tests/__init__.py DELETED
File without changes
tests/conftest.py DELETED
@@ -1,33 +0,0 @@
1
- from pathlib import Path
2
-
3
- import numpy as np
4
- import pytest
5
-
6
- from shepherd_core import Compression
7
- from shepherd_data import Writer
8
-
9
-
10
- def generate_h5_file(file_path: Path, file_name: str = "harvest_example.h5") -> Path:
11
- store_path = file_path / file_name
12
-
13
- with Writer(store_path, compression=Compression.gzip1) as file:
14
- file.store_hostname("artificial")
15
-
16
- duration_s = 2
17
- repetitions = 5
18
- timestamp_vector = np.arange(0.0, duration_s, file.sample_interval_ns / 1e9)
19
-
20
- # values in SI units
21
- voltages = np.linspace(3.60, 1.90, int(file.samplerate_sps * duration_s))
22
- currents = np.linspace(100e-6, 2000e-6, int(file.samplerate_sps * duration_s))
23
-
24
- for idx in range(repetitions):
25
- timestamps = idx * duration_s + timestamp_vector
26
- file.append_iv_data_si(timestamps, voltages, currents)
27
-
28
- return store_path
29
-
30
-
31
- @pytest.fixture
32
- def data_h5(tmp_path: Path) -> Path:
33
- return generate_h5_file(tmp_path)
tests/test_cli.py DELETED
@@ -1,8 +0,0 @@
1
- from click.testing import CliRunner
2
-
3
- from shepherd_data.cli import cli
4
-
5
-
6
- def test_cli_invoke_help() -> None:
7
- res = CliRunner().invoke(cli, ["-h"])
8
- assert res.exit_code == 0
tests/test_cli_downsample.py DELETED
@@ -1,47 +0,0 @@
1
- from pathlib import Path
2
-
3
- from click.testing import CliRunner
4
-
5
- from shepherd_data.cli import cli
6
-
7
-
8
- def test_cli_downsample_file_full(data_h5: Path) -> None:
9
- res = CliRunner().invoke(cli, ["--verbose", "downsample", "--ds-factor", "10", str(data_h5)])
10
- assert res.exit_code == 0
11
- assert data_h5.with_suffix(".downsampled_x10.h5").exists()
12
-
13
-
14
- def test_cli_downsample_file_short(data_h5: Path) -> None:
15
- res = CliRunner().invoke(cli, ["-v", "downsample", "-f", "20", str(data_h5)])
16
- assert res.exit_code == 0
17
- assert data_h5.with_suffix(".downsampled_x20.h5").exists()
18
-
19
-
20
- def test_cli_downsample_file_min(data_h5: Path) -> None:
21
- res = CliRunner().invoke(cli, ["--verbose", "downsample", str(data_h5)])
22
- assert res.exit_code == 0
23
- assert data_h5.with_suffix(".downsampled_x5.h5").exists()
24
- assert data_h5.with_suffix(".downsampled_x25.h5").exists()
25
- assert data_h5.with_suffix(".downsampled_x100.h5").exists()
26
-
27
-
28
- def test_cli_downsample_dir_full(data_h5: Path) -> None:
29
- print(data_h5.parent)
30
- print(data_h5.parent.is_dir())
31
- res = CliRunner().invoke(
32
- cli, ["--verbose", "downsample", "--ds-factor", "40", str(data_h5.parent)]
33
- )
34
- assert res.exit_code == 0
35
- assert data_h5.with_suffix(".downsampled_x40.h5").exists()
36
-
37
-
38
- def test_cli_downsample_rate_file_full(data_h5: Path) -> None:
39
- res = CliRunner().invoke(cli, ["--verbose", "downsample", "--sample-rate", "100", str(data_h5)])
40
- assert res.exit_code == 0
41
- assert data_h5.with_suffix(".downsampled_x1000.h5").exists()
42
-
43
-
44
- def test_cli_downsample_rate_file_short(data_h5: Path) -> None:
45
- res = CliRunner().invoke(cli, ["-v", "downsample", "-r", "200", str(data_h5)])
46
- assert res.exit_code == 0
47
- assert data_h5.with_suffix(".downsampled_x500.h5").exists()
tests/test_cli_extract.py DELETED
@@ -1,80 +0,0 @@
1
- from pathlib import Path
2
-
3
- from click.testing import CliRunner
4
-
5
- from shepherd_data.cli import cli
6
-
7
-
8
- def test_cli_extract_file_full(data_h5: Path) -> None:
9
- res = CliRunner().invoke(
10
- cli,
11
- [
12
- "--verbose",
13
- "extract",
14
- "--ds-factor",
15
- "100",
16
- "--separator",
17
- ",",
18
- str(data_h5),
19
- ],
20
- )
21
- assert res.exit_code == 0
22
- assert data_h5.with_suffix(".downsampled_x100.h5").exists()
23
- assert data_h5.with_suffix(".downsampled_x100.data.csv").exists()
24
-
25
-
26
- def test_cli_extract_file_short(data_h5: Path) -> None:
27
- res = CliRunner().invoke(cli, ["-v", "extract", "-f", "200", "-s", ";", str(data_h5)])
28
- assert res.exit_code == 0
29
- assert data_h5.with_suffix(".downsampled_x200.h5").exists()
30
- assert data_h5.with_suffix(".downsampled_x200.data.csv").exists()
31
-
32
-
33
- def test_cli_extract_file_min(data_h5: Path) -> None:
34
- res = CliRunner().invoke(cli, ["-v", "extract", str(data_h5)])
35
- assert res.exit_code == 0
36
- assert data_h5.with_suffix(".downsampled_x1000.h5").exists()
37
- assert data_h5.with_suffix(".downsampled_x1000.data.csv").exists()
38
-
39
-
40
- def test_cli_extract_dir_full(data_h5: Path) -> None:
41
- print(data_h5.parent)
42
- print(data_h5.parent.is_dir())
43
- res = CliRunner().invoke(
44
- cli,
45
- [
46
- "--verbose",
47
- "extract",
48
- "--ds-factor",
49
- "2000",
50
- "--separator",
51
- ";",
52
- str(data_h5.parent),
53
- ],
54
- )
55
- assert res.exit_code == 0
56
- assert data_h5.with_suffix(".downsampled_x2000.h5").exists()
57
- assert data_h5.with_suffix(".downsampled_x2000.data.csv").exists()
58
-
59
-
60
- def test_cli_extract_meta_file_full(data_h5: Path) -> None:
61
- res = CliRunner().invoke(cli, ["--verbose", "extract-meta", "--separator", ";", str(data_h5)])
62
- assert res.exit_code == 0
63
- # TODO: nothing to grab here, add in base-file, same for tests below
64
-
65
-
66
- def test_cli_extract_meta_file_short(data_h5: Path) -> None:
67
- res = CliRunner().invoke(cli, ["-v", "extract-meta", "-s", "-", str(data_h5)])
68
- assert res.exit_code == 0
69
-
70
-
71
- def test_cli_extract_meta_file_min(data_h5: Path) -> None:
72
- res = CliRunner().invoke(cli, ["-v", "extract-meta", "-s", "-", str(data_h5)])
73
- assert res.exit_code == 0
74
-
75
-
76
- def test_cli_extract_meta_dir_full(data_h5: Path) -> None:
77
- res = CliRunner().invoke(
78
- cli, ["--verbose", "extract-meta", "--separator", ";", str(data_h5.parent)]
79
- )
80
- assert res.exit_code == 0
tests/test_cli_plot.py DELETED
@@ -1,120 +0,0 @@
1
- from pathlib import Path
2
-
3
- from click.testing import CliRunner
4
-
5
- from shepherd_data.cli import cli
6
-
7
- from .conftest import generate_h5_file
8
-
9
-
10
- def test_cli_plot_file_full(data_h5: Path) -> None:
11
- res = CliRunner().invoke(
12
- cli,
13
- [
14
- "--verbose",
15
- "plot",
16
- "--start",
17
- "0",
18
- "--end",
19
- "8",
20
- "--width",
21
- "50",
22
- "--height",
23
- "10",
24
- str(data_h5),
25
- ],
26
- )
27
- assert res.exit_code == 0
28
- assert data_h5.with_suffix(".plot_0s000_to_8s000.png").exists()
29
-
30
-
31
- def test_cli_plot_file_short(data_h5: Path) -> None:
32
- res = CliRunner().invoke(
33
- cli,
34
- [
35
- "-v",
36
- "plot",
37
- "-s",
38
- "2.345",
39
- "-e",
40
- "8.765",
41
- "-w",
42
- "30",
43
- "-h",
44
- "20",
45
- str(data_h5),
46
- ],
47
- )
48
- assert res.exit_code == 0
49
- assert data_h5.with_suffix(".plot_2s345_to_8s765.png").exists()
50
-
51
-
52
- def test_cli_plot_file_min(data_h5: Path) -> None:
53
- res = CliRunner().invoke(cli, ["-v", "plot", str(data_h5)])
54
- assert res.exit_code == 0
55
- assert data_h5.with_suffix(".plot_0s000_to_10s000.png").exists() # full duration of file
56
-
57
-
58
- def test_cli_plot_dir_min(tmp_path: Path) -> None:
59
- file1_path = generate_h5_file(tmp_path, "hrv_file1.h5")
60
- file2_path = generate_h5_file(tmp_path, "hrv_file2.h5")
61
- res = CliRunner().invoke(cli, ["-v", "plot", str(tmp_path.resolve())])
62
- assert res.exit_code == 0
63
- assert file1_path.with_suffix(".plot_0s000_to_10s000.png").exists() # full duration of file
64
- assert file2_path.with_suffix(".plot_0s000_to_10s000.png").exists() # full duration of file
65
-
66
-
67
- def test_cli_multiplot_dir_full(tmp_path: Path) -> None:
68
- generate_h5_file(tmp_path, "hrv_file1.h5")
69
- generate_h5_file(tmp_path, "hrv_file2.h5")
70
- res = CliRunner().invoke(
71
- cli,
72
- [
73
- "--verbose",
74
- "plot",
75
- "--start",
76
- "1",
77
- "--end",
78
- "7",
79
- "--width",
80
- "40",
81
- "--height",
82
- "10",
83
- "--multiplot",
84
- str(tmp_path),
85
- ],
86
- )
87
- assert res.exit_code == 0
88
- assert tmp_path.with_suffix(".multiplot_1s000_to_7s000.png").exists()
89
-
90
-
91
- def test_cli_multiplot_dir_short(tmp_path: Path) -> None:
92
- generate_h5_file(tmp_path, "hrv_file1.h5")
93
- generate_h5_file(tmp_path, "hrv_file2.h5")
94
- res = CliRunner().invoke(
95
- cli,
96
- [
97
- "-v",
98
- "plot",
99
- "-s",
100
- "2.345",
101
- "-e",
102
- "8.765",
103
- "-w",
104
- "30",
105
- "-h",
106
- "20",
107
- "-m",
108
- str(tmp_path),
109
- ],
110
- )
111
- assert res.exit_code == 0
112
- assert tmp_path.with_suffix(".multiplot_2s345_to_8s765.png").exists()
113
-
114
-
115
- def test_cli_multiplot_dir_min(tmp_path: Path) -> None:
116
- generate_h5_file(tmp_path, "hrv_file1.h5")
117
- generate_h5_file(tmp_path, "hrv_file2.h5")
118
- res = CliRunner().invoke(cli, ["-v", "plot", "-m", str(tmp_path)])
119
- assert res.exit_code == 0
120
- assert tmp_path.with_suffix(".multiplot_0s000_to_10s000.png").exists() # full duration of file
tests/test_cli_validate.py DELETED
@@ -1,15 +0,0 @@
1
- from pathlib import Path
2
-
3
- from click.testing import CliRunner
4
-
5
- from shepherd_data.cli import cli
6
-
7
-
8
- def test_cli_validate_file(data_h5: Path) -> None:
9
- res = CliRunner().invoke(cli, ["-v", "validate", str(data_h5)])
10
- assert res.exit_code == 0
11
-
12
-
13
- def test_cli_validate_dir(data_h5: Path) -> None:
14
- res = CliRunner().invoke(cli, ["-v", "validate", str(data_h5.parent)])
15
- assert res.exit_code == 0
tests/test_examples.py DELETED
@@ -1,26 +0,0 @@
1
- import os
2
- import subprocess
3
- from pathlib import Path
4
-
5
- import pytest
6
-
7
-
8
- @pytest.fixture
9
- def example_path() -> Path:
10
- path = Path(__file__).resolve().parent.parent / "examples"
11
- os.chdir(path)
12
- return path
13
-
14
-
15
- examples = [
16
- "example_convert_ivonne.py",
17
- "example_extract_logs.py",
18
- "example_generate_sawtooth.py",
19
- "example_plot_traces.py",
20
- "example_repair_recordings.py",
21
- ]
22
-
23
-
24
- @pytest.mark.parametrize("file", examples)
25
- def test_example_scripts(example_path: Path, file: str) -> None:
26
- subprocess.check_call(f"python {example_path / file}", shell=True)
tests/test_ivonne.py DELETED
@@ -1,42 +0,0 @@
1
- from pathlib import Path
2
-
3
- import pytest
4
-
5
- from shepherd_data import Reader
6
- from shepherd_data import ivonne
7
- from shepherd_data import mppt
8
-
9
-
10
- @pytest.fixture
11
- def example_path() -> Path:
12
- here = Path(__file__).resolve().parent
13
- return here.parent / "examples"
14
-
15
-
16
- def test_convert_ivonne(tmp_path: Path, example_path: Path) -> None:
17
- input_file = "jogging_10m"
18
- inp_path = example_path / (input_file + ".iv")
19
- isc_path = tmp_path / (input_file + "_isc.h5")
20
- ivc_path = tmp_path / (input_file + "_ivc.h5")
21
- voc_path = tmp_path / (input_file + "_voc.h5")
22
- opt_path = tmp_path / (input_file + "_opt.h5")
23
-
24
- with ivonne.Reader(inp_path) as ifr:
25
- ifr.upsample_2_isc_voc(isc_path, duration_s=20)
26
- ifr.convert_2_ivcurves(ivc_path, duration_s=20)
27
-
28
- tr_voc = mppt.OpenCircuitTracker(ratio=0.76)
29
- tr_opt = mppt.OptimalTracker()
30
-
31
- ifr.convert_2_ivsamples(voc_path, tracker=tr_voc, duration_s=20)
32
- ifr.convert_2_ivsamples(opt_path, tracker=tr_opt, duration_s=20)
33
-
34
- energies = {}
35
- for file_path in [isc_path, ivc_path, voc_path, opt_path]:
36
- with Reader(file_path) as sfr:
37
- assert sfr.runtime_s == 20
38
- energies[file_path.stem[-3:]] = sfr.energy()
39
-
40
- assert energies["isc"] > energies["opt"]
41
- assert energies["opt"] > energies["voc"]
42
- assert energies["voc"] > energies["ivc"]
tests/test_reader.py DELETED
@@ -1,3 +0,0 @@
1
- # TODO:
2
- # - confirm energy stays same after resampling
3
- # - length should also stay same