shepherd-data 2023.11.1.tar.gz → 2024.4.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. shepherd_data-2024.4.1/PKG-INFO +88 -0
  2. shepherd_data-2024.4.1/README.md +38 -0
  3. shepherd_data-2024.4.1/pyproject.toml +101 -0
  4. shepherd_data-2024.4.1/setup.cfg +4 -0
  5. {shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/shepherd_data/__init__.py +2 -1
  6. {shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/shepherd_data/cli.py +20 -43
  7. {shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/shepherd_data/debug_resampler.py +1 -3
  8. {shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/shepherd_data/ivonne.py +20 -41
  9. {shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/shepherd_data/mppt.py +24 -22
  10. {shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/shepherd_data/reader.py +45 -67
  11. shepherd_data-2024.4.1/shepherd_data.egg-info/PKG-INFO +88 -0
  12. {shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/shepherd_data.egg-info/SOURCES.txt +6 -17
  13. {shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/shepherd_data.egg-info/requires.txt +6 -4
  14. {shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/shepherd_data.egg-info/top_level.txt +0 -1
  15. {shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/tests/test_cli_downsample.py +2 -6
  16. {shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/tests/test_cli_extract.py +2 -6
  17. {shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/tests/test_cli_plot.py +4 -12
  18. {shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/tests/test_examples.py +3 -2
  19. {shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/tests/test_ivonne.py +1 -1
  20. shepherd_data-2023.11.1/PKG-INFO +0 -274
  21. shepherd_data-2023.11.1/README.md +0 -224
  22. shepherd_data-2023.11.1/pyproject.toml +0 -7
  23. shepherd_data-2023.11.1/setup.cfg +0 -82
  24. shepherd_data-2023.11.1/shepherd_data.egg-info/PKG-INFO +0 -274
  25. shepherd_data-2023.11.1/tests/__init__.py +0 -0
  26. shepherd_data-2023.11.1/tests/conftest.py +0 -33
  27. {shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/shepherd_data.egg-info/dependency_links.txt +0 -0
  28. {shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/shepherd_data.egg-info/entry_points.txt +0 -0
  29. {shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/shepherd_data.egg-info/zip-safe +0 -0
  30. {shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/tests/test_cli.py +0 -0
  31. {shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/tests/test_cli_validate.py +0 -0
  32. {shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/tests/test_reader.py +0 -0
shepherd_data-2024.4.1/PKG-INFO
@@ -0,0 +1,88 @@
+ Metadata-Version: 2.1
+ Name: shepherd_data
+ Version: 2024.4.1
+ Summary: Programming- and CLI-Interface for the h5-dataformat of the Shepherd-Testbed
+ Author-email: Ingmar Splitt <ingmar.splitt@tu-dresden.de>
+ Maintainer-email: Ingmar Splitt <ingmar.splitt@tu-dresden.de>
+ Project-URL: Documentation, https://github.com/orgua/shepherd-datalib/blob/main/README.md
+ Project-URL: Issues, https://pypi.org/project/shepherd-data/issues
+ Project-URL: Source, https://pypi.org/project/shepherd-data/
+ Keywords: testbed,beaglebone,pru,batteryless,energyharvesting,solar
+ Platform: unix
+ Platform: linux
+ Platform: osx
+ Platform: cygwin
+ Platform: win32
+ Platform: win64
+ Classifier: Development Status :: 5 - Production/Stable
+ Classifier: Intended Audience :: Developers
+ Classifier: Intended Audience :: Information Technology
+ Classifier: Intended Audience :: Science/Research
+ Classifier: Programming Language :: Python :: 3.8
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Operating System :: OS Independent
+ Classifier: Natural Language :: English
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
+ Requires-Dist: click
+ Requires-Dist: h5py
+ Requires-Dist: matplotlib
+ Requires-Dist: numpy
+ Requires-Dist: pandas>=2.0.0
+ Requires-Dist: pyYAML
+ Requires-Dist: scipy
+ Requires-Dist: shepherd-core[inventory]>=2024.04.1
+ Requires-Dist: tqdm
+ Provides-Extra: elf
+ Requires-Dist: shepherd-core[elf]; extra == "elf"
+ Provides-Extra: dev
+ Requires-Dist: shepherd-core[dev]; extra == "dev"
+ Requires-Dist: pandas-stubs; extra == "dev"
+ Provides-Extra: test
+ Requires-Dist: shepherd-core[test]; extra == "test"
+ Requires-Dist: pytest; extra == "test"
+ Requires-Dist: pytest-click; extra == "test"
+ Requires-Dist: coverage; extra == "test"
+
+ # Shepherd-Data-Tool
+
+ [![PyPiVersion](https://img.shields.io/pypi/v/shepherd_data.svg)](https://pypi.org/project/shepherd_data)
+ [![image](https://img.shields.io/pypi/pyversions/shepherd_data.svg)](https://pypi.python.org/pypi/shepherd-data)
+ [![Pytest](https://github.com/orgua/shepherd-datalib/actions/workflows/py_unittest.yml/badge.svg)](https://github.com/orgua/shepherd-datalib/actions/workflows/py_unittest.yml)
+ [![CodeStyle](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
+
+ **Main Documentation**: <https://orgua.github.io/shepherd>
+
+ **Source Code**: <https://github.com/orgua/shepherd-datalib>
+
+ **Main Project**: <https://github.com/orgua/shepherd>
+
+ ---
+
+ `shepherd-data` eases the handling of hdf5-recordings used by the [shepherd](https://github.com/orgua/shepherd)-testbed. Users can read, validate and create files and also extract, down-sample and plot information.
+
+ ## Installation
+
+ ### PIP - Online
+
+ ```shell
+ pip3 install shepherd-data -U
+ ```
+
+ For bleeding-edge-features or dev-work it is possible to install directly from GitHub-Sources (here `dev`-branch):
+
+ ```Shell
+ pip install git+https://github.com/orgua/shepherd-datalib.git@dev#subdirectory=shepherd_data -U
+ ```
+
+ ## More
+
+ Please consult the [official documentation](https://orgua.github.io/shepherd) for more, it covers:
+
+ - general context
+ - command-line interface
+ - programming interface
shepherd_data-2024.4.1/README.md
@@ -0,0 +1,38 @@
+ # Shepherd-Data-Tool
+
+ [![PyPiVersion](https://img.shields.io/pypi/v/shepherd_data.svg)](https://pypi.org/project/shepherd_data)
+ [![image](https://img.shields.io/pypi/pyversions/shepherd_data.svg)](https://pypi.python.org/pypi/shepherd-data)
+ [![Pytest](https://github.com/orgua/shepherd-datalib/actions/workflows/py_unittest.yml/badge.svg)](https://github.com/orgua/shepherd-datalib/actions/workflows/py_unittest.yml)
+ [![CodeStyle](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
+
+ **Main Documentation**: <https://orgua.github.io/shepherd>
+
+ **Source Code**: <https://github.com/orgua/shepherd-datalib>
+
+ **Main Project**: <https://github.com/orgua/shepherd>
+
+ ---
+
+ `shepherd-data` eases the handling of hdf5-recordings used by the [shepherd](https://github.com/orgua/shepherd)-testbed. Users can read, validate and create files and also extract, down-sample and plot information.
+
+ ## Installation
+
+ ### PIP - Online
+
+ ```shell
+ pip3 install shepherd-data -U
+ ```
+
+ For bleeding-edge-features or dev-work it is possible to install directly from GitHub-Sources (here `dev`-branch):
+
+ ```Shell
+ pip install git+https://github.com/orgua/shepherd-datalib.git@dev#subdirectory=shepherd_data -U
+ ```
+
+ ## More
+
+ Please consult the [official documentation](https://orgua.github.io/shepherd) for more, it covers:
+
+ - general context
+ - command-line interface
+ - programming interface
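The "programming interface" mentioned in the README above refers to the `Reader`/`Writer` classes whose call sites appear in the cli.py hunks further down in this diff. A minimal, illustrative sketch of that interface, assuming an installed package and an existing recording named `recording.h5` (both the file name and the separator are placeholders), using only calls visible in this diff:

```python
# Illustrative only - mirrors the Reader usage shown in the cli.py hunks below.
from pathlib import Path

from shepherd_data import Reader

with Reader(Path("recording.h5"), verbose=False) as shpr:
    # export the main dataset to CSV, as the CLI `extract` path does
    shpr.save_csv(shpr["data"], ";")
```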
shepherd_data-2024.4.1/pyproject.toml
@@ -0,0 +1,101 @@
+ [project]
+ name = "shepherd_data"
+ description = "Programming- and CLI-Interface for the h5-dataformat of the Shepherd-Testbed"
+ keywords = ["testbed", "beaglebone", "pru", "batteryless", "energyharvesting", "solar"]
+
+ authors = [
+ {name = "Ingmar Splitt", email = "ingmar.splitt@tu-dresden.de"},
+ ]
+ maintainers = [
+ {name = "Ingmar Splitt", email = "ingmar.splitt@tu-dresden.de"},
+ ]
+
+ readme = {file = "README.md", content-type = "text/markdown"}
+ license = {file = "LICENSE"}
+ dynamic = ["version"]
+
+ classifiers = [
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "Intended Audience :: Information Technology",
+ "Intended Audience :: Science/Research",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "License :: OSI Approved :: MIT License",
+ "Operating System :: OS Independent",
+ "Natural Language :: English",
+ ]
+
+ requires-python = ">=3.8"
+ dependencies = [
+ "click",
+ "h5py",
+ "matplotlib", # full-version
+ "numpy",
+ "pandas>=2.0.0", # full-version, v2 is OK
+ "pyYAML",
+ "scipy", # full-version
+ "shepherd-core[inventory]>=2024.04.1",
+ "tqdm", # full-version
+ ]
+
+ [project.optional-dependencies]
+ elf = [
+ "shepherd-core[elf]"
+ ]
+
+ dev = [
+ "shepherd-core[dev]",
+ "pandas-stubs", # for pyright with pandas
+ ]
+
+ test = [
+ "shepherd-core[test]",
+ "pytest",
+ "pytest-click",
+ "coverage",
+ ]
+
+ [project.urls]
+ Documentation = "https://github.com/orgua/shepherd-datalib/blob/main/README.md"
+ Issues = "https://pypi.org/project/shepherd-data/issues"
+ Source = "https://pypi.org/project/shepherd-data/"
+
+ [project.scripts]
+ shepherd-data = "shepherd_data.cli:cli"
+
+ [build-system]
+ requires = ["setuptools"]
+ build-backend = "setuptools.build_meta"
+
+ [tool.setuptools]
+ platforms = ["unix", "linux", "osx", "cygwin", "win32", "win64"]
+ zip-safe = true
+ #include-package-data = true
+
+ [tool.setuptools.package-dir]
+ shepherd_data = "shepherd_data"
+
+ [tool.setuptools.package-data]
+ shepherd_data = [
+ "README.md",
+ "src/examples/*.py",
+ "src/examples/*.iv",
+ ]
+
+ [tool.setuptools.dynamic]
+ version = {attr = "shepherd_data.__version__"}
+
+ [tool.pytest.ini_options]
+ addopts = "-vvv --stepwise" # opts: verbose result for each tests
+ # TODO: add something like "--cov --cov-report html --cov-report term-missing --cov-fail-under 95"
+
+ [tool.coverage.run]
+ source = ["shepherd_data"]
+
+ [tool.mypy]
+ python_version = 3.8
+ ignore_missing_imports = true
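The `[tool.setuptools.dynamic]` table above resolves the package version from `shepherd_data.__version__`, which the `__init__.py` hunk further down bumps to `"2024.04.1"`; PEP 440 normalizes this to `2024.4.1`, matching the PKG-INFO shown earlier. A quick runtime check (assumes the package is installed in the current environment):

```python
# Reads the dynamic version that setuptools resolves from shepherd_data.__version__
import shepherd_data

print(shepherd_data.__version__)  # "2024.04.1" (normalized to 2024.4.1 on PyPI)
```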
shepherd_data-2024.4.1/setup.cfg
@@ -0,0 +1,4 @@
+ [egg_info]
+ tag_build =
+ tag_date = 0
+
{shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/shepherd_data/__init__.py
@@ -4,11 +4,12 @@ Provides classes for storing and retrieving sampled IV data to/from
  HDF5 files.

  """
+
  from shepherd_core import Writer

  from .reader import Reader

- __version__ = "2023.11.1"
+ __version__ = "2024.04.1"

  __all__ = [
  "Reader",
{shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/shepherd_data/cli.py
@@ -1,5 +1,5 @@
- """Command definitions for CLI
- """
+ """Command definitions for CLI"""
+
  import logging
  import os
  import sys
@@ -120,9 +120,7 @@ def extract(in_data: Path, ds_factor: float, separator: str) -> None:
  # will create a downsampled h5-file (if not existing) and then saving to csv
  ds_file = file.with_suffix(f".downsampled_x{round(ds_factor)}.h5")
  if not ds_file.exists():
- logger.info(
- "Downsampling '%s' by factor x%f ...", file.name, ds_factor
- )
+ logger.info("Downsampling '%s' by factor x%f ...", file.name, ds_factor)
  with Writer(
  ds_file,
  mode=shpr.get_mode(),
@@ -140,12 +138,8 @@ def extract(in_data: Path, ds_factor: float, separator: str) -> None:
  ds_factor=ds_factor,
  is_time=True,
  )
- shpr.downsample(
- shpr.ds_voltage, shpw.ds_voltage, ds_factor=ds_factor
- )
- shpr.downsample(
- shpr.ds_current, shpw.ds_current, ds_factor=ds_factor
- )
+ shpr.downsample(shpr.ds_voltage, shpw.ds_voltage, ds_factor=ds_factor)
+ shpr.downsample(shpr.ds_current, shpw.ds_current, ds_factor=ds_factor)

  with Reader(ds_file, verbose=verbose_level > 2) as shpd:
  shpd.save_csv(shpd["data"], separator)
@@ -184,6 +178,8 @@ def extract_meta(in_data: Path, separator: str) -> None:
  for element in logs + logs_depr:
  if element in shpr.h5file:
  shpr.save_log(shpr[element])
+ # TODO: allow omitting timestamp,
+ # also test if segmented uart is correctly written
  shpr.warn_logs(element, show=True)
  except TypeError as _xpc:
  logger.error("ERROR: will skip file, caught exception: %s", _xpc)
@@ -212,21 +208,16 @@ def extract_uart(in_data: Path) -> None:
  with log_path.open("w") as log_file:
  for line in lines:
  with suppress(TypeError):
- timestamp = datetime.fromtimestamp(
- float(line[0]), tz=local_tz()
- )
- log_file.write(
- timestamp.strftime("%Y-%m-%d %H:%M:%S.%f") + ":"
- )
+ timestamp = datetime.fromtimestamp(float(line[0]), tz=local_tz())
+ log_file.write(timestamp.strftime("%Y-%m-%d %H:%M:%S.%f") + ":")
+ # TODO: allow to skip Timestamp and export raw text
  log_file.write(f"\t{str.encode(line[1])}")
  log_file.write("\n")
  except TypeError as _xpc:
  logger.error("ERROR: will skip file, caught exception: %s", _xpc)


- @cli.command(
- short_help="Extracts gpio-trace from file or directory containing shepherd-recordings"
- )
+ @cli.command(short_help="Extracts gpio-trace from file or directory containing shepherd-recordings")
  @click.argument("in_data", type=click.Path(exists=True, resolve_path=True))
  @click.option(
  "--separator",
@@ -269,9 +260,7 @@ def extract_gpio(in_data: Path, separator: str) -> None:
  type=click.INT,
  help="Alternative Input to determine a downsample-factor (Choose One)",
  )
- def downsample(
- in_data: Path, ds_factor: Optional[float], sample_rate: Optional[int]
- ) -> None:
+ def downsample(in_data: Path, ds_factor: Optional[float], sample_rate: Optional[int]) -> None:
  """Creates an array of downsampling-files from file
  or directory containing shepherd-recordings
  """
@@ -289,7 +278,7 @@ def downsample(
  try:
  with Reader(file, verbose=verbose_level > 2) as shpr:
  for _factor in ds_list:
- if shpr.ds_time.shape[0] / _factor < 1000:
+ if shpr.ds_voltage.shape[0] / _factor < 1000:
  logger.warning(
  "will skip downsampling for %s because "
  "resulting sample-size is too small",
@@ -299,9 +288,7 @@ def downsample(
  ds_file = file.with_suffix(f".downsampled_x{round(_factor)}.h5")
  if ds_file.exists():
  continue
- logger.info(
- "Downsampling '%s' by factor x%f ...", file.name, _factor
- )
+ logger.info("Downsampling '%s' by factor x%f ...", file.name, _factor)
  with Writer(
  ds_file,
  mode=shpr.get_mode(),
@@ -313,22 +300,14 @@ def downsample(
  shpw["ds_factor"] = _factor
  shpw.store_hostname(shpr.get_hostname())
  shpw.store_config(shpr.get_config())
- shpr.downsample(
- shpr.ds_time, shpw.ds_time, ds_factor=_factor, is_time=True
- )
- shpr.downsample(
- shpr.ds_voltage, shpw.ds_voltage, ds_factor=_factor
- )
- shpr.downsample(
- shpr.ds_current, shpw.ds_current, ds_factor=_factor
- )
+ shpr.downsample(shpr.ds_time, shpw.ds_time, ds_factor=_factor, is_time=True)
+ shpr.downsample(shpr.ds_voltage, shpw.ds_voltage, ds_factor=_factor)
+ shpr.downsample(shpr.ds_current, shpw.ds_current, ds_factor=_factor)
  except TypeError as _xpc:
  logger.error("ERROR: will skip file, caught exception: %s", _xpc)


- @cli.command(
- short_help="Plots IV-trace from file or directory containing shepherd-recordings"
- )
+ @cli.command(short_help="Plots IV-trace from file or directory containing shepherd-recordings")
  @click.argument("in_data", type=click.Path(exists=True, resolve_path=True))
  @click.option(
  "--start",
@@ -382,13 +361,11 @@ def plot(
  try:
  with Reader(file, verbose=verbose_level > 2) as shpr:
  if multiplot:
- data.append(
- shpr.generate_plot_data(start, end, relative_timestamp=True)
- )
+ data.append(shpr.generate_plot_data(start, end, relative_timestamp=True))
  else:
  shpr.plot_to_file(start, end, width, height)
  except TypeError as _xpc:
- logger.error("ERROR: will skip file, caught exception: %s", _xpc)
+ logger.exception("ERROR: will skip file, caught exception: %s", _xpc)
  if multiplot:
  logger.info("Got %d datasets to plot", len(data))
  mpl_path = Reader.multiplot_to_file(data, in_data, width, height)
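The plot hunk above collects per-file data via `generate_plot_data()` and hands the list to `Reader.multiplot_to_file()`. A stand-alone sketch of that multiplot path, with hypothetical recording names and placeholder start/end times and figure dimensions (not the CLI defaults):

```python
# Illustrative sketch of the multiplot path from cli.py; file names and
# numeric values are placeholders, not defaults of the shipped CLI.
from pathlib import Path

from shepherd_data import Reader

files = [Path("node1.h5"), Path("node2.h5")]  # hypothetical recordings
data = []
for file in files:
    with Reader(file, verbose=False) as shpr:
        # collect plot data between 0 s and 10 s with relative timestamps
        data.append(shpr.generate_plot_data(0, 10, relative_timestamp=True))
# combine all datasets into one figure (width / height as in the CLI call)
Reader.multiplot_to_file(data, Path("."), 20, 10)
```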
{shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/shepherd_data/debug_resampler.py
@@ -24,9 +24,7 @@ with shpd.Reader(file) as shpr:
  window_samples=shpr.get_window_samples(),
  cal_data=shpr.get_calibration_data(),
  ) as shpw:
- shpr.resample(
- shpr.ds_time, shpw.ds_time, samplerate_dst=samplerate_sps, is_time=True
- )
+ shpr.resample(shpr.ds_time, shpw.ds_time, samplerate_dst=samplerate_sps, is_time=True)
  shpr.resample(shpr.ds_voltage, shpw.ds_voltage, samplerate_dst=samplerate_sps)
  shpr.resample(shpr.ds_current, shpw.ds_current, samplerate_dst=samplerate_sps)
  shpw.save_metadata()
{shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/shepherd_data/ivonne.py
@@ -2,6 +2,7 @@
  to generate valid shepherd-data for emulation

  """
+
  import errno
  import logging
  import math
@@ -34,7 +35,7 @@ def get_isc(coeffs: pd.DataFrame): # noqa: ANN201


  class Reader:
- """container for converters to shepherd-data"""
+ """Container for converters that bridge the gap to shepherds data-files."""

  _logger: logging.Logger = logging.getLogger("SHPData.IVonne.Reader")

@@ -63,9 +64,7 @@ class Reader:

  def __enter__(self) -> Self:
  if not self.file_path.exists():
- raise FileNotFoundError(
- errno.ENOENT, os.strerror(errno.ENOENT), self.file_path.name
- )
+ raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), self.file_path.name)
  with self.file_path.open("rb") as ifr:
  self._df = pickle.load(ifr) # noqa: S301
  self._refresh_file_stats()
@@ -104,8 +103,9 @@ class Reader:
  pts_per_curve: int = 1000,
  duration_s: Optional[float] = None,
  ) -> None:
- """Transforms previously recorded parameters to shepherd hdf database with IV curves.
- Shepherd should work with IV 'surfaces', where we have a stream of IV curves
+ """Transform recorded parameters to shepherd hdf database with IV curves.
+
+ Shepherd works with IV 'surfaces', which is a stream of IV curves.

  :param shp_output: Path where the resulting hdf file shall be stored
  :param v_max: Maximum voltage supported by shepherd
@@ -116,9 +116,7 @@ class Reader:
  raise RuntimeError("IVonne Context was not entered - file not open!")
  if isinstance(duration_s, (float, int)) and self.runtime_s > duration_s:
  self._logger.info(" -> gets trimmed to %f s", duration_s)
- df_elements_n = min(
- self._df.shape[0], int(duration_s * self.samplerate_sps)
- )
+ df_elements_n = min(self._df.shape[0], int(duration_s * self.samplerate_sps))
  else:
  df_elements_n = self._df.shape[0]

@@ -128,9 +126,7 @@ class Reader:

  v_proto = np.linspace(0, v_max, pts_per_curve)

- with Writer(
- shp_output, datatype="ivcurve", window_samples=pts_per_curve
- ) as sfw:
+ with Writer(shp_output, datatype="ivcurve", window_samples=pts_per_curve) as sfw:
  sfw.store_hostname("IVonne")
  curve_interval_us = round(sfw.sample_interval_ns * pts_per_curve / 1000)
  up_factor = self.sample_interval_ns // sfw.sample_interval_ns
@@ -140,9 +136,7 @@ class Reader:
  for idx in job_iter:
  idx_top = min(idx + max_elements, df_elements_n)
  df_slice = self._df.iloc[idx : idx_top + 1].copy()
- df_slice["timestamp"] = pd.TimedeltaIndex(
- data=df_slice["time"], unit="s"
- )
+ df_slice["timestamp"] = pd.to_timedelta(df_slice["time"], unit="s")
  df_slice = df_slice.set_index("timestamp")
  # warning: .interpolate does crash in debug-mode with typeError
  df_slice = (
@@ -171,7 +165,7 @@ class Reader:
  duration_s: Optional[float] = None,
  tracker: Optional[MPPTracker] = None,
  ) -> None:
- """Transforms shepherd IV curves to shepherd IV traces.
+ """Transform shepherd IV curves to shepherd IV samples / traces.

  For the 'buck' and 'buck-boost' modes, shepherd takes voltage and current traces.
  These can be recorded with shepherd or generated from existing IV curves by, for
@@ -188,14 +182,13 @@ class Reader:
  :param v_max: Maximum voltage supported by shepherd
  :param duration_s: time to stop in seconds, counted from beginning
  :param tracker: VOC or OPT
+
  """
  if self._df is None:
  raise RuntimeError("IVonne Context was not entered - file not open!")
  if isinstance(duration_s, (float, int)) and self.runtime_s > duration_s:
  self._logger.info(" -> gets trimmed to %f s", duration_s)
- df_elements_n = min(
- self._df.shape[0], int(duration_s * self.samplerate_sps)
- )
+ df_elements_n = min(self._df.shape[0], int(duration_s * self.samplerate_sps))
  else:
  df_elements_n = self._df.shape[0]

@@ -223,17 +216,11 @@ class Reader:
  df_slice.loc[:, "voc"] = get_voc(df_slice)
  df_slice.loc[df_slice["voc"] >= v_max, "voc"] = v_max
  df_slice = tracker.process(df_slice)
- df_slice["timestamp"] = pd.TimedeltaIndex(
- data=df_slice["time"], unit="s"
- )
- df_slice = df_slice[["time", "v", "i", "timestamp"]].set_index(
- "timestamp"
- )
+ df_slice["timestamp"] = pd.to_timedelta(df_slice["time"], unit="s")
+ df_slice = df_slice[["time", "v", "i", "timestamp"]].set_index("timestamp")
  # warning: .interpolate does crash in debug-mode with typeError
  df_slice = (
- df_slice.resample(f"{interval_us}us")
- .interpolate(method="cubic")
- .iloc[:-1]
+ df_slice.resample(f"{interval_us}us").interpolate(method="cubic").iloc[:-1]
  )
  sfw.append_iv_data_si(
  df_slice["time"].to_numpy(),
@@ -247,7 +234,7 @@ class Reader:
  v_max: float = 5.0,
  duration_s: Optional[float] = None,
  ) -> None:
- """Transforms ivonne-parameters to upsampled version for shepherd
+ """Transform ivonne-parameters to up-sampled versions for shepherd.

  :param shp_output: Path where the resulting hdf file shall be stored
  :param v_max: Maximum voltage supported by shepherd
@@ -257,9 +244,7 @@ class Reader:
  raise RuntimeError("IVonne Context was not entered - file not open!")
  if isinstance(duration_s, (float, int)) and self.runtime_s > duration_s:
  self._logger.info(" -> gets trimmed to %f s", duration_s)
- df_elements_n = min(
- self._df.shape[0], int(duration_s * self.samplerate_sps)
- )
+ df_elements_n = min(self._df.shape[0], int(duration_s * self.samplerate_sps))
  else:
  df_elements_n = self._df.shape[0]

@@ -282,17 +267,11 @@ class Reader:
  df_slice.loc[:, "voc"] = get_voc(df_slice)
  df_slice.loc[df_slice["voc"] >= v_max, "voc"] = v_max
  df_slice.loc[:, "isc"] = get_isc(df_slice)
- df_slice["timestamp"] = pd.TimedeltaIndex(
- data=df_slice["time"], unit="s"
- )
- df_slice = df_slice[["time", "voc", "isc", "timestamp"]].set_index(
- "timestamp"
- )
+ df_slice["timestamp"] = pd.to_timedelta(df_slice["time"], unit="s")
+ df_slice = df_slice[["time", "voc", "isc", "timestamp"]].set_index("timestamp")
  # warning: .interpolate does crash in debug-mode with typeError
  df_slice = (
- df_slice.resample(f"{interval_us}us")
- .interpolate(method="cubic")
- .iloc[:-1]
+ df_slice.resample(f"{interval_us}us").interpolate(method="cubic").iloc[:-1]
  )
  sfw.append_iv_data_si(
  df_slice["time"].to_numpy(),
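A recurring change in ivonne.py replaces the `pd.TimedeltaIndex(data=..., unit="s")` construction with `pd.to_timedelta(...)`, presumably to stay compatible with pandas 2.x, where the old keyword form is deprecated. A minimal sketch of the timedelta-index / resample / interpolate pattern used above, with made-up sample data (the library itself resamples on a microsecond grid and interpolates cubically):

```python
# Stand-alone sketch of the pattern above; the numbers are made-up sample data.
import pandas as pd

df = pd.DataFrame({"time": [0.0, 0.02, 0.04], "voc": [3.0, 3.1, 2.9]})
# pd.to_timedelta() replaces the TimedeltaIndex(data=..., unit="s") construction
df["timestamp"] = pd.to_timedelta(df["time"], unit="s")
df = df.set_index("timestamp")
# up-sample to a 10 ms grid and interpolate (linear here, cubic in ivonne.py)
df = df.resample("10ms").interpolate(method="linear").iloc[:-1]
print(df)
```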
{shepherd_data-2023.11.1 → shepherd_data-2024.4.1}/shepherd_data/mppt.py
@@ -1,6 +1,8 @@
  """Harvesters, simple and fast approach.
- Might be exchanged by shepherds py-model of pru-harvesters
+
+ Might be exchanged by shepherds py-model of pru-harvesters.
  """
+
  import numpy as np
  import pandas as pd

@@ -8,16 +10,11 @@ from shepherd_core import Calc_t


  def iv_model(voltages: Calc_t, coeffs: pd.Series) -> Calc_t:
- """Simple diode based model of a solar panel IV curve.
-
- Args:
- ----
- :param voltages: Load voltage of the solar panel
- :param coeffs: three generic coefficients
+ """Calculate simple diode based model (equivalent circuit diagram) of a solar panel IV curve.

- Returns:
- -------
- Solar current at given load voltage
+ :param voltages: Load voltage of the solar panel
+ :param coeffs: three generic coefficients
+ :return: Solar current at given load voltage
  """
  currents = float(coeffs["a"]) - float(coeffs["b"]) * (
  np.exp(float(coeffs["c"]) * voltages) - 1.0
@@ -40,7 +37,7 @@ def find_oc(v_arr: np.ndarray, i_arr: np.ndarray, ratio: float = 0.05) -> np.nda


  class MPPTracker:
- """Prototype
+ """Prototype for a MPPT-class.

  :param v_max: Maximum voltage supported by shepherd
  :param pts_per_curve: resolution of internal ivcurve
@@ -52,34 +49,34 @@ class MPPTracker:
  self.v_proto: np.ndarray = np.linspace(0, v_max, pts_per_curve)

  def process(self, coeffs: pd.DataFrame) -> pd.DataFrame:
- """Apply harvesting model to input data
+ """Apply harvesting model to input data.

  :param coeffs: ivonne coefficients
- :return:
+ :return: ivsample-data
  """
- pass


  class OpenCircuitTracker(MPPTracker):
- """Open-circuit based MPPT
+ """Open-circuit (-voltage) based MPPT.

  :param v_max: Maximum voltage supported by shepherd
  :param pts_per_curve: resolution of internal ivcurve
  :param ratio: (float) Ratio of open-circuit voltage to track
  """

- def __init__(
- self, v_max: float = 5.0, pts_per_curve: int = 1000, ratio: float = 0.8
- ) -> None:
+ def __init__(self, v_max: float = 5.0, pts_per_curve: int = 1000, ratio: float = 0.8) -> None:
  super().__init__(v_max, pts_per_curve)
  self.ratio = ratio

  def process(self, coeffs: pd.DataFrame) -> pd.DataFrame:
+ """Apply harvesting model to input data.
+
+ :param coeffs: ivonne coefficients
+ :return: ivsample-data
+ """
  coeffs["icurve"] = coeffs.apply(lambda x: iv_model(self.v_proto, x), axis=1)
  if "voc" not in coeffs.columns:
- coeffs["voc"] = coeffs.apply(
- lambda x: find_oc(self.v_proto, x["ivcurve"]), axis=1
- )
+ coeffs["voc"] = coeffs.apply(lambda x: find_oc(self.v_proto, x["ivcurve"]), axis=1)
  coeffs["rvoc_pos"] = coeffs.apply(
  lambda x: np.argmax(self.v_proto[self.v_proto < self.ratio * x["voc"]]),
  axis=1,
@@ -90,7 +87,7 @@ class OpenCircuitTracker(MPPTracker):


  class OptimalTracker(MPPTracker):
- """Optimal MPPT
+ """Optimal MPPT by looking at the whole curve.

  Calculates optimal harvesting voltage for every time and corresponding IV curve.

@@ -102,6 +99,11 @@ class OptimalTracker(MPPTracker):
  super().__init__(v_max, pts_per_curve)

  def process(self, coeffs: pd.DataFrame) -> pd.DataFrame:
+ """Apply harvesting model to input data.
+
+ :param coeffs: ivonne coefficients
+ :return: ivsample-data
+ """
  coeffs["icurve"] = coeffs.apply(lambda x: iv_model(self.v_proto, x), axis=1)
  coeffs["pcurve"] = coeffs.apply(lambda x: self.v_proto * x["icurve"], axis=1)
  coeffs["max_pos"] = coeffs.apply(lambda x: np.argmax(x["pcurve"]), axis=1)
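The tracker changes above are mostly reworded docstrings; the computation stays as shown: evaluate the diode model over the voltage grid, build a power curve, and take its argmax. A stand-alone sketch with made-up coefficients (`a`, `b`, `c` are illustrative values, not calibration data, and the negative-current clamp is a choice of this sketch, not taken from mppt.py):

```python
# Illustrative MPP computation following iv_model() and OptimalTracker above.
import numpy as np
import pandas as pd

coeffs = pd.Series({"a": 0.05, "b": 1e-9, "c": 4.0})  # made-up example values
v_proto = np.linspace(0, 5.0, 1000)  # voltage grid, as in MPPTracker

# same equation as iv_model(): i = a - b * (exp(c * v) - 1)
i_curve = float(coeffs["a"]) - float(coeffs["b"]) * (
    np.exp(float(coeffs["c"]) * v_proto) - 1.0
)
i_curve[i_curve < 0] = 0.0  # clamp beyond open-circuit (sketch-only choice)

p_curve = v_proto * i_curve  # power curve, as OptimalTracker builds it
v_mpp = v_proto[np.argmax(p_curve)]  # voltage at the maximum power point
print(f"V_mpp = {v_mpp:.2f} V, P_mpp = {p_curve.max() * 1e3:.2f} mW")
```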