shepherd-core 2025.4.2__py3-none-any.whl → 2025.5.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. shepherd_core/data_models/__init__.py +2 -0
  2. shepherd_core/data_models/base/content.py +4 -13
  3. shepherd_core/data_models/content/_external_fixtures.yaml +43 -43
  4. shepherd_core/data_models/content/energy_environment.py +2 -2
  5. shepherd_core/data_models/content/virtual_harvester.py +245 -16
  6. shepherd_core/data_models/content/virtual_harvester_fixture.yaml +2 -2
  7. shepherd_core/data_models/content/virtual_source.py +5 -2
  8. shepherd_core/data_models/content/virtual_source_fixture.yaml +3 -3
  9. shepherd_core/data_models/experiment/experiment.py +8 -8
  10. shepherd_core/data_models/experiment/observer_features.py +129 -18
  11. shepherd_core/data_models/experiment/target_config.py +5 -0
  12. shepherd_core/data_models/task/__init__.py +6 -3
  13. shepherd_core/data_models/task/emulation.py +21 -5
  14. shepherd_core/data_models/task/harvest.py +3 -2
  15. shepherd_core/data_models/task/observer_tasks.py +5 -4
  16. shepherd_core/data_models/task/programming.py +3 -1
  17. shepherd_core/data_models/task/testbed_tasks.py +3 -2
  18. shepherd_core/data_models/testbed/cape_fixture.yaml +8 -0
  19. shepherd_core/data_models/testbed/gpio.py +7 -0
  20. shepherd_core/data_models/testbed/mcu_fixture.yaml +4 -4
  21. shepherd_core/data_models/testbed/observer_fixture.yaml +17 -0
  22. shepherd_core/data_models/testbed/target_fixture.yaml +13 -0
  23. shepherd_core/data_models/testbed/testbed_fixture.yaml +11 -0
  24. shepherd_core/data_models/virtual_source_doc.txt +3 -3
  25. shepherd_core/fw_tools/converter.py +6 -3
  26. shepherd_core/fw_tools/validation.py +8 -4
  27. shepherd_core/reader.py +77 -47
  28. shepherd_core/testbed_client/client_abc_fix.py +2 -3
  29. shepherd_core/testbed_client/fixtures.py +15 -17
  30. shepherd_core/testbed_client/user_model.py +3 -6
  31. shepherd_core/version.py +1 -1
  32. shepherd_core/vsource/virtual_harvester_simulation.py +1 -1
  33. shepherd_core/vsource/virtual_source_simulation.py +1 -1
  34. shepherd_core/writer.py +8 -8
  35. {shepherd_core-2025.4.2.dist-info → shepherd_core-2025.5.3.dist-info}/METADATA +1 -1
  36. {shepherd_core-2025.4.2.dist-info → shepherd_core-2025.5.3.dist-info}/RECORD +39 -39
  37. {shepherd_core-2025.4.2.dist-info → shepherd_core-2025.5.3.dist-info}/WHEEL +1 -1
  38. {shepherd_core-2025.4.2.dist-info → shepherd_core-2025.5.3.dist-info}/top_level.txt +0 -0
  39. {shepherd_core-2025.4.2.dist-info → shepherd_core-2025.5.3.dist-info}/zip-safe +0 -0
@@ -35,14 +35,14 @@ class VirtualSourceDoc(ShpModel, title="Virtual Source (Documented, Testversion)
35
35
  )
36
36
 
37
37
  interval_startup_delay_drain_ms: float = Field(
38
- description="Model begins running but Target is not draining the buffer",
38
+ description="Model begins running but Target is not draining the storage capacitor",
39
39
  default=0,
40
40
  ge=0,
41
41
  le=10e3,
42
42
  )
43
43
 
44
44
  harvester: VirtualHarvesterConfig = Field(
45
- description="Only active / needed if input is 'ivcurves'",
45
+ description="Only active / needed if input is ivsurface / curves",
46
46
  default=VirtualHarvesterConfig(name="mppt_opt"),
47
47
  )
48
48
 
@@ -85,7 +85,7 @@ class VirtualSourceDoc(ShpModel, title="Virtual Source (Documented, Testversion)
85
85
  le=10_000,
86
86
  )
87
87
  I_intermediate_leak_nA: float = Field(
88
- description="Current leakage of intermediate buffer capacitor",
88
+ description="Current leakage of intermediate storage capacitor",
89
89
  default=0,
90
90
  ge=0,
91
91
  le=4.29e9,
@@ -28,7 +28,8 @@ def firmware_to_hex(file_path: Path) -> Path:
28
28
  return elf_to_hex(file_path)
29
29
  if is_hex(file_path):
30
30
  return file_path
31
- raise FileNotFoundError("FW2Hex: unknown file '%s', it should be ELF or HEX", file_path.name)
31
+ msg = (f"FW2Hex: unknown file '{file_path.name}', it should be ELF or HEX",)
32
+ raise FileNotFoundError(msg)
32
33
 
33
34
 
34
35
  @validate_call
@@ -98,10 +99,12 @@ def extract_firmware(data: Union[str, Path], data_type: FirmwareDType, file_path
98
99
  elif data_type == FirmwareDType.path_hex:
99
100
  file = file_path.with_suffix(".hex")
100
101
  else:
101
- raise ValueError("FW-Extraction failed due to unknown datatype '%s'", data_type)
102
+ msg = "FW-Extraction failed due to unknown datatype '{data_type}'"
103
+ raise ValueError(msg)
102
104
  if not file.parent.exists():
103
105
  file.parent.mkdir(parents=True)
104
106
  shutil.copy(data, file)
105
107
  else:
106
- raise ValueError("FW-Extraction failed due to unknown data-type '%s'", type(data))
108
+ msg = f"FW-Extraction failed due to unknown data-type '{type(data)}'"
109
+ raise ValueError(msg)
107
110
  return file
@@ -137,7 +137,8 @@ def determine_type(file: Path) -> FirmwareDType:
137
137
  return FirmwareDType.path_hex
138
138
  if is_elf(file):
139
139
  return FirmwareDType.path_elf
140
- raise ValueError("Type of file '%s' could not be determined", file.name)
140
+ msg = f"Type of file '{file.name}' could not be determined"
141
+ raise ValueError(msg)
141
142
 
142
143
 
143
144
  def determine_arch(file: Path) -> str:
@@ -148,11 +149,14 @@ def determine_arch(file: Path) -> str:
148
149
  return "msp430"
149
150
  if is_elf_nrf52(file):
150
151
  return "nrf52"
151
- raise ValueError("Arch of ELF '%s' could not be determined", file.name)
152
+ msg = f"Arch of ELF '{file.name}' could not be determined"
153
+ raise ValueError(msg)
152
154
  if file_t == FirmwareDType.path_hex:
153
155
  if is_hex_msp430(file):
154
156
  return "msp430"
155
157
  if is_hex_nrf52(file):
156
158
  return "nrf52"
157
- raise ValueError("Arch of HEX '%s' could not be determined", file.name)
158
- raise ValueError("Arch of file '%s' could not be determined", file.name)
159
+ msg = f"Arch of HEX '{file.name}' could not be determined"
160
+ raise ValueError(msg)
161
+ msg = f"Arch of file '{file.name}' could not be determined"
162
+ raise ValueError(msg)
shepherd_core/reader.py CHANGED
@@ -11,6 +11,7 @@ from itertools import product
11
11
  from pathlib import Path
12
12
  from types import MappingProxyType
13
13
  from typing import TYPE_CHECKING
14
+ from typing import Annotated
14
15
  from typing import Any
15
16
  from typing import Optional
16
17
  from typing import Union
@@ -21,6 +22,7 @@ import yaml
21
22
  from pydantic import validate_call
22
23
  from tqdm import trange
23
24
  from typing_extensions import Self
25
+ from typing_extensions import deprecated
24
26
 
25
27
  from .commons import SAMPLERATE_SPS_DEFAULT
26
28
  from .data_models.base.calibration import CalibrationPair
@@ -45,7 +47,7 @@ class Reader:
45
47
 
46
48
  """
47
49
 
48
- BUFFER_SAMPLES_N: int = 10_000
50
+ CHUNK_SAMPLES_N: int = 10_000
49
51
 
50
52
  MODE_TO_DTYPE: Mapping[str, Sequence[EnergyDType]] = MappingProxyType(
51
53
  {
@@ -82,10 +84,13 @@ class Reader:
82
84
 
83
85
  # init stats
84
86
  self.runtime_s: float = 0
85
- self.buffers_n: int = 0
87
+ self.samples_n: int = 0
88
+ self.chunks_n: int = 0
86
89
  self.file_size: int = 0
87
90
  self.data_rate: float = 0
88
91
 
92
+ self.buffers_n: Annotated[int, deprecated("use .chunk_n instead")] = 0
93
+
89
94
  # open file (if not already done by writer)
90
95
  self._reader_opened: bool = False
91
96
  if not hasattr(self, "h5file"):
@@ -113,7 +118,8 @@ class Reader:
113
118
  )
114
119
 
115
120
  if not isinstance(self.h5file, h5py.File):
116
- raise TypeError("Type of opened file is not h5py.File, for %s", self.file_path.name)
121
+ msg = (f"Type of opened file is not h5py.File, for {self.file_path.name}",)
122
+ raise TypeError(msg)
117
123
 
118
124
  self.ds_time: h5py.Dataset = self.h5file["data"]["time"]
119
125
  self.ds_voltage: h5py.Dataset = self.h5file["data"]["voltage"]
@@ -169,59 +175,63 @@ class Reader:
169
175
  def _refresh_file_stats(self) -> None:
170
176
  """Update internal states, helpful after resampling or other changes in data-group."""
171
177
  self.h5file.flush()
172
- sample_count = self.ds_time.shape[0]
178
+ self.samples_n = min(
179
+ self.ds_time.shape[0], self.ds_current.shape[0], self.ds_voltage.shape[0]
180
+ )
173
181
  duration_raw = (
174
- (int(self.ds_time[sample_count - 1]) - int(self.ds_time[0])) if sample_count > 0 else 0
182
+ (int(self.ds_time[self.samples_n - 1]) - int(self.ds_time[0]))
183
+ if self.samples_n > 0
184
+ else 0
175
185
  )
176
186
  # above's typecasting prevents overflow in u64-format
177
- if (sample_count > 0) and (duration_raw > 0):
178
- # this assumes iso-chronous sampling
187
+ if (self.samples_n > 0) and (duration_raw > 0):
188
+ # this assumes iso-chronous sampling, TODO: not the best choice?
179
189
  duration_s = self._cal.time.raw_to_si(duration_raw)
180
- self.sample_interval_s = duration_s / sample_count
190
+ self.sample_interval_s = duration_s / self.samples_n
181
191
  self.sample_interval_ns = round(10**9 * self.sample_interval_s)
182
- self.samplerate_sps = max(round((sample_count - 1) / duration_s), 1)
183
- self.runtime_s = round(self.ds_voltage.shape[0] / self.samplerate_sps, 1)
184
- self.buffers_n = int(self.ds_voltage.shape[0] // self.BUFFER_SAMPLES_N)
192
+ self.samplerate_sps = max(round((self.samples_n - 1) / duration_s), 1)
193
+ self.runtime_s = round(self.samples_n / self.samplerate_sps, 1)
194
+ self.chunks_n = self.buffers_n = int(self.samples_n // self.CHUNK_SAMPLES_N)
185
195
  if isinstance(self.file_path, Path):
186
196
  self.file_size = self.file_path.stat().st_size
187
197
  else:
188
198
  self.file_size = 0
189
199
  self.data_rate = self.file_size / self.runtime_s if self.runtime_s > 0 else 0
190
200
 
191
- def read_buffers(
201
+ def read(
192
202
  self,
193
203
  start_n: int = 0,
194
204
  end_n: Optional[int] = None,
195
- n_samples_per_buffer: Optional[int] = None,
205
+ n_samples_per_chunk: Optional[int] = None,
196
206
  *,
197
207
  is_raw: bool = False,
198
- omit_ts: bool = False,
208
+ omit_timestamps: bool = False,
199
209
  ) -> Generator[tuple, None, None]:
200
- """Read the specified range of buffers from the hdf5 file.
210
+ """Read the specified range of chunks from the hdf5 file.
201
211
 
202
212
  Generator - can be configured on first call
203
213
 
204
214
  Args:
205
215
  ----
206
- :param start_n: (int) Index of first buffer to be read
207
- :param end_n: (int) Index of last buffer to be read
208
- :param n_samples_per_buffer: (int) allows changing
216
+ :param start_n: (int) Index of first chunk to be read
217
+ :param end_n: (int) Index of last chunk to be read
218
+ :param n_samples_per_chunk: (int) allows changing
209
219
  :param is_raw: (bool) output original data, not transformed to SI-Units
210
- :param omit_ts: (bool) optimize reading if timestamp is never used
211
- Yields: Buffers between start and end (tuple with time, voltage, current)
220
+ :param omit_timestamps: (bool) optimize reading if timestamp is never used
221
+ Yields: chunks between start and end (tuple with time, voltage, current)
212
222
 
213
223
  """
214
- if n_samples_per_buffer is None:
215
- n_samples_per_buffer = self.BUFFER_SAMPLES_N
216
- end_max = int(self.ds_voltage.shape[0] // n_samples_per_buffer)
224
+ if n_samples_per_chunk is None:
225
+ n_samples_per_chunk = self.CHUNK_SAMPLES_N
226
+ end_max = int(self.samples_n // n_samples_per_chunk)
217
227
  end_n = end_max if end_n is None else min(end_n, end_max)
218
- self._logger.debug("Reading blocks %d to %d from source-file", start_n, end_n)
228
+ self._logger.debug("Reading chunk %d to %d from source-file", start_n, end_n)
219
229
  _raw = is_raw
220
- _wts = not omit_ts
230
+ _wts = not omit_timestamps
221
231
 
222
232
  for i in range(start_n, end_n):
223
- idx_start = i * n_samples_per_buffer
224
- idx_end = idx_start + n_samples_per_buffer
233
+ idx_start = i * n_samples_per_chunk
234
+ idx_end = idx_start + n_samples_per_chunk
225
235
  if _raw:
226
236
  yield (
227
237
  self.ds_time[idx_start:idx_end] if _wts else None,
@@ -235,6 +245,24 @@ class Reader:
235
245
  self._cal.current.raw_to_si(self.ds_current[idx_start:idx_end]),
236
246
  )
237
247
 
248
+ @deprecated("use .read() instead")
249
+ def read_buffers(
250
+ self,
251
+ start_n: int = 0,
252
+ end_n: Optional[int] = None,
253
+ n_samples_per_buffer: Optional[int] = None,
254
+ *,
255
+ is_raw: bool = False,
256
+ omit_ts: bool = False,
257
+ ) -> Generator[tuple, None, None]:
258
+ return self.read(
259
+ start_n=start_n,
260
+ end_n=end_n,
261
+ n_samples_per_chunk=n_samples_per_buffer,
262
+ is_raw=is_raw,
263
+ omit_timestamps=omit_ts,
264
+ )
265
+
238
266
  def get_calibration_data(self) -> CalibrationSeries:
239
267
  """Read calibration-data from hdf5 file.
240
268
 
@@ -384,23 +412,23 @@ class Reader:
384
412
  self.file_path.name,
385
413
  )
386
414
  # same length of datasets:
387
- ds_volt_size = self.h5file["data"]["voltage"].shape[0]
388
- for dset in ["current", "time"]:
415
+ samples_n = self.h5file["data"]["time"].shape[0]
416
+ for dset in ["voltage", "current"]:
389
417
  ds_size = self.h5file["data"][dset].shape[0]
390
- if ds_volt_size != ds_size:
418
+ if ds_size != samples_n:
391
419
  self._logger.warning(
392
420
  "[FileValidation] dataset '%s' has different size (=%d), "
393
- "compared to time-ds (=%d), in '%s'",
421
+ "compared to time (=%d), in '%s'",
394
422
  dset,
395
423
  ds_size,
396
- ds_volt_size,
424
+ samples_n,
397
425
  self.file_path.name,
398
426
  )
399
- # dataset-length should be multiple of buffersize
400
- remaining_size = ds_volt_size % self.BUFFER_SAMPLES_N
427
+ # dataset-length should be multiple of chunk-size
428
+ remaining_size = samples_n % self.CHUNK_SAMPLES_N
401
429
  if remaining_size != 0:
402
430
  self._logger.warning(
403
- "[FileValidation] datasets are not aligned with buffer-size in '%s'",
431
+ "[FileValidation] datasets are not aligned with chunk-size in '%s'",
404
432
  self.file_path.name,
405
433
  )
406
434
  # check compression
@@ -455,10 +483,10 @@ class Reader:
455
483
 
456
484
  :return: sampled energy in Ws (watt-seconds)
457
485
  """
458
- iterations = math.ceil(self.ds_voltage.shape[0] / self.max_elements)
486
+ iterations = math.ceil(self.samples_n / self.max_elements)
459
487
  job_iter = trange(
460
488
  0,
461
- self.ds_voltage.shape[0],
489
+ self.samples_n,
462
490
  self.max_elements,
463
491
  desc="energy",
464
492
  leave=False,
@@ -466,7 +494,7 @@ class Reader:
466
494
  )
467
495
 
468
496
  def _calc_energy(idx_start: int) -> float:
469
- idx_stop = min(idx_start + self.max_elements, self.ds_voltage.shape[0])
497
+ idx_stop = min(idx_start + self.max_elements, self.samples_n)
470
498
  vol_v = self._cal.voltage.raw_to_si(self.ds_voltage[idx_start:idx_stop])
471
499
  cur_a = self._cal.current.raw_to_si(self.ds_current[idx_start:idx_stop])
472
500
  return (vol_v[:] * cur_a[:]).sum() * self.sample_interval_s
@@ -520,16 +548,18 @@ class Reader:
520
548
  return stats
521
549
 
522
550
  def _data_timediffs(self) -> list[float]:
523
- """Calculate list of unique time-deltas [s] between buffers.
551
+ """Calculate list of unique time-deltas [s] between chunks.
524
552
 
525
- Optimized version that only looks at the start of each buffer.
553
+ Optimized version that only looks at the start of each chunk.
554
+ Timestamps get converted to signed (it still fits > 100 years)
555
+ to allow calculating negative diffs.
526
556
 
527
- :return: list of (unique) time-deltas between buffers [s]
557
+ :return: list of (unique) time-deltas between chunks [s]
528
558
  """
529
- iterations = math.ceil(self.ds_time.shape[0] / self.max_elements)
559
+ iterations = math.ceil(self.samples_n / self.max_elements)
530
560
  job_iter = trange(
531
561
  0,
532
- self.h5file["data"]["time"].shape[0],
562
+ self.samples_n,
533
563
  self.max_elements,
534
564
  desc="timediff",
535
565
  leave=False,
@@ -538,14 +568,14 @@ class Reader:
538
568
 
539
569
  def calc_timediffs(idx_start: int) -> list:
540
570
  ds_time = self.ds_time[
541
- idx_start : (idx_start + self.max_elements) : self.BUFFER_SAMPLES_N
542
- ]
571
+ idx_start : (idx_start + self.max_elements) : self.CHUNK_SAMPLES_N
572
+ ].astype(np.int64)
543
573
  diffs_np = np.unique(ds_time[1:] - ds_time[0:-1], return_counts=False)
544
574
  return list(np.array(diffs_np))
545
575
 
546
576
  diffs_ll = [calc_timediffs(i) for i in job_iter]
547
577
  diffs = {
548
- round(self._cal.time.raw_to_si(j) / self.BUFFER_SAMPLES_N, 6)
578
+ round(self._cal.time.raw_to_si(j) / self.CHUNK_SAMPLES_N, 6)
549
579
  for i in diffs_ll
550
580
  for j in i
551
581
  }
@@ -563,7 +593,7 @@ class Reader:
563
593
  self._logger.warning(
564
594
  "Time-jumps detected -> expected equal steps, but got: %s s", diffs
565
595
  )
566
- return (len(diffs) <= 1) and diffs[0] == round(0.1 / self.BUFFER_SAMPLES_N, 6)
596
+ return (len(diffs) <= 1) and diffs[0] == round(0.1 / self.CHUNK_SAMPLES_N, 6)
567
597
 
568
598
  def count_errors_in_log(self, group_name: str = "sheep", min_level: int = 40) -> int:
569
599
  if group_name not in self.h5file:
@@ -72,9 +72,8 @@ class AbcClient(ABC):
72
72
  try:
73
73
  values = self.query_item(model_type, name=values.get("name"), uid=values.get("id"))
74
74
  except ValueError as err:
75
- raise ValueError(
76
- "Query %s by name / ID failed - %s is unknown!", model_type, values
77
- ) from err
75
+ msg = f"Query {model_type} by name / ID failed - {values} is unknown!"
76
+ raise ValueError(msg) from err
78
77
  return self.try_inheritance(model_type, values)
79
78
 
80
79
  @abstractmethod
@@ -2,6 +2,7 @@
2
2
 
3
3
  import copy
4
4
  import pickle
5
+ from collections.abc import Iterable
5
6
  from collections.abc import Mapping
6
7
  from datetime import datetime
7
8
  from datetime import timedelta
@@ -34,11 +35,11 @@ class Fixture:
34
35
 
35
36
  def __init__(self, model_type: str) -> None:
36
37
  self.model_type: str = model_type.lower()
37
- self.elements_by_name: dict[str, dict] = {}
38
- self.elements_by_id: dict[int, dict] = {}
38
+ self.elements_by_name: dict[str, dict[str, Any]] = {}
39
+ self.elements_by_id: dict[int, dict[str, Any]] = {}
39
40
  # Iterator reset
40
41
  self._iter_index: int = 0
41
- self._iter_list: list = list(self.elements_by_name.values())
42
+ self._iter_list: list[dict[str, Any]] = list(self.elements_by_name.values())
42
43
 
43
44
  def insert(self, data: Wrapper) -> None:
44
45
  # ⤷ TODO: could get easier
@@ -54,9 +55,10 @@ class Fixture:
54
55
  self.elements_by_name[name] = data_model
55
56
  self.elements_by_id[_id] = data_model
56
57
  # update iterator
57
- self._iter_list = list(self.elements_by_name.values())
58
+ self._iter_list: list[dict[str, Any]] = list(self.elements_by_name.values())
58
59
 
59
- def __getitem__(self, key: Union[str, int]) -> dict:
60
+ def __getitem__(self, key: Union[str, int]) -> dict[str, Any]:
61
+ original_key = key
60
62
  if isinstance(key, str):
61
63
  key = key.lower()
62
64
  if key in self.elements_by_name:
@@ -65,7 +67,7 @@ class Fixture:
65
67
  key = int(key)
66
68
  if key in self.elements_by_id:
67
69
  return self.elements_by_id[int(key)]
68
- msg = f"{self.model_type} '{key}' not found!"
70
+ msg = f"{self.model_type} '{original_key}' not found!"
69
71
  raise ValueError(msg)
70
72
 
71
73
  def __iter__(self) -> Self:
@@ -73,14 +75,14 @@ class Fixture:
73
75
  self._iter_list = list(self.elements_by_name.values())
74
76
  return self
75
77
 
76
- def __next__(self) -> Any:
78
+ def __next__(self) -> dict[str, Any]:
77
79
  if self._iter_index < len(self._iter_list):
78
80
  member = self._iter_list[self._iter_index]
79
81
  self._iter_index += 1
80
82
  return member
81
83
  raise StopIteration
82
84
 
83
- def keys(self): # noqa: ANN201
85
+ def keys(self) -> Iterable[str]:
84
86
  return self.elements_by_name.keys()
85
87
 
86
88
  def refs(self) -> dict:
@@ -101,14 +103,10 @@ class Fixture:
101
103
  base_name = values.get("name")
102
104
  if base_name in chain:
103
105
  msg = f"Inheritance-Circle detected ({base_name} already in {chain})"
104
- raise ValueError(
105
- msg,
106
- )
106
+ raise ValueError(msg)
107
107
  if base_name == fixture_name:
108
108
  msg = f"Inheritance-Circle detected ({base_name} == {fixture_name})"
109
- raise ValueError(
110
- msg,
111
- )
109
+ raise ValueError(msg)
112
110
  chain.append(base_name)
113
111
  fixture_base = copy.copy(self[fixture_name])
114
112
  logger.debug("'%s' will inherit from '%s'", self.model_type, fixture_name)
@@ -153,13 +151,13 @@ class Fixture:
153
151
  base[key] = value
154
152
  return base
155
153
 
156
- def query_id(self, _id: int) -> dict:
154
+ def query_id(self, _id: int) -> dict[str, Any]:
157
155
  if isinstance(_id, int) and _id in self.elements_by_id:
158
156
  return self.elements_by_id[_id]
159
157
  msg = f"Initialization of {self.model_type} by ID failed - {_id} is unknown!"
160
158
  raise ValueError(msg)
161
159
 
162
- def query_name(self, name: str) -> dict:
160
+ def query_name(self, name: str) -> dict[str, Any]:
163
161
  if isinstance(name, str) and name.lower() in self.elements_by_name:
164
162
  return self.elements_by_name[name.lower()]
165
163
  msg = f"Initialization of {self.model_type} by name failed - {name} is unknown!"
@@ -243,7 +241,7 @@ class Fixtures:
243
241
  msg = f"Component '{key}' not found!"
244
242
  raise ValueError(msg)
245
243
 
246
- def keys(self): # noqa: ANN201
244
+ def keys(self) -> Iterable[str]:
247
245
  return self.components.keys()
248
246
 
249
247
  @staticmethod
@@ -5,10 +5,7 @@ from hashlib import pbkdf2_hmac
5
5
  from typing import Annotated
6
6
  from typing import Any
7
7
  from typing import Optional
8
- from typing import Union
9
- from uuid import uuid4
10
8
 
11
- from pydantic import UUID4
12
9
  from pydantic import EmailStr
13
10
  from pydantic import Field
14
11
  from pydantic import SecretBytes
@@ -19,6 +16,7 @@ from pydantic import validate_call
19
16
 
20
17
  from shepherd_core.data_models.base.content import NameStr
21
18
  from shepherd_core.data_models.base.content import SafeStr
19
+ from shepherd_core.data_models.base.content import id_default
22
20
  from shepherd_core.data_models.base.shepherd import ShpModel
23
21
 
24
22
 
@@ -41,10 +39,9 @@ def hash_password(pw: Annotated[str, StringConstraints(min_length=20, max_length
41
39
  class User(ShpModel):
42
40
  """meta-data representation of a testbed-component (physical object)."""
43
41
 
44
- # id: UUID4 = Field( # TODO: db-migration - temp fix for documentation
45
- id: Union[UUID4, int] = Field(
42
+ id: int = Field(
46
43
  description="Unique ID",
47
- default_factory=uuid4,
44
+ default_factory=id_default,
48
45
  )
49
46
  name: NameStr
50
47
  description: Optional[SafeStr] = None
shepherd_core/version.py CHANGED
@@ -1,3 +1,3 @@
1
1
  """Separated string avoids circular imports."""
2
2
 
3
- version: str = "2025.04.2"
3
+ version: str = "2025.05.3"
@@ -54,7 +54,7 @@ def simulate_harvester(
54
54
  e_out_Ws = 0.0
55
55
 
56
56
  for _t, v_inp, i_inp in tqdm(
57
- file_inp.read_buffers(is_raw=True), total=file_inp.buffers_n, desc="Buffers", leave=False
57
+ file_inp.read(is_raw=True), total=file_inp.chunks_n, desc="Chunk", leave=False
58
58
  ):
59
59
  v_uV = cal_inp.voltage.raw_to_si(v_inp) * 1e6
60
60
  i_nA = cal_inp.current.raw_to_si(i_inp) * 1e9
@@ -74,7 +74,7 @@ def simulate_source(
74
74
  stats_internal = None
75
75
 
76
76
  for _t, v_inp, i_inp in tqdm(
77
- file_inp.read_buffers(is_raw=True), total=file_inp.buffers_n, desc="Buffers", leave=False
77
+ file_inp.read(is_raw=True), total=file_inp.chunks_n, desc="Chunk", leave=False
78
78
  ):
79
79
  v_uV = 1e6 * cal_inp.voltage.raw_to_si(v_inp)
80
80
  i_nA = 1e9 * cal_inp.current.raw_to_si(i_inp)
shepherd_core/writer.py CHANGED
@@ -94,7 +94,7 @@ class Writer(Reader):
94
94
  MODE_DEFAULT: str = "harvester"
95
95
  DATATYPE_DEFAULT: EnergyDType = EnergyDType.ivsample
96
96
 
97
- _CHUNK_SHAPE: tuple = (Reader.BUFFER_SAMPLES_N,)
97
+ _CHUNK_SHAPE: tuple = (Reader.CHUNK_SAMPLES_N,)
98
98
 
99
99
  @validate_call
100
100
  def __init__(
@@ -240,7 +240,7 @@ class Writer(Reader):
240
240
  # Store voltage and current samples in the data group,
241
241
  # both are stored as 4 Byte unsigned int
242
242
  grp_data = self.h5file.create_group("data")
243
- # the size of window_samples-attribute in harvest-data indicates ivcurves as input
243
+ # the size of window_samples-attribute in harvest-data indicates ivsurface / curves as input
244
244
  # -> emulator uses virtual-harvester, field will be adjusted by .embed_config()
245
245
  grp_data.attrs["window_samples"] = 0
246
246
 
@@ -287,7 +287,7 @@ class Writer(Reader):
287
287
 
288
288
  Args:
289
289
  ----
290
- timestamp: just start of buffer or whole ndarray
290
+ timestamp: just start of chunk (1 timestamp) or whole ndarray
291
291
  voltage: ndarray as raw unsigned integers
292
292
  current: ndarray as raw unsigned integers
293
293
 
@@ -331,7 +331,7 @@ class Writer(Reader):
331
331
  Args:
332
332
  ----
333
333
  timestamp: python timestamp (time.time()) in seconds (si-unit)
334
- -> provide start of buffer or whole ndarray
334
+ -> provide start of chunk (1 timestamp) or whole ndarray
335
335
  voltage: ndarray in physical-unit V
336
336
  current: ndarray in physical-unit A
337
337
 
@@ -343,16 +343,16 @@ class Writer(Reader):
343
343
  self.append_iv_data_raw(timestamp, voltage, current)
344
344
 
345
345
  def _align(self) -> None:
346
- """Align datasets with buffer-size of shepherd."""
346
+ """Align datasets with chunk-size of shepherd."""
347
347
  self._refresh_file_stats()
348
- n_buff = self.ds_voltage.size / self.BUFFER_SAMPLES_N
349
- size_new = int(math.floor(n_buff) * self.BUFFER_SAMPLES_N)
348
+ chunks_n = self.ds_voltage.size / self.CHUNK_SAMPLES_N
349
+ size_new = int(math.floor(chunks_n) * self.CHUNK_SAMPLES_N)
350
350
  if size_new < self.ds_voltage.size:
351
351
  if self.samplerate_sps != SAMPLERATE_SPS_DEFAULT:
352
352
  self._logger.debug("skipped alignment due to altered samplerate")
353
353
  return
354
354
  self._logger.info(
355
- "aligning with buffer-size, discarding last %d entries",
355
+ "aligning with chunk-size, discarding last %d entries",
356
356
  self.ds_voltage.size - size_new,
357
357
  )
358
358
  self.ds_time.resize((size_new,))
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: shepherd_core
3
- Version: 2025.4.2
3
+ Version: 2025.5.3
4
4
  Summary: Programming- and CLI-Interface for the h5-dataformat of the Shepherd-Testbed
5
5
  Author-email: Ingmar Splitt <ingmar.splitt@tu-dresden.de>
6
6
  Maintainer-email: Ingmar Splitt <ingmar.splitt@tu-dresden.de>