shepherd-core 2023.11.1__py3-none-any.whl → 2023.12.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. shepherd_core/__init__.py +1 -1
  2. shepherd_core/data_models/base/content.py +3 -8
  3. shepherd_core/data_models/base/shepherd.py +2 -6
  4. shepherd_core/data_models/content/virtual_source.py +13 -40
  5. shepherd_core/data_models/experiment/experiment.py +3 -8
  6. shepherd_core/data_models/experiment/observer_features.py +4 -9
  7. shepherd_core/data_models/experiment/target_config.py +4 -11
  8. shepherd_core/data_models/task/__init__.py +2 -6
  9. shepherd_core/data_models/task/emulation.py +7 -14
  10. shepherd_core/data_models/task/firmware_mod.py +1 -3
  11. shepherd_core/data_models/task/harvest.py +1 -3
  12. shepherd_core/data_models/testbed/observer.py +6 -22
  13. shepherd_core/data_models/testbed/testbed.py +1 -3
  14. shepherd_core/decoder_waveform/uart.py +8 -26
  15. shepherd_core/fw_tools/__init__.py +1 -3
  16. shepherd_core/fw_tools/converter.py +4 -12
  17. shepherd_core/fw_tools/patcher.py +4 -12
  18. shepherd_core/inventory/__init__.py +3 -9
  19. shepherd_core/inventory/system.py +2 -5
  20. shepherd_core/logger.py +1 -3
  21. shepherd_core/reader.py +26 -41
  22. shepherd_core/testbed_client/client.py +5 -16
  23. shepherd_core/testbed_client/fixtures.py +4 -14
  24. shepherd_core/testbed_client/user_model.py +1 -3
  25. shepherd_core/vsource/virtual_converter_model.py +4 -14
  26. shepherd_core/vsource/virtual_harvester_model.py +1 -3
  27. shepherd_core/vsource/virtual_source_model.py +2 -6
  28. shepherd_core/writer.py +11 -22
  29. {shepherd_core-2023.11.1.dist-info → shepherd_core-2023.12.1.dist-info}/METADATA +2 -5
  30. {shepherd_core-2023.11.1.dist-info → shepherd_core-2023.12.1.dist-info}/RECORD +42 -42
  31. {shepherd_core-2023.11.1.dist-info → shepherd_core-2023.12.1.dist-info}/WHEEL +1 -1
  32. tests/data_models/test_content_models.py +3 -9
  33. tests/data_models/test_experiment_models.py +2 -4
  34. tests/data_models/test_task_generation.py +1 -1
  35. tests/inventory/test_inventory.py +1 -3
  36. tests/test_cal_hw.py +2 -6
  37. tests/test_writer.py +2 -2
  38. tests/vsource/conftest.py +1 -3
  39. tests/vsource/test_converter.py +2 -6
  40. tests/vsource/test_harvester.py +3 -9
  41. {shepherd_core-2023.11.1.dist-info → shepherd_core-2023.12.1.dist-info}/top_level.txt +0 -0
  42. {shepherd_core-2023.11.1.dist-info → shepherd_core-2023.12.1.dist-info}/zip-safe +0 -0
@@ -53,11 +53,7 @@ class SystemInventory(ShpModel):
53
53
  )
54
54
  else:
55
55
  ifs1 = psutil.net_if_addrs().items()
56
- ifs2 = {
57
- name: (_if[1].address, _if[0].address)
58
- for name, _if in ifs1
59
- if len(_if) > 1
60
- }
56
+ ifs2 = {name: (_if[1].address, _if[0].address) for name, _if in ifs1 if len(_if) > 1}
61
57
  uptime = time.time() - psutil.boot_time()
62
58
 
63
59
  model_dict = {
@@ -69,6 +65,7 @@ class SystemInventory(ShpModel):
69
65
  "processor": platform.processor(),
70
66
  "hostname": platform.node(),
71
67
  "interfaces": ifs2,
68
+ # TODO: add free space on /
72
69
  }
73
70
 
74
71
  with suppress(FileNotFoundError):
shepherd_core/logger.py CHANGED
@@ -15,9 +15,7 @@ def get_verbose_level() -> int:
15
15
  return verbose_level
16
16
 
17
17
 
18
- def set_log_verbose_level(
19
- log_: Union[logging.Logger, logging.Handler], verbose: int
20
- ) -> None:
18
+ def set_log_verbose_level(log_: Union[logging.Logger, logging.Handler], verbose: int) -> None:
21
19
  if verbose == 0:
22
20
  log_.setLevel(logging.ERROR)
23
21
  logging.basicConfig(level=logging.ERROR)
shepherd_core/reader.py CHANGED
@@ -95,9 +95,7 @@ class Reader:
95
95
  self.h5file = h5py.File(self.file_path, "r") # = readonly
96
96
  self._reader_opened = True
97
97
  except OSError as _xcp:
98
- raise TypeError(
99
- f"Unable to open HDF5-File '{self.file_path.name}'"
100
- ) from _xcp
98
+ raise TypeError(f"Unable to open HDF5-File '{self.file_path.name}'") from _xcp
101
99
 
102
100
  if self.is_valid():
103
101
  self._logger.debug("File is available now")
@@ -109,9 +107,7 @@ class Reader:
109
107
  )
110
108
 
111
109
  if not isinstance(self.h5file, h5py.File):
112
- raise TypeError(
113
- "Type of opened file is not h5py.File, for %s", self.file_path.name
114
- )
110
+ raise TypeError("Type of opened file is not h5py.File, for %s", self.file_path.name)
115
111
 
116
112
  self.ds_time: h5py.Dataset = self.h5file["data"]["time"]
117
113
  self.ds_voltage: h5py.Dataset = self.h5file["data"]["voltage"]
@@ -120,9 +116,7 @@ class Reader:
120
116
  # retrieve cal-data
121
117
  if not hasattr(self, "_cal"):
122
118
  cal_dict = CalibrationSeries().model_dump()
123
- for ds, param in product(
124
- ["current", "voltage", "time"], ["gain", "offset"]
125
- ):
119
+ for ds, param in product(["current", "voltage", "time"], ["gain", "offset"]):
126
120
  cal_dict[ds][param] = self.h5file["data"][ds].attrs[param]
127
121
  self._cal = CalibrationSeries(**cal_dict)
128
122
 
@@ -167,12 +161,11 @@ class Reader:
167
161
  """update internal states, helpful after resampling or other changes in data-group"""
168
162
  self.h5file.flush()
169
163
  if (self.ds_time.shape[0] > 1) and (self.ds_time[1] != self.ds_time[0]):
170
- self.sample_interval_s = self._cal.time.raw_to_si(
171
- self.ds_time[1] - self.ds_time[0]
172
- )
164
+ # this assumes isochronal sampling
165
+ self.sample_interval_s = self._cal.time.raw_to_si(self.ds_time[1] - self.ds_time[0])
173
166
  self.sample_interval_ns = int(10**9 * self.sample_interval_s)
174
167
  self.samplerate_sps = max(int(10**9 // self.sample_interval_ns), 1)
175
- self.runtime_s = round(self.ds_time.shape[0] / self.samplerate_sps, 1)
168
+ self.runtime_s = round(self.ds_voltage.shape[0] / self.samplerate_sps, 1)
176
169
  if isinstance(self.file_path, Path):
177
170
  self.file_size = self.file_path.stat().st_size
178
171
  else:
@@ -185,6 +178,7 @@ class Reader:
185
178
  end_n: Optional[int] = None,
186
179
  *,
187
180
  is_raw: bool = False,
181
+ omit_ts: bool = False,
188
182
  ) -> Generator[tuple, None, None]:
189
183
  """Generator that reads the specified range of buffers from the hdf5 file.
190
184
  can be configured on first call
@@ -195,25 +189,27 @@ class Reader:
195
189
  :param start_n: (int) Index of first buffer to be read
196
190
  :param end_n: (int) Index of last buffer to be read
197
191
  :param is_raw: (bool) output original data, not transformed to SI-Units
192
+ :param omit_ts: (bool) optimize reading if timestamp is never used
198
193
  Yields: Buffers between start and end (tuple with time, voltage, current)
199
194
  """
200
195
  if end_n is None:
201
- end_n = int(self.ds_time.shape[0] // self.samples_per_buffer)
196
+ end_n = int(self.ds_voltage.shape[0] // self.samples_per_buffer)
202
197
  self._logger.debug("Reading blocks %d to %d from source-file", start_n, end_n)
203
198
  _raw = is_raw
199
+ _wts = not omit_ts
204
200
 
205
201
  for i in range(start_n, end_n):
206
202
  idx_start = i * self.samples_per_buffer
207
203
  idx_end = idx_start + self.samples_per_buffer
208
204
  if _raw:
209
205
  yield (
210
- self.ds_time[idx_start:idx_end],
206
+ self.ds_time[idx_start:idx_end] if _wts else None,
211
207
  self.ds_voltage[idx_start:idx_end],
212
208
  self.ds_current[idx_start:idx_end],
213
209
  )
214
210
  else:
215
211
  yield (
216
- self._cal.time.raw_to_si(self.ds_time[idx_start:idx_end]),
212
+ self._cal.time.raw_to_si(self.ds_time[idx_start:idx_end]) if _wts else None,
217
213
  self._cal.voltage.raw_to_si(self.ds_voltage[idx_start:idx_end]),
218
214
  self._cal.current.raw_to_si(self.ds_current[idx_start:idx_end]),
219
215
  )
@@ -339,20 +335,20 @@ class Reader:
339
335
  self.file_path.name,
340
336
  )
341
337
  # same length of datasets:
342
- ds_time_size = self.h5file["data"]["time"].shape[0]
343
- for dset in ["current", "voltage"]:
338
+ ds_volt_size = self.h5file["data"]["voltage"].shape[0]
339
+ for dset in ["current", "time"]:
344
340
  ds_size = self.h5file["data"][dset].shape[0]
345
- if ds_time_size != ds_size:
341
+ if ds_volt_size != ds_size:
346
342
  self._logger.warning(
347
343
  "[FileValidation] dataset '%s' has different size (=%d), "
348
344
  "compared to time-ds (=%d), in '%s'",
349
345
  dset,
350
346
  ds_size,
351
- ds_time_size,
347
+ ds_volt_size,
352
348
  self.file_path.name,
353
349
  )
354
350
  # dataset-length should be multiple of buffersize
355
- remaining_size = ds_time_size % self.samples_per_buffer
351
+ remaining_size = ds_volt_size % self.samples_per_buffer
356
352
  if remaining_size != 0:
357
353
  self._logger.warning(
358
354
  "[FileValidation] datasets are not aligned with buffer-size in '%s'",
@@ -409,10 +405,10 @@ class Reader:
409
405
 
410
406
  :return: sampled energy in Ws (watt-seconds)
411
407
  """
412
- iterations = math.ceil(self.ds_time.shape[0] / self.max_elements)
408
+ iterations = math.ceil(self.ds_voltage.shape[0] / self.max_elements)
413
409
  job_iter = trange(
414
410
  0,
415
- self.ds_time.shape[0],
411
+ self.ds_voltage.shape[0],
416
412
  self.max_elements,
417
413
  desc="energy",
418
414
  leave=False,
@@ -420,7 +416,7 @@ class Reader:
420
416
  )
421
417
 
422
418
  def _calc_energy(idx_start: int) -> float:
423
- idx_stop = min(idx_start + self.max_elements, self.ds_time.shape[0])
419
+ idx_stop = min(idx_start + self.max_elements, self.ds_voltage.shape[0])
424
420
  vol_v = self._cal.voltage.raw_to_si(self.ds_voltage[idx_start:idx_stop])
425
421
  cur_a = self._cal.current.raw_to_si(self.ds_current[idx_start:idx_stop])
426
422
  return (vol_v[:] * cur_a[:]).sum() * self.sample_interval_s
@@ -439,9 +435,7 @@ class Reader:
439
435
  si_converted = True
440
436
  if not isinstance(cal, CalibrationPair):
441
437
  if "gain" in dset.attrs and "offset" in dset.attrs:
442
- cal = CalibrationPair(
443
- gain=dset.attrs["gain"], offset=dset.attrs["offset"]
444
- )
438
+ cal = CalibrationPair(gain=dset.attrs["gain"], offset=dset.attrs["offset"])
445
439
  else:
446
440
  cal = CalibrationPair(gain=1)
447
441
  si_converted = False
@@ -459,8 +453,7 @@ class Reader:
459
453
  return [np.mean(data), np.min(data), np.max(data), np.std(data)]
460
454
 
461
455
  stats_list = [
462
- _calc_statistics(cal.raw_to_si(dset[i : i + self.max_elements]))
463
- for i in job_iter
456
+ _calc_statistics(cal.raw_to_si(dset[i : i + self.max_elements])) for i in job_iter
464
457
  ]
465
458
  if len(stats_list) < 1:
466
459
  return {}
@@ -519,9 +512,7 @@ class Reader:
519
512
  )
520
513
  return (len(diffs) <= 1) and diffs[0] == round(0.1 / self.samples_per_buffer, 6)
521
514
 
522
- def count_errors_in_log(
523
- self, group_name: str = "sheep", min_level: int = 40
524
- ) -> int:
515
+ def count_errors_in_log(self, group_name: str = "sheep", min_level: int = 40) -> int:
525
516
  if group_name not in self.h5file:
526
517
  return 0
527
518
  if "level" not in self.h5file["sheep"]:
@@ -599,9 +590,7 @@ class Reader:
599
590
  if yaml_path.exists():
600
591
  self._logger.info("File already exists, will skip '%s'", yaml_path.name)
601
592
  return {}
602
- metadata = self.get_metadata(
603
- node
604
- ) # {"h5root": self.get_metadata(self.h5file)}
593
+ metadata = self.get_metadata(node) # {"h5root": self.get_metadata(self.h5file)}
605
594
  with yaml_path.open("w", encoding="utf-8-sig") as yfd:
606
595
  yaml.safe_dump(metadata, yfd, default_flow_style=False, sort_keys=False)
607
596
  else:
@@ -637,9 +626,7 @@ class Reader:
637
626
  gpio_vs = self.h5file["gpio"]["value"]
638
627
 
639
628
  if name is None:
640
- descriptions = yaml.safe_load(
641
- self.h5file["gpio"]["value"].attrs["description"]
642
- )
629
+ descriptions = yaml.safe_load(self.h5file["gpio"]["value"].attrs["description"])
643
630
  pin_dict = {value["name"]: key for key, value in descriptions.items()}
644
631
  else:
645
632
  pin_dict = {name: self.get_gpio_pin_num(name)}
@@ -655,9 +642,7 @@ class Reader:
655
642
  )
656
643
  return waveforms
657
644
 
658
- def waveform_to_csv(
659
- self, pin_name: str, pin_wf: np.ndarray, separator: str = ","
660
- ) -> None:
645
+ def waveform_to_csv(self, pin_name: str, pin_wf: np.ndarray, separator: str = ",") -> None:
661
646
  path_csv = self.file_path.with_suffix(f".waveform.{pin_name}.csv")
662
647
  if path_csv.exists():
663
648
  self._logger.info("File already exists, will skip '%s'", path_csv.name)
@@ -18,9 +18,7 @@ from .user_model import User
18
18
  class TestbedClient:
19
19
  _instance: Optional[Self] = None
20
20
 
21
- def __init__(
22
- self, server: Optional[str] = None, token: Union[str, Path, None] = None
23
- ) -> None:
21
+ def __init__(self, server: Optional[str] = None, token: Union[str, Path, None] = None) -> None:
24
22
  if not hasattr(self, "_token"):
25
23
  self._token: str = "null"
26
24
  self._server: Optional[str] = testbed_server_default
@@ -42,9 +40,7 @@ class TestbedClient:
42
40
  TestbedClient._instance = None
43
41
 
44
42
  @validate_call
45
- def connect(
46
- self, server: Optional[str] = None, token: Union[str, Path, None] = None
47
- ) -> bool:
43
+ def connect(self, server: Optional[str] = None, token: Union[str, Path, None] = None) -> bool:
48
44
  """
49
45
  server: either "local" to use demo-fixtures or something like "https://HOST:PORT"
50
46
  token: your account validation
@@ -74,9 +70,7 @@ class TestbedClient:
74
70
  parameters=data.model_dump(),
75
71
  )
76
72
  if self._connected:
77
- r = self._req.post(
78
- self._server + "/add", data=wrap.model_dump_json(), timeout=2
79
- )
73
+ r = self._req.post(self._server + "/add", data=wrap.model_dump_json(), timeout=2)
80
74
  r.raise_for_status()
81
75
  else:
82
76
  self._fixtures.insert_model(wrap)
@@ -134,16 +128,11 @@ class TestbedClient:
134
128
  and value.lower() in self._fixtures[model_type].elements_by_name
135
129
  ):
136
130
  values = self.query_item(model_type, name=value)
137
- elif (
138
- isinstance(value, int)
139
- and value in self._fixtures[model_type].elements_by_id
140
- ):
131
+ elif isinstance(value, int) and value in self._fixtures[model_type].elements_by_id:
141
132
  # TODO: still depending on _fixture
142
133
  values = self.query_item(model_type, uid=value)
143
134
  else:
144
- raise ValueError(
145
- f"Query {model_type} by name / ID failed - " f"{values} is unknown!"
146
- )
135
+ raise ValueError(f"Query {model_type} by name / ID failed - {values} is unknown!")
147
136
  return self.try_inheritance(model_type, values)
148
137
 
149
138
  def fill_in_user_data(self, values: dict) -> dict:
@@ -148,16 +148,12 @@ class Fixture:
148
148
  def query_id(self, _id: int) -> dict:
149
149
  if isinstance(_id, int) and _id in self.elements_by_id:
150
150
  return self.elements_by_id[_id]
151
- raise ValueError(
152
- f"Initialization of {self.model_type} by ID failed - {_id} is unknown!"
153
- )
151
+ raise ValueError(f"Initialization of {self.model_type} by ID failed - {_id} is unknown!")
154
152
 
155
153
  def query_name(self, name: str) -> dict:
156
154
  if isinstance(name, str) and name.lower() in self.elements_by_name:
157
155
  return self.elements_by_name[name.lower()]
158
- raise ValueError(
159
- f"Initialization of {self.model_type} by name failed - {name} is unknown!"
160
- )
156
+ raise ValueError(f"Initialization of {self.model_type} by name failed - {name} is unknown!")
161
157
 
162
158
 
163
159
  def file_older_than(file: Path, delta: timedelta) -> bool:
@@ -172,9 +168,7 @@ class Fixtures:
172
168
  suffix = ".yaml"
173
169
 
174
170
  @validate_call
175
- def __init__(
176
- self, file_path: Optional[Path] = None, *, reset: bool = False
177
- ) -> None:
171
+ def __init__(self, file_path: Optional[Path] = None, *, reset: bool = False) -> None:
178
172
  if file_path is None:
179
173
  self.file_path = Path(__file__).parent.parent.resolve() / "data_models"
180
174
  else:
@@ -182,11 +176,7 @@ class Fixtures:
182
176
  self.components: Dict[str, Fixture] = {}
183
177
  save_path = self.file_path / "fixtures.pickle"
184
178
 
185
- if (
186
- save_path.exists()
187
- and not file_older_than(save_path, timedelta(hours=24))
188
- and not reset
189
- ):
179
+ if save_path.exists() and not file_older_than(save_path, timedelta(hours=24)) and not reset:
190
180
  # speedup
191
181
  with save_path.open("rb", buffering=-1) as fd:
192
182
  self.components = pickle.load(fd) # noqa: S301
@@ -19,9 +19,7 @@ from ..data_models.base.shepherd import ShpModel
19
19
 
20
20
 
21
21
  @validate_call
22
- def hash_password(
23
- pw: Annotated[str, StringConstraints(min_length=20, max_length=100)]
24
- ) -> bytes:
22
+ def hash_password(pw: Annotated[str, StringConstraints(min_length=20, max_length=100)]) -> bytes:
25
23
  # TODO: add salt of testbed -> this fn should be part of Testbed-Object
26
24
  # NOTE: 1M Iterations need 25s on beaglebone
27
25
  return pbkdf2_hmac(
@@ -52,9 +52,7 @@ class VirtualConverterModel:
52
52
  self.V_input_uV: float = 0.0
53
53
  self.P_inp_fW: float = 0.0
54
54
  self.P_out_fW: float = 0.0
55
- self.interval_startup_disabled_drain_n: int = (
56
- self._cfg.interval_startup_delay_drain_n
57
- )
55
+ self.interval_startup_disabled_drain_n: int = self._cfg.interval_startup_delay_drain_n
58
56
 
59
57
  # container for the stored energy
60
58
  self.V_mid_uV: float = self._cfg.V_intermediate_init_uV
@@ -71,12 +69,8 @@ class VirtualConverterModel:
71
69
 
72
70
  # prepare hysteresis-thresholds
73
71
  self.dV_enable_output_uV: float = self._cfg.dV_enable_output_uV
74
- self.V_enable_output_threshold_uV: float = (
75
- self._cfg.V_enable_output_threshold_uV
76
- )
77
- self.V_disable_output_threshold_uV: float = (
78
- self._cfg.V_disable_output_threshold_uV
79
- )
72
+ self.V_enable_output_threshold_uV: float = self._cfg.V_enable_output_threshold_uV
73
+ self.V_disable_output_threshold_uV: float = self._cfg.V_disable_output_threshold_uV
80
74
 
81
75
  if self.dV_enable_output_uV > self.V_enable_output_threshold_uV:
82
76
  self.V_enable_output_threshold_uV = self.dV_enable_output_uV
@@ -163,11 +157,7 @@ class VirtualConverterModel:
163
157
 
164
158
  if self.V_mid_uV > self._cfg.V_intermediate_max_uV:
165
159
  self.V_mid_uV = self._cfg.V_intermediate_max_uV
166
- if (
167
- (not self.enable_boost)
168
- and (self.P_inp_fW > 0.0)
169
- and (self.V_mid_uV > self.V_input_uV)
170
- ):
160
+ if (not self.enable_boost) and (self.P_inp_fW > 0.0) and (self.V_mid_uV > self.V_input_uV):
171
161
  # TODO: obfuscated - no "direct connection"?
172
162
  self.V_mid_uV = self.V_input_uV
173
163
  elif self.V_mid_uV < 1:
@@ -93,9 +93,7 @@ class VirtualHarvesterModel:
93
93
  if distance_now < distance_last and distance_now < self.voltage_step_x4_uV:
94
94
  self.voltage_hold = _voltage_uV
95
95
  self.current_hold = _current_nA
96
- elif (
97
- distance_last < distance_now and distance_last < self.voltage_step_x4_uV
98
- ):
96
+ elif distance_last < distance_now and distance_last < self.voltage_step_x4_uV:
99
97
  self.voltage_hold = self.voltage_last
100
98
  self.current_hold = self.current_last
101
99
 
@@ -37,9 +37,7 @@ class VirtualSourceModel:
37
37
  cnv_config = ConverterPRUConfig.from_vsrc(
38
38
  self.cfg_src, log_intermediate_node=log_intermediate
39
39
  )
40
- self.cnv: VirtualConverterModel = VirtualConverterModel(
41
- cnv_config, self._cal_pru
42
- )
40
+ self.cnv: VirtualConverterModel = VirtualConverterModel(cnv_config, self._cal_pru)
43
41
 
44
42
  hrv_config = HarvesterPRUConfig.from_vhrv(
45
43
  self.cfg_src.harvester,
@@ -53,9 +51,7 @@ class VirtualSourceModel:
53
51
  self.W_inp_fWs: float = 0.0
54
52
  self.W_out_fWs: float = 0.0
55
53
 
56
- def iterate_sampling(
57
- self, V_inp_uV: int = 0, I_inp_nA: int = 0, I_out_nA: int = 0
58
- ) -> int:
54
+ def iterate_sampling(self, V_inp_uV: int = 0, I_inp_nA: int = 0, I_out_nA: int = 0) -> int:
59
55
  """TEST-SIMPLIFICATION - code below is not part of pru-code,
60
56
  but in part sample_emulator() in sampling.c
61
57
 
shepherd_core/writer.py CHANGED
@@ -39,9 +39,7 @@ def path2str(
39
39
 
40
40
 
41
41
  def time2int(dumper: Dumper, data: timedelta) -> ScalarNode:
42
- return dumper.represent_scalar(
43
- "tag:yaml.org,2002:int", str(int(data.total_seconds()))
44
- )
42
+ return dumper.represent_scalar("tag:yaml.org,2002:int", str(int(data.total_seconds())))
45
43
 
46
44
 
47
45
  yaml.add_representer(pathlib.PosixPath, path2str, SafeDumper)
@@ -130,23 +128,20 @@ class Writer(Reader):
130
128
  base_dir = file_path.resolve().parents[0]
131
129
  self.file_path = unique_path(base_dir / file_path.stem, file_path.suffix)
132
130
  self._logger.warning(
133
- "File '%s' already exists -> " "storing under '%s' instead",
131
+ "File '%s' already exists -> storing under '%s' instead",
134
132
  file_path,
135
133
  self.file_path.name,
136
134
  )
137
135
 
138
136
  if isinstance(mode, str) and mode not in self.mode_dtype_dict:
139
- raise ValueError(
140
- f"Can't handle mode '{mode}' " f"(choose one of {self.mode_dtype_dict})"
141
- )
137
+ raise ValueError(f"Can't handle mode '{mode}' (choose one of {self.mode_dtype_dict})")
142
138
 
143
139
  _dtypes = self.mode_dtype_dict[mode if mode else self.mode_default]
144
140
  if isinstance(datatype, str):
145
141
  datatype = EnergyDType[datatype]
146
142
  if isinstance(datatype, EnergyDType) and datatype not in _dtypes:
147
143
  raise ValueError(
148
- f"Can't handle value '{datatype}' of datatype "
149
- f"(choose one of {_dtypes})"
144
+ f"Can't handle value '{datatype}' of datatype (choose one of {_dtypes})"
150
145
  )
151
146
 
152
147
  if self._modify:
@@ -158,9 +153,7 @@ class Writer(Reader):
158
153
  self._datatype = (
159
154
  datatype if isinstance(datatype, EnergyDType) else self.datatype_default
160
155
  )
161
- self._window_samples = (
162
- window_samples if isinstance(window_samples, int) else 0
163
- )
156
+ self._window_samples = window_samples if isinstance(window_samples, int) else 0
164
157
 
165
158
  if isinstance(cal_data, (CalEmu, CalHrv)):
166
159
  self._cal = CalSeries.from_cal(cal_data)
@@ -181,9 +174,7 @@ class Writer(Reader):
181
174
 
182
175
  # show key parameters for h5-performance
183
176
  settings = list(self.h5file.id.get_access_plist().get_cache())
184
- self._logger.debug(
185
- "H5Py Cache_setting=%s (_mdc, _nslots, _nbytes, _w0)", settings
186
- )
177
+ self._logger.debug("H5Py Cache_setting=%s (_mdc, _nslots, _nbytes, _w0)", settings)
187
178
 
188
179
  # Store the mode in order to allow user to differentiate harvesting vs emulation data
189
180
  if isinstance(self._mode, str) and self._mode in self.mode_dtype_dict:
@@ -257,9 +248,7 @@ class Writer(Reader):
257
248
  compression=self._compression,
258
249
  )
259
250
  grp_data["time"].attrs["unit"] = "s"
260
- grp_data["time"].attrs[
261
- "description"
262
- ] = "system time [s] = value * gain + (offset)"
251
+ grp_data["time"].attrs["description"] = "system time [s] = value * gain + (offset)"
263
252
 
264
253
  grp_data.create_dataset(
265
254
  "current",
@@ -309,7 +298,7 @@ class Writer(Reader):
309
298
  else:
310
299
  raise ValueError("timestamp-data was not usable")
311
300
 
312
- len_old = self.ds_time.shape[0]
301
+ len_old = self.ds_voltage.shape[0]
313
302
 
314
303
  # resize dataset
315
304
  self.ds_time.resize((len_old + len_new,))
@@ -346,15 +335,15 @@ class Writer(Reader):
346
335
  def _align(self) -> None:
347
336
  """Align datasets with buffer-size of shepherd"""
348
337
  self._refresh_file_stats()
349
- n_buff = self.ds_time.size / self.samples_per_buffer
338
+ n_buff = self.ds_voltage.size / self.samples_per_buffer
350
339
  size_new = int(math.floor(n_buff) * self.samples_per_buffer)
351
- if size_new < self.ds_time.size:
340
+ if size_new < self.ds_voltage.size:
352
341
  if self.samplerate_sps != samplerate_sps_default:
353
342
  self._logger.debug("skipped alignment due to altered samplerate")
354
343
  return
355
344
  self._logger.info(
356
345
  "aligning with buffer-size, discarding last %d entries",
357
- self.ds_time.size - size_new,
346
+ self.ds_voltage.size - size_new,
358
347
  )
359
348
  self.ds_time.resize((size_new,))
360
349
  self.ds_voltage.resize((size_new,))
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: shepherd-core
3
- Version: 2023.11.1
3
+ Version: 2023.12.1
4
4
  Summary: Programming- and CLI-Interface for the h5-dataformat of the Shepherd-Testbed
5
5
  Home-page: https://pypi.org/project/shepherd-core/
6
6
  Author: Ingmar Splitt, Kai Geissdoerfer
@@ -42,9 +42,6 @@ Requires-Dist: requests
42
42
  Requires-Dist: pyelftools
43
43
  Requires-Dist: zstandard
44
44
  Provides-Extra: dev
45
- Requires-Dist: black ; extra == 'dev'
46
- Requires-Dist: pylint ; extra == 'dev'
47
- Requires-Dist: flake8 ; extra == 'dev'
48
45
  Requires-Dist: twine ; extra == 'dev'
49
46
  Requires-Dist: pre-commit ; extra == 'dev'
50
47
  Requires-Dist: pyright ; extra == 'dev'
@@ -64,7 +61,7 @@ Requires-Dist: coverage ; extra == 'test'
64
61
  [![PyPiVersion](https://img.shields.io/pypi/v/shepherd_core.svg)](https://pypi.org/project/shepherd_core)
65
62
  [![image](https://img.shields.io/pypi/pyversions/shepherd_core.svg)](https://pypi.python.org/pypi/shepherd-core)
66
63
  [![Pytest](https://github.com/orgua/shepherd-datalib/actions/workflows/py_unittest.yml/badge.svg)](https://github.com/orgua/shepherd-datalib/actions/workflows/py_unittest.yml)
67
- [![CodeStyle](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
64
+ [![CodeStyle](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
68
65
 
69
66
  **Documentation**: <https://orgua.github.io/shepherd/external/shepherd_core.html>
70
67