shepherd-core 2023.10.3-py3-none-any.whl → 2023.12.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. shepherd_core/__init__.py +1 -1
  2. shepherd_core/data_models/base/content.py +3 -8
  3. shepherd_core/data_models/base/shepherd.py +2 -6
  4. shepherd_core/data_models/content/virtual_source.py +13 -40
  5. shepherd_core/data_models/experiment/experiment.py +3 -8
  6. shepherd_core/data_models/experiment/observer_features.py +4 -9
  7. shepherd_core/data_models/experiment/target_config.py +4 -11
  8. shepherd_core/data_models/task/__init__.py +2 -6
  9. shepherd_core/data_models/task/emulation.py +7 -14
  10. shepherd_core/data_models/task/firmware_mod.py +1 -3
  11. shepherd_core/data_models/task/harvest.py +1 -3
  12. shepherd_core/data_models/testbed/cape_fixture.yaml +2 -1
  13. shepherd_core/data_models/testbed/observer.py +6 -22
  14. shepherd_core/data_models/testbed/observer_fixture.yaml +46 -16
  15. shepherd_core/data_models/testbed/target_fixture.yaml +18 -16
  16. shepherd_core/data_models/testbed/testbed.py +1 -3
  17. shepherd_core/data_models/testbed/testbed_fixture.yaml +2 -0
  18. shepherd_core/decoder_waveform/uart.py +8 -26
  19. shepherd_core/fw_tools/__init__.py +1 -3
  20. shepherd_core/fw_tools/converter.py +4 -12
  21. shepherd_core/fw_tools/patcher.py +4 -12
  22. shepherd_core/fw_tools/validation.py +2 -2
  23. shepherd_core/inventory/__init__.py +3 -9
  24. shepherd_core/inventory/python.py +7 -5
  25. shepherd_core/inventory/system.py +2 -5
  26. shepherd_core/logger.py +1 -3
  27. shepherd_core/reader.py +45 -41
  28. shepherd_core/testbed_client/client.py +5 -16
  29. shepherd_core/testbed_client/fixtures.py +4 -8
  30. shepherd_core/testbed_client/user_model.py +1 -3
  31. shepherd_core/vsource/virtual_converter_model.py +4 -14
  32. shepherd_core/vsource/virtual_harvester_model.py +1 -3
  33. shepherd_core/vsource/virtual_source_model.py +2 -6
  34. shepherd_core/writer.py +11 -22
  35. {shepherd_core-2023.10.3.dist-info → shepherd_core-2023.12.1.dist-info}/METADATA +16 -5
  36. {shepherd_core-2023.10.3.dist-info → shepherd_core-2023.12.1.dist-info}/RECORD +53 -53
  37. {shepherd_core-2023.10.3.dist-info → shepherd_core-2023.12.1.dist-info}/WHEEL +1 -1
  38. tests/conftest.py +32 -0
  39. tests/data_models/test_content_models.py +15 -9
  40. tests/data_models/test_experiment_models.py +2 -4
  41. tests/data_models/test_task_generation.py +1 -1
  42. tests/fw_tools/test_converter.py +11 -0
  43. tests/fw_tools/test_patcher.py +11 -4
  44. tests/fw_tools/test_validation.py +8 -0
  45. tests/inventory/test_inventory.py +1 -3
  46. tests/test_cal_hw.py +2 -6
  47. tests/test_examples.py +13 -2
  48. tests/test_writer.py +2 -2
  49. tests/vsource/conftest.py +1 -3
  50. tests/vsource/test_converter.py +2 -6
  51. tests/vsource/test_harvester.py +3 -9
  52. {shepherd_core-2023.10.3.dist-info → shepherd_core-2023.12.1.dist-info}/top_level.txt +0 -0
  53. {shepherd_core-2023.10.3.dist-info → shepherd_core-2023.12.1.dist-info}/zip-safe +0 -0

shepherd_core/data_models/testbed/target_fixture.yaml CHANGED
@@ -3,8 +3,8 @@
  # https://github.com/orgua/shepherd_v2_planning/blob/main/doc_testbed/Target_pre-deployment-tests.xlsx
  - datatype: target
  parameters:
- id: 6
- name: nRF52_FRAM_001
+ id: 6 # Outer ID - selected by user - can be rearranged
+ name: nRF52_FRAM_001 # inner ID - used to link all parts together
  version: v1.0
  description: nRF52 as MCU + Radio, MSP430FR as SPI-FRAM or additional MCU
  comment: Test3 21nA sleep, msp-programming was flaky before -> monitor!
@@ -34,29 +34,31 @@
  - datatype: target
  parameters:
  inherit_from: nRF52_FRAM_001
- id: 1
+ id: 13
  name: nRF52_FRAM_005
- comment: Test3 21nA sleep
+ comment: Test3 21nA sleep, changed Antenna to lambda/4
  - datatype: target
  parameters:
  inherit_from: nRF52_FRAM_001
- id: 7
+ id: 14
  name: nRF52_FRAM_006
- comment: Test3 21nA sleep
+ comment: Test3 21nA sleep, changed Antenna to lambda/4
  - datatype: target
  parameters:
  inherit_from: nRF52_FRAM_001
- id: 1307
+ id: 12
  name: nRF52_FRAM_007
- comment: msp-programming is failing -> defective
- active: false
+ comment: msp-programming is failing -> defective, msp removed & changed Antenna to lambda/4
+ mcu2: null
+ active: true
  - datatype: target
  parameters:
  inherit_from: nRF52_FRAM_001
- id: 1308
+ id: 8
  name: nRF52_FRAM_008
- comment: msp-programming is failing -> defective
- active: false
+ comment: msp-programming is failing -> defective, msp removed & changed Antenna to lambda/4
+ mcu2: null
+ active: true
  - datatype: target
  parameters:
  inherit_from: nRF52_FRAM_001
@@ -72,15 +74,15 @@
  - datatype: target
  parameters:
  inherit_from: nRF52_FRAM_001
- id: 12
+ id: 1
  name: nRF52_FRAM_011
- comment: Test3 21nA sleep
+ comment: Test3 21nA sleep, changed Antenna to lambda/4
  - datatype: target
  parameters:
  inherit_from: nRF52_FRAM_001
  id: 1312
  name: nRF52_FRAM_012
- comment: msp-programming is failing -> defective
+ comment: msp-programming is failing -> defective, Antenna-Port destroyed
  active: false
  - datatype: target
  parameters:
@@ -111,7 +113,7 @@
  - datatype: target
  parameters:
  inherit_from: nRF52_FRAM_001
- id: 8
+ id: 7
  name: nRF52_FRAM_017
  comment: Test3 21nA sleep, nrf was miss-aligned -> hot air rework
  - datatype: target

shepherd_core/data_models/testbed/testbed.py CHANGED
@@ -87,6 +87,4 @@ class Testbed(ShpModel):
  continue
  if _observer.has_target(target_id):
  return _observer
- raise ValueError(
- f"Target-ID {target_id} was not found in Testbed '{self.name}'"
- )
+ raise ValueError(f"Target-ID {target_id} was not found in Testbed '{self.name}'")

shepherd_core/data_models/testbed/testbed_fixture.yaml CHANGED
@@ -21,3 +21,5 @@
  - name: sheep10
  - name: sheep11
  - name: sheep12
+ - name: sheep13
+ - name: sheep14

shepherd_core/decoder_waveform/uart.py CHANGED
@@ -56,9 +56,7 @@ class Uart:
  (some detectors still missing)
  """
  if isinstance(content, Path):
- self.events_sig: np.ndarray = np.loadtxt(
- content.as_posix(), delimiter=",", skiprows=1
- )
+ self.events_sig: np.ndarray = np.loadtxt(content.as_posix(), delimiter=",", skiprows=1)
  # TODO: if float fails load as str -
  # cast first col as np.datetime64 with ns-resolution, convert to delta
  else:
@@ -66,9 +64,7 @@ class Uart:

  # verify table
  if self.events_sig.shape[1] != 2:
- raise TypeError(
- "Input file should have 2 rows -> (comma-separated) timestamp & value"
- )
+ raise TypeError("Input file should have 2 rows -> (comma-separated) timestamp & value")
  if self.events_sig.shape[0] < 8:
  raise TypeError("Input file is too short (< state-changes)")
  # verify timestamps
@@ -80,23 +76,17 @@ class Uart:
  self._convert_analog2digital()
  self._filter_redundant_states()

- self.baud_rate: int = (
- baud_rate if baud_rate is not None else self.detect_baud_rate()
- )
+ self.baud_rate: int = baud_rate if baud_rate is not None else self.detect_baud_rate()
  self.dur_tick: float = 1.0 / self.baud_rate

  self._add_duration()

- self.inversion: bool = (
- inversion if inversion is not None else self.detect_inversion()
- )
+ self.inversion: bool = inversion if inversion is not None else self.detect_inversion()
  self.half_stop: bool = self.detect_half_stop() # not needed ATM

  # TODO: add detectors
  self.parity: Parity = parity if parity is not None else Parity.no
- self.bit_order: BitOrder = (
- bit_order if bit_order is not None else BitOrder.lsb_first
- )
+ self.bit_order: BitOrder = bit_order if bit_order is not None else BitOrder.lsb_first
  self.frame_length: int = frame_length if frame_length is not None else 8

  if not (0 < self.frame_length <= 64):
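
A minimal usage sketch of the constructor shown above (file name assumed; baud-rate, inversion, parity, bit-order and frame-length fall back to detection or the defaults from the hunk when not passed):

    from pathlib import Path

    from shepherd_core.decoder_waveform.uart import Uart

    # trace: comma-separated file with one header row and two columns -> timestamp & value
    uart = Uart(Path("uart_trace.csv"))  # hypothetical file name
    symbols = uart.get_symbols()
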
@@ -146,14 +136,10 @@ class Uart:
  logger.warning("Tried to add state-duration, but it seems already present")
  return
  if not hasattr(self, "dur_tick"):
- raise ValueError(
- "Make sure that baud-rate was calculated before running add_dur()"
- )
+ raise ValueError("Make sure that baud-rate was calculated before running add_dur()")
  dur_steps = self.events_sig[1:, 0] - self.events_sig[:-1, 0]
  dur_steps = np.reshape(dur_steps, (dur_steps.size, 1))
- self.events_sig = np.append(
- self.events_sig[:-1, :], dur_steps / self.dur_tick, axis=1
- )
+ self.events_sig = np.append(self.events_sig[:-1, :], dur_steps / self.dur_tick, axis=1)

  def detect_inversion(self) -> bool:
  """Analyze bit-state during long pauses (unchanged states)
@@ -182,16 +168,12 @@ class Uart:
  def detect_half_stop(self) -> bool:
  """Looks into the spacing between time-steps"""
  events = self.events_sig[:1000, :] # speedup for large datasets
- return (
- np.sum((events > 1.333 * self.dur_tick) & (events < 1.667 * self.dur_tick))
- > 0
- )
+ return np.sum((events > 1.333 * self.dur_tick) & (events < 1.667 * self.dur_tick)) > 0

  def detect_dataframe_length(self) -> int:
  """Look after longest pauses
  - accumulate steps until a state with uneven step-size is found
  """
- pass

  def get_symbols(self, *, force_redo: bool = False) -> np.ndarray:
  """Ways to detect EOF:

shepherd_core/fw_tools/__init__.py CHANGED
@@ -25,9 +25,7 @@ except ImportError:
  "cffi",
  ]
  # only update when module is not avail
- MOCK_MODULES = [
- mod_name for mod_name in MOCK_MODULES if find_spec(mod_name) is None
- ]
+ MOCK_MODULES = [mod_name for mod_name in MOCK_MODULES if find_spec(mod_name) is None]
  sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)

  from .converter import base64_to_file

shepherd_core/fw_tools/converter.py CHANGED
@@ -22,9 +22,7 @@ def firmware_to_hex(file_path: Path) -> Path:
  return elf_to_hex(file_path)
  if is_hex(file_path):
  return file_path
- raise ValueError(
- "FW2Hex: unknown file '%s', it should be ELF or HEX", file_path.name
- )
+ raise ValueError("FW2Hex: unknown file '%s', it should be ELF or HEX", file_path.name)


  @validate_call
@@ -72,9 +70,7 @@ def base64_to_hash(content: str) -> str:


  @validate_call
- def extract_firmware(
- data: Union[str, Path], data_type: FirmwareDType, file_path: Path
- ) -> Path:
+ def extract_firmware(data: Union[str, Path], data_type: FirmwareDType, file_path: Path) -> Path:
  """- base64-string will be transformed into file
  - if data is a path the file will be copied to the destination
  """
@@ -90,14 +86,10 @@ def extract_firmware(
  elif data_type == FirmwareDType.path_hex:
  file = file_path.with_suffix(".hex")
  else:
- raise ValueError(
- "FW-Extraction failed due to unknown datatype '%s'", data_type
- )
+ raise ValueError("FW-Extraction failed due to unknown datatype '%s'", data_type)
  if not file.parent.exists():
  file.parent.mkdir(parents=True)
  shutil.copy(data, file)
  else:
- raise ValueError(
- "FW-Extraction failed due to unknown data-type '%s'", type(data)
- )
+ raise ValueError("FW-Extraction failed due to unknown data-type '%s'", type(data))
  return file
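
A minimal sketch of the single-line signature above (firmware file name assumed): firmware_to_hex() converts an ELF to HEX and passes a HEX-file through unchanged.

    from pathlib import Path

    from shepherd_core.fw_tools.converter import firmware_to_hex

    hex_file = firmware_to_hex(Path("node_fw.elf"))  # hypothetical firmware
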

shepherd_core/fw_tools/patcher.py CHANGED
@@ -46,9 +46,7 @@ def find_symbol(file_elf: Path, symbol: str) -> bool:


  @validate_call
- def read_symbol(
- file_elf: Path, symbol: str, length: int = uid_len_default
- ) -> Optional[int]:
+ def read_symbol(file_elf: Path, symbol: str, length: int = uid_len_default) -> Optional[int]:
  """Interpreted as int"""
  if not find_symbol(file_elf, symbol):
  return None
@@ -99,9 +97,7 @@ def modify_symbol_value(
  value_raw = elf.read(address=addr, count=uid_len_default)[-uid_len_default:]
  # ⤷ cutting needed -> msp produces 4b instead of 2
  value_old = int.from_bytes(bytes=value_raw, byteorder=elf.endian, signed=False)
- value_raw = value.to_bytes(
- length=uid_len_default, byteorder=elf.endian, signed=False
- )
+ value_raw = value.to_bytes(length=uid_len_default, byteorder=elf.endian, signed=False)
  try:
  elf.write(address=addr, data=value_raw)
  except AttributeError:
@@ -110,9 +106,7 @@ def modify_symbol_value(
  if overwrite:
  file_new = file_elf
  else:
- file_new = file_elf.with_name(
- file_elf.stem + "_" + str(value) + file_elf.suffix
- )
+ file_new = file_elf.with_name(file_elf.stem + "_" + str(value) + file_elf.suffix)
  # could be simplified, but py3.8-- doesn't know .with_stem()
  elf.save(path=file_new)
  elf.close()
@@ -127,6 +121,4 @@ def modify_symbol_value(


  def modify_uid(file_elf: Path, value: int) -> Optional[Path]:
- return modify_symbol_value(
- file_elf, symbol=uid_str_default, value=value, overwrite=True
- )
+ return modify_symbol_value(file_elf, symbol=uid_str_default, value=value, overwrite=True)
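
A minimal sketch of the reformatted helpers (firmware file name and UID value assumed): modify_uid() patches the default UID-symbol in place, read_symbol() reads it back as int.

    from pathlib import Path

    from shepherd_core.fw_tools.patcher import modify_uid, read_symbol, uid_str_default

    fw = Path("node_fw.elf")  # hypothetical firmware
    modify_uid(fw, value=42)  # overwrite=True, so the ELF is patched in place
    print(read_symbol(fw, symbol=uid_str_default))
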

shepherd_core/fw_tools/validation.py CHANGED
@@ -92,7 +92,7 @@ def is_elf(file: Path) -> bool:
  return True


- def is_elf_msp430(file: Path) -> bool:
+ def is_elf_msp430(file: Path) -> bool: # TODO: allow detection without conversion
  if is_elf(file):
  with tempfile.TemporaryDirectory() as path:
  file_hex = Path(path) / "file.hex"
@@ -103,7 +103,7 @@ def is_elf_msp430(file: Path) -> bool:
  return False


- def is_elf_nrf52(file: Path) -> bool:
+ def is_elf_nrf52(file: Path) -> bool: # TODO: allow detection without conversion
  if is_elf(file):
  with tempfile.TemporaryDirectory() as path:
  file_hex = Path(path) / "file.hex"

shepherd_core/inventory/__init__.py CHANGED
@@ -30,15 +30,9 @@ class Inventory(PythonInventory, SystemInventory, TargetInventory):
  @classmethod
  def collect(cls) -> Self:
  # one by one for more precise error messages
- pid = PythonInventory.collect().model_dump(
- exclude_unset=True, exclude_defaults=True
- )
- sid = SystemInventory.collect().model_dump(
- exclude_unset=True, exclude_defaults=True
- )
- tid = TargetInventory.collect().model_dump(
- exclude_unset=True, exclude_defaults=True
- )
+ pid = PythonInventory.collect().model_dump(exclude_unset=True, exclude_defaults=True)
+ sid = SystemInventory.collect().model_dump(exclude_unset=True, exclude_defaults=True)
+ tid = TargetInventory.collect().model_dump(exclude_unset=True, exclude_defaults=True)
  model = {**pid, **sid, **tid}
  return cls(**model)

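
A minimal sketch of the collection shown above (reported fields depend on the host): Inventory.collect() merges the Python-, System- and Target-inventories into one model.

    from shepherd_core.inventory import Inventory

    inv = Inventory.collect()
    print(inv.model_dump(exclude_unset=True, exclude_defaults=True))
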

shepherd_core/inventory/python.py CHANGED
@@ -11,13 +11,14 @@ from ..data_models import ShpModel

  class PythonInventory(ShpModel):
  # program versions
- python: Optional[str] = None
- numpy: Optional[str] = None
  h5py: Optional[str] = None
+ numpy: Optional[str] = None
  pydantic: Optional[str] = None
- yaml: Optional[str] = None
+ python: Optional[str] = None
  shepherd_core: Optional[str] = None
  shepherd_sheep: Optional[str] = None
+ yaml: Optional[str] = None
+ zstandard: Optional[str] = None

  model_config = ConfigDict(str_min_length=0)

@@ -25,12 +26,13 @@ class PythonInventory(ShpModel):
  def collect(cls) -> Self:
  model_dict = {"python": platform.python_version()}
  module_names = [
- "numpy",
  "h5py",
+ "numpy",
  "pydantic",
- "yaml",
  "shepherd_core",
  "shepherd_sheep",
+ "yaml",
+ "zstandard",
  ]

  for module_name in module_names:

shepherd_core/inventory/system.py CHANGED
@@ -53,11 +53,7 @@ class SystemInventory(ShpModel):
  )
  else:
  ifs1 = psutil.net_if_addrs().items()
- ifs2 = {
- name: (_if[1].address, _if[0].address)
- for name, _if in ifs1
- if len(_if) > 1
- }
+ ifs2 = {name: (_if[1].address, _if[0].address) for name, _if in ifs1 if len(_if) > 1}
  uptime = time.time() - psutil.boot_time()

  model_dict = {
@@ -69,6 +65,7 @@
  "processor": platform.processor(),
  "hostname": platform.node(),
  "interfaces": ifs2,
+ # TODO: add free space on /
  }

  with suppress(FileNotFoundError):

shepherd_core/logger.py CHANGED
@@ -15,9 +15,7 @@ def get_verbose_level() -> int:
  return verbose_level


- def set_log_verbose_level(
- log_: Union[logging.Logger, logging.Handler], verbose: int
- ) -> None:
+ def set_log_verbose_level(log_: Union[logging.Logger, logging.Handler], verbose: int) -> None:
  if verbose == 0:
  log_.setLevel(logging.ERROR)
  logging.basicConfig(level=logging.ERROR)
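
A minimal sketch of the compacted signature above (logger name and verbosity value assumed): the helper accepts any Logger or Handler, and verbose=0 lowers the level to ERROR as in the hunk.

    import logging

    from shepherd_core.logger import set_log_verbose_level

    set_log_verbose_level(logging.getLogger("shepherd_core"), verbose=0)  # logger name assumed
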

shepherd_core/reader.py CHANGED
@@ -95,9 +95,7 @@ class Reader:
  self.h5file = h5py.File(self.file_path, "r") # = readonly
  self._reader_opened = True
  except OSError as _xcp:
- raise TypeError(
- f"Unable to open HDF5-File '{self.file_path.name}'"
- ) from _xcp
+ raise TypeError(f"Unable to open HDF5-File '{self.file_path.name}'") from _xcp

  if self.is_valid():
  self._logger.debug("File is available now")
@@ -109,9 +107,7 @@ class Reader:
  )

  if not isinstance(self.h5file, h5py.File):
- raise TypeError(
- "Type of opened file is not h5py.File, for %s", self.file_path.name
- )
+ raise TypeError("Type of opened file is not h5py.File, for %s", self.file_path.name)

  self.ds_time: h5py.Dataset = self.h5file["data"]["time"]
  self.ds_voltage: h5py.Dataset = self.h5file["data"]["voltage"]
@@ -120,9 +116,7 @@ class Reader:
  # retrieve cal-data
  if not hasattr(self, "_cal"):
  cal_dict = CalibrationSeries().model_dump()
- for ds, param in product(
- ["current", "voltage", "time"], ["gain", "offset"]
- ):
+ for ds, param in product(["current", "voltage", "time"], ["gain", "offset"]):
  cal_dict[ds][param] = self.h5file["data"][ds].attrs[param]
  self._cal = CalibrationSeries(**cal_dict)

@@ -167,12 +161,11 @@ class Reader:
  """update internal states, helpful after resampling or other changes in data-group"""
  self.h5file.flush()
  if (self.ds_time.shape[0] > 1) and (self.ds_time[1] != self.ds_time[0]):
- self.sample_interval_s = self._cal.time.raw_to_si(
- self.ds_time[1] - self.ds_time[0]
- )
+ # this assumes isochronal sampling
+ self.sample_interval_s = self._cal.time.raw_to_si(self.ds_time[1] - self.ds_time[0])
  self.sample_interval_ns = int(10**9 * self.sample_interval_s)
  self.samplerate_sps = max(int(10**9 // self.sample_interval_ns), 1)
- self.runtime_s = round(self.ds_time.shape[0] / self.samplerate_sps, 1)
+ self.runtime_s = round(self.ds_voltage.shape[0] / self.samplerate_sps, 1)
  if isinstance(self.file_path, Path):
  self.file_size = self.file_path.stat().st_size
  else:
@@ -185,6 +178,7 @@ class Reader:
  end_n: Optional[int] = None,
  *,
  is_raw: bool = False,
+ omit_ts: bool = False,
  ) -> Generator[tuple, None, None]:
  """Generator that reads the specified range of buffers from the hdf5 file.
  can be configured on first call
@@ -195,27 +189,27 @@ class Reader:
  :param start_n: (int) Index of first buffer to be read
  :param end_n: (int) Index of last buffer to be read
  :param is_raw: (bool) output original data, not transformed to SI-Units
+ :param omit_ts: (bool) optimize reading if timestamp is never used
  Yields: Buffers between start and end (tuple with time, voltage, current)
  """
  if end_n is None:
- end_n = int(self.ds_time.shape[0] // self.samples_per_buffer)
- self._logger.debug(
- "Reading blocks from %d to %d from source-file", start_n, end_n
- )
+ end_n = int(self.ds_voltage.shape[0] // self.samples_per_buffer)
+ self._logger.debug("Reading blocks %d to %d from source-file", start_n, end_n)
  _raw = is_raw
+ _wts = not omit_ts

  for i in range(start_n, end_n):
  idx_start = i * self.samples_per_buffer
  idx_end = idx_start + self.samples_per_buffer
  if _raw:
  yield (
- self.ds_time[idx_start:idx_end],
+ self.ds_time[idx_start:idx_end] if _wts else None,
  self.ds_voltage[idx_start:idx_end],
  self.ds_current[idx_start:idx_end],
  )
  else:
  yield (
- self._cal.time.raw_to_si(self.ds_time[idx_start:idx_end]),
+ self._cal.time.raw_to_si(self.ds_time[idx_start:idx_end]) if _wts else None,
  self._cal.voltage.raw_to_si(self.ds_voltage[idx_start:idx_end]),
  self._cal.current.raw_to_si(self.ds_current[idx_start:idx_end]),
  )
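
A minimal usage sketch of the new omit_ts-flag (file name and the generator's read_buffers-name assumed): with the flag set, the time-dataset is skipped and None is yielded in its place.

    from pathlib import Path

    from shepherd_core import Reader

    with Reader(Path("recording.h5")) as sfr:  # hypothetical file
        for _ts, voltage, current in sfr.read_buffers(omit_ts=True):
            ...  # _ts is None; voltage & current stay SI-converted arrays
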
@@ -341,20 +335,20 @@
  self.file_path.name,
  )
  # same length of datasets:
- ds_time_size = self.h5file["data"]["time"].shape[0]
- for dset in ["current", "voltage"]:
+ ds_volt_size = self.h5file["data"]["voltage"].shape[0]
+ for dset in ["current", "time"]:
  ds_size = self.h5file["data"][dset].shape[0]
- if ds_time_size != ds_size:
+ if ds_volt_size != ds_size:
  self._logger.warning(
  "[FileValidation] dataset '%s' has different size (=%d), "
  "compared to time-ds (=%d), in '%s'",
  dset,
  ds_size,
- ds_time_size,
+ ds_volt_size,
  self.file_path.name,
  )
  # dataset-length should be multiple of buffersize
- remaining_size = ds_time_size % self.samples_per_buffer
+ remaining_size = ds_volt_size % self.samples_per_buffer
  if remaining_size != 0:
  self._logger.warning(
  "[FileValidation] datasets are not aligned with buffer-size in '%s'",
@@ -382,6 +376,14 @@
  self._logger.warning(
  "[FileValidation] Hostname was not set in '%s'", self.file_path.name
  )
+ # errors during execution
+ _err = self.count_errors_in_log()
+ if _err > 0:
+ self._logger.warning(
+ "[FileValidation] Sheep reported %d errors during execution -> check logs in '%s'",
+ _err,
+ self.file_path.name,
+ )
  return True

  def __getitem__(self, key: str) -> Any:
@@ -403,10 +405,10 @@

  :return: sampled energy in Ws (watt-seconds)
  """
- iterations = math.ceil(self.ds_time.shape[0] / self.max_elements)
+ iterations = math.ceil(self.ds_voltage.shape[0] / self.max_elements)
  job_iter = trange(
  0,
- self.ds_time.shape[0],
+ self.ds_voltage.shape[0],
  self.max_elements,
  desc="energy",
  leave=False,
@@ -414,7 +416,7 @@
  )

  def _calc_energy(idx_start: int) -> float:
- idx_stop = min(idx_start + self.max_elements, self.ds_time.shape[0])
+ idx_stop = min(idx_start + self.max_elements, self.ds_voltage.shape[0])
  vol_v = self._cal.voltage.raw_to_si(self.ds_voltage[idx_start:idx_stop])
  cur_a = self._cal.current.raw_to_si(self.ds_current[idx_start:idx_stop])
  return (vol_v[:] * cur_a[:]).sum() * self.sample_interval_s
@@ -433,9 +435,7 @@
  si_converted = True
  if not isinstance(cal, CalibrationPair):
  if "gain" in dset.attrs and "offset" in dset.attrs:
- cal = CalibrationPair(
- gain=dset.attrs["gain"], offset=dset.attrs["offset"]
- )
+ cal = CalibrationPair(gain=dset.attrs["gain"], offset=dset.attrs["offset"])
  else:
  cal = CalibrationPair(gain=1)
  si_converted = False
@@ -453,8 +453,7 @@
  return [np.mean(data), np.min(data), np.max(data), np.std(data)]

  stats_list = [
- _calc_statistics(cal.raw_to_si(dset[i : i + self.max_elements]))
- for i in job_iter
+ _calc_statistics(cal.raw_to_si(dset[i : i + self.max_elements])) for i in job_iter
  ]
  if len(stats_list) < 1:
  return {}
@@ -513,6 +512,17 @@
  )
  return (len(diffs) <= 1) and diffs[0] == round(0.1 / self.samples_per_buffer, 6)

+ def count_errors_in_log(self, group_name: str = "sheep", min_level: int = 40) -> int:
+ if group_name not in self.h5file:
+ return 0
+ if "level" not in self.h5file["sheep"]:
+ return 0
+ _lvl = self.h5file["sheep"]["level"]
+ if _lvl.shape[0] < 1:
+ return 0
+ _items = [1 for _x in _lvl[:] if _x >= min_level]
+ return len(_items)
+
  def get_metadata(
  self,
  node: Union[h5py.Dataset, h5py.Group, None] = None,
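
A minimal sketch of the new error-check (file name assumed; min_level=40 equals logging.ERROR): is_valid() now warns about such log-entries, and the count can also be queried directly.

    from pathlib import Path

    from shepherd_core import Reader

    with Reader(Path("recording.h5")) as sfr:  # hypothetical file
        if sfr.count_errors_in_log(group_name="sheep", min_level=40) > 0:
            print("observer logged errors during the run -> inspect the 'sheep' group")
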
@@ -580,9 +590,7 @@ class Reader:
  if yaml_path.exists():
  self._logger.info("File already exists, will skip '%s'", yaml_path.name)
  return {}
- metadata = self.get_metadata(
- node
- ) # {"h5root": self.get_metadata(self.h5file)}
+ metadata = self.get_metadata(node) # {"h5root": self.get_metadata(self.h5file)}
  with yaml_path.open("w", encoding="utf-8-sig") as yfd:
  yaml.safe_dump(metadata, yfd, default_flow_style=False, sort_keys=False)
  else:
@@ -618,9 +626,7 @@
  gpio_vs = self.h5file["gpio"]["value"]

  if name is None:
- descriptions = yaml.safe_load(
- self.h5file["gpio"]["value"].attrs["description"]
- )
+ descriptions = yaml.safe_load(self.h5file["gpio"]["value"].attrs["description"])
  pin_dict = {value["name"]: key for key, value in descriptions.items()}
  else:
  pin_dict = {name: self.get_gpio_pin_num(name)}
@@ -636,9 +642,7 @@
  )
  return waveforms

- def waveform_to_csv(
- self, pin_name: str, pin_wf: np.ndarray, separator: str = ","
- ) -> None:
+ def waveform_to_csv(self, pin_name: str, pin_wf: np.ndarray, separator: str = ",") -> None:
  path_csv = self.file_path.with_suffix(f".waveform.{pin_name}.csv")
  if path_csv.exists():
  self._logger.info("File already exists, will skip '%s'", path_csv.name)

shepherd_core/testbed_client/client.py CHANGED
@@ -18,9 +18,7 @@ from .user_model import User
  class TestbedClient:
  _instance: Optional[Self] = None

- def __init__(
- self, server: Optional[str] = None, token: Union[str, Path, None] = None
- ) -> None:
+ def __init__(self, server: Optional[str] = None, token: Union[str, Path, None] = None) -> None:
  if not hasattr(self, "_token"):
  self._token: str = "null"
  self._server: Optional[str] = testbed_server_default
@@ -42,9 +40,7 @@
  TestbedClient._instance = None

  @validate_call
- def connect(
- self, server: Optional[str] = None, token: Union[str, Path, None] = None
- ) -> bool:
+ def connect(self, server: Optional[str] = None, token: Union[str, Path, None] = None) -> bool:
  """
  server: either "local" to use demo-fixtures or something like "https://HOST:PORT"
  token: your account validation
@@ -74,9 +70,7 @@
  parameters=data.model_dump(),
  )
  if self._connected:
- r = self._req.post(
- self._server + "/add", data=wrap.model_dump_json(), timeout=2
- )
+ r = self._req.post(self._server + "/add", data=wrap.model_dump_json(), timeout=2)
  r.raise_for_status()
  else:
  self._fixtures.insert_model(wrap)
@@ -134,16 +128,11 @@ class TestbedClient:
  and value.lower() in self._fixtures[model_type].elements_by_name
  ):
  values = self.query_item(model_type, name=value)
- elif (
- isinstance(value, int)
- and value in self._fixtures[model_type].elements_by_id
- ):
+ elif isinstance(value, int) and value in self._fixtures[model_type].elements_by_id:
  # TODO: still depending on _fixture
  values = self.query_item(model_type, uid=value)
  else:
- raise ValueError(
- f"Query {model_type} by name / ID failed - " f"{values} is unknown!"
- )
+ raise ValueError(f"Query {model_type} by name / ID failed - {values} is unknown!")
  return self.try_inheritance(model_type, values)

  def fill_in_user_data(self, values: dict) -> dict:
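
A minimal usage sketch of the compacted connect()-signature (server value taken from the docstring above; token handling assumed): "local" selects the bundled demo-fixtures instead of a remote server.

    from shepherd_core.testbed_client.client import TestbedClient

    client = TestbedClient()
    client.connect(server="local")  # token presumably not required for the demo-fixtures
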