shepherd-core 2025.2.2__py3-none-any.whl → 2025.4.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- shepherd_core/calibration_hw_def.py +11 -11
- shepherd_core/commons.py +4 -4
- shepherd_core/data_models/base/cal_measurement.py +10 -11
- shepherd_core/data_models/base/calibration.py +11 -8
- shepherd_core/data_models/base/content.py +1 -1
- shepherd_core/data_models/base/shepherd.py +6 -7
- shepherd_core/data_models/base/wrapper.py +2 -2
- shepherd_core/data_models/content/energy_environment.py +4 -3
- shepherd_core/data_models/content/firmware.py +9 -7
- shepherd_core/data_models/content/virtual_harvester.py +30 -22
- shepherd_core/data_models/content/virtual_source.py +17 -16
- shepherd_core/data_models/experiment/experiment.py +15 -14
- shepherd_core/data_models/experiment/observer_features.py +7 -8
- shepherd_core/data_models/experiment/target_config.py +12 -12
- shepherd_core/data_models/readme.md +2 -1
- shepherd_core/data_models/task/__init__.py +5 -5
- shepherd_core/data_models/task/emulation.py +13 -14
- shepherd_core/data_models/task/firmware_mod.py +11 -11
- shepherd_core/data_models/task/harvest.py +7 -6
- shepherd_core/data_models/task/observer_tasks.py +7 -7
- shepherd_core/data_models/task/programming.py +11 -11
- shepherd_core/data_models/task/testbed_tasks.py +8 -8
- shepherd_core/data_models/testbed/cape.py +7 -6
- shepherd_core/data_models/testbed/gpio.py +8 -7
- shepherd_core/data_models/testbed/mcu.py +8 -7
- shepherd_core/data_models/testbed/observer.py +9 -7
- shepherd_core/data_models/testbed/target.py +9 -7
- shepherd_core/data_models/testbed/testbed.py +11 -10
- shepherd_core/decoder_waveform/uart.py +5 -5
- shepherd_core/fw_tools/converter.py +4 -3
- shepherd_core/fw_tools/patcher.py +14 -15
- shepherd_core/fw_tools/validation.py +3 -2
- shepherd_core/inventory/__init__.py +4 -4
- shepherd_core/inventory/python.py +1 -1
- shepherd_core/inventory/system.py +11 -8
- shepherd_core/inventory/target.py +3 -3
- shepherd_core/logger.py +2 -2
- shepherd_core/reader.py +43 -43
- shepherd_core/testbed_client/client_abc_fix.py +20 -13
- shepherd_core/testbed_client/client_web.py +18 -11
- shepherd_core/testbed_client/fixtures.py +24 -44
- shepherd_core/testbed_client/user_model.py +6 -5
- shepherd_core/version.py +1 -1
- shepherd_core/vsource/target_model.py +3 -3
- shepherd_core/vsource/virtual_converter_model.py +3 -3
- shepherd_core/vsource/virtual_harvester_model.py +7 -9
- shepherd_core/vsource/virtual_harvester_simulation.py +6 -5
- shepherd_core/vsource/virtual_source_model.py +6 -5
- shepherd_core/vsource/virtual_source_simulation.py +7 -6
- shepherd_core/writer.py +33 -34
- {shepherd_core-2025.2.2.dist-info → shepherd_core-2025.4.2.dist-info}/METADATA +3 -4
- shepherd_core-2025.4.2.dist-info/RECORD +81 -0
- {shepherd_core-2025.2.2.dist-info → shepherd_core-2025.4.2.dist-info}/WHEEL +1 -1
- shepherd_core-2025.2.2.dist-info/RECORD +0 -81
- {shepherd_core-2025.2.2.dist-info → shepherd_core-2025.4.2.dist-info}/top_level.txt +0 -0
- {shepherd_core-2025.2.2.dist-info → shepherd_core-2025.4.2.dist-info}/zip-safe +0 -0
shepherd_core/fw_tools/patcher.py CHANGED

@@ -1,15 +1,16 @@
 """Read and modify symbols in ELF-files."""

 from pathlib import Path
+from typing import Annotated
 from typing import Optional

 from pydantic import Field
 from pydantic import validate_call
-from typing_extensions import Annotated

-from
-from
-from
+from shepherd_core.commons import UID_NAME
+from shepherd_core.commons import UID_SIZE
+from shepherd_core.logger import logger
+
 from .validation import is_elf

 try:

@@ -49,7 +50,7 @@ def find_symbol(file_elf: Path, symbol: str) -> bool:


 @validate_call
-def read_symbol(file_elf: Path, symbol: str, length: int =
+def read_symbol(file_elf: Path, symbol: str, length: int = UID_SIZE) -> Optional[int]:
     """Read value of symbol in ELF-File.

     Will be interpreted as int.

@@ -67,7 +68,7 @@ def read_symbol(file_elf: Path, symbol: str, length: int = uid_len_default) -> O

 def read_uid(file_elf: Path) -> Optional[int]:
     """Read value of UID-symbol for shepherd testbed."""
-    return read_symbol(file_elf, symbol=
+    return read_symbol(file_elf, symbol=UID_NAME, length=UID_SIZE)


 def read_arch(file_elf: Path) -> Optional[str]:

@@ -87,7 +88,7 @@ def read_arch(file_elf: Path) -> Optional[str]:
 def modify_symbol_value(
     file_elf: Path,
     symbol: str,
-    value: Annotated[int, Field(ge=0, lt=2 ** (8 *
+    value: Annotated[int, Field(ge=0, lt=2 ** (8 * UID_SIZE))],
     *,
     overwrite: bool = False,
 ) -> Optional[Path]:

@@ -105,20 +106,18 @@ def modify_symbol_value(
         raise RuntimeError(elf_error_text)
     elf = ELF(path=file_elf)
     addr = elf.symbols[symbol]
-    value_raw = elf.read(address=addr, count=
+    value_raw = elf.read(address=addr, count=UID_SIZE)[-UID_SIZE:]
     # ⤷ cutting needed -> msp produces 4b instead of 2
     value_old = int.from_bytes(bytes=value_raw, byteorder=elf.endian, signed=False)
-    value_raw = value.to_bytes(length=
+    value_raw = value.to_bytes(length=UID_SIZE, byteorder=elf.endian, signed=False)
+
     try:
         elf.write(address=addr, data=value_raw)
     except AttributeError:
         logger.warning("ELF-Modifier failed @%s for symbol '%s'", f"0x{addr:X}", symbol)
         return None
-
-
-    else:
-        file_new = file_elf.with_name(file_elf.stem + "_" + str(value) + file_elf.suffix)
-        # could be simplified, but py3.8-- doesn't know .with_stem()
+
+    file_new = file_elf if overwrite else file_elf.with_stem(file_elf.stem + "_" + str(value))
     elf.save(path=file_new)
     elf.close()
     logger.debug(

@@ -133,4 +132,4 @@ def modify_symbol_value(

 def modify_uid(file_elf: Path, value: int) -> Optional[Path]:
     """Replace value of UID-symbol for shepherd testbed."""
-    return modify_symbol_value(file_elf, symbol=
+    return modify_symbol_value(file_elf, symbol=UID_NAME, value=value, overwrite=True)
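The patcher hunks above swap the old module-level defaults for the UID_NAME/UID_SIZE constants from shepherd_core.commons and make modify_uid() patch the file in place. A minimal usage sketch, not package documentation: the import path shepherd_core.fw_tools.patcher follows the file list above, the optional ELF tooling is assumed to be installed, and the file path and UID value are made up.

# Sketch only: exercises read_uid() / modify_uid() as diffed above.
from pathlib import Path

from shepherd_core.commons import UID_NAME, UID_SIZE
from shepherd_core.fw_tools.patcher import modify_uid, read_uid

elf_file = Path("./firmware.elf")  # hypothetical ELF containing a UID symbol

old_uid = read_uid(elf_file)  # reads UID_SIZE bytes at the symbol named UID_NAME
print(f"current {UID_NAME} ({UID_SIZE} B): {old_uid}")

# modify_uid() now overwrites the input ELF (overwrite=True) instead of
# writing a "<stem>_<value>" sibling file as the removed lines did.
patched = modify_uid(elf_file, value=0x2001)
print("patched file:", patched)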
shepherd_core/fw_tools/validation.py CHANGED

@@ -12,8 +12,9 @@ from intelhex import IntelHex
 from intelhex import IntelHexError
 from pydantic import validate_call

-from
-from
+from shepherd_core.data_models.content.firmware_datatype import FirmwareDType
+from shepherd_core.logger import logger
+
 from .converter_elf import elf_to_hex

 try:
shepherd_core/inventory/__init__.py CHANGED

@@ -9,13 +9,13 @@ This will collect:
 from datetime import datetime
 from datetime import timedelta
 from pathlib import Path
-from typing import
+from typing import Annotated

 from pydantic import Field
-from typing_extensions import Annotated
 from typing_extensions import Self

-from
+from shepherd_core.data_models import ShpModel
+
 from .python import PythonInventory
 from .system import SystemInventory
 from .target import TargetInventory

@@ -55,7 +55,7 @@ class Inventory(PythonInventory, SystemInventory, TargetInventory):
 class InventoryList(ShpModel):
     """Collection of inventories for several devices."""

-    elements: Annotated[
+    elements: Annotated[list[Inventory], Field(min_length=1)]

     def to_csv(self, path: Path) -> None:
         """Generate a CSV.
shepherd_core/inventory/system.py CHANGED

@@ -3,15 +3,18 @@
 import platform
 import subprocess
 import time
+from collections.abc import Mapping
+from collections.abc import Sequence
 from datetime import datetime
 from pathlib import Path
-from
+from types import MappingProxyType
+from typing import Any
 from typing import Optional

 from typing_extensions import Self

-from
-from
+from shepherd_core.data_models.base.timezone import local_now
+from shepherd_core.logger import logger

 try:
     import psutil

@@ -21,7 +24,7 @@ except ImportError:
 from pydantic import ConfigDict
 from pydantic.types import PositiveInt

-from
+from shepherd_core.data_models import ShpModel


 class SystemInventory(ShpModel):

@@ -30,7 +33,7 @@ class SystemInventory(ShpModel):
     uptime: PositiveInt
     # ⤷ seconds
     timestamp: datetime
-    # time_delta: timedelta = timedelta(0) # noqa: ERA001
+    # time_delta: timedelta = timedelta(seconds=0) # noqa: ERA001
     # ⤷ lag behind earliest observer, TODO: wrong place

     system: str

@@ -44,13 +47,13 @@ class SystemInventory(ShpModel):

     hostname: str

-    interfaces:
+    interfaces: Mapping[str, Any] = MappingProxyType({})
     # ⤷ tuple with
     # ip IPvAnyAddress
     # mac MACStr

-    fs_root:
-    beagle:
+    fs_root: Sequence[str] = ()
+    beagle: Sequence[str] = ()

     model_config = ConfigDict(str_min_length=0)

shepherd_core/inventory/target.py CHANGED

@@ -1,19 +1,19 @@
 """Hardware related inventory model."""

-from
+from collections.abc import Sequence
 from typing import Optional

 from pydantic import ConfigDict
 from typing_extensions import Self

-from
+from shepherd_core.data_models import ShpModel


 class TargetInventory(ShpModel):
     """Hardware related inventory model."""

     cape: Optional[str] = None
-    targets:
+    targets: Sequence[str] = ()

     model_config = ConfigDict(str_min_length=0)

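Across system.py and target.py, container fields now default to immutable objects (MappingProxyType({}) and empty tuples) typed via collections.abc. A small standalone pydantic sketch of the same pattern, with made-up field names, shows why this sidesteps the shared-mutable-default pitfall; it is an illustration, not shepherd-core code.

# Illustration only: immutable defaults for Mapping/Sequence fields.
from collections.abc import Mapping, Sequence
from types import MappingProxyType
from typing import Any

from pydantic import BaseModel


class InventoryDemo(BaseModel):
    interfaces: Mapping[str, Any] = MappingProxyType({})  # read-only empty mapping
    targets: Sequence[str] = ()                           # empty tuple, not a shared list


a = InventoryDemo()
b = InventoryDemo(targets=["nrf52", "msp430"])
print(a.targets, b.targets)  # each instance keeps its own container; defaults are never shared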
shepherd_core/logger.py CHANGED

@@ -33,8 +33,8 @@ def set_log_verbose_level(log_: Union[logging.Logger, logging.Handler], verbose:
     if verbose < 3:
         # reduce log-overhead when not debugging, also more user-friendly exceptions
         logging._srcfile = None # noqa: SLF001
-        logging.logThreads =
-        logging.logProcesses =
+        logging.logThreads = False
+        logging.logProcesses = False

     if verbose > 2:
         chromalog.basicConfig(format="%(name)s %(levelname)s: %(message)s")
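A short sketch of the function this hunk touches, assuming only the names visible in the diff (set_log_verbose_level and the package logger from shepherd_core.logger):

# Sketch, based solely on the diffed lines above.
import logging

from shepherd_core.logger import logger, set_log_verbose_level

set_log_verbose_level(logger, verbose=2)
# verbose < 3 -> logging.logThreads / logging.logProcesses are now proper bools (False),
# skipping per-record thread/process lookups.
assert logging.logThreads is False and logging.logProcesses is False

set_log_verbose_level(logger, verbose=3)  # verbose > 2 -> chromalog debug formatting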
shepherd_core/reader.py CHANGED

@@ -9,14 +9,10 @@ import math
 import os
 from itertools import product
 from pathlib import Path
+from types import MappingProxyType
 from typing import TYPE_CHECKING
 from typing import Any
-from typing import ClassVar
-from typing import Dict
-from typing import Generator
-from typing import List
 from typing import Optional
-from typing import Type
 from typing import Union

 import h5py

@@ -26,13 +22,16 @@ from pydantic import validate_call
 from tqdm import trange
 from typing_extensions import Self

-from .commons import
+from .commons import SAMPLERATE_SPS_DEFAULT
 from .data_models.base.calibration import CalibrationPair
 from .data_models.base.calibration import CalibrationSeries
 from .data_models.content.energy_environment import EnergyDType
 from .decoder_waveform import Uart

 if TYPE_CHECKING:
+    from collections.abc import Generator
+    from collections.abc import Mapping
+    from collections.abc import Sequence
     from types import TracebackType


@@ -46,28 +45,27 @@ class Reader:

     """

-
+    BUFFER_SAMPLES_N: int = 10_000

-
-
-
-
-
-
-
-
+    MODE_TO_DTYPE: Mapping[str, Sequence[EnergyDType]] = MappingProxyType(
+        {
+            "harvester": (
+                EnergyDType.ivsample,
+                EnergyDType.ivcurve,
+                EnergyDType.isc_voc,
+            ),
+            "emulator": (EnergyDType.ivsample,),
+        }
+    )

     @validate_call
     def __init__(
         self,
-        file_path:
+        file_path: Path,
         *,
-        verbose:
+        verbose: bool = True,
     ) -> None:
-
-        self.file_path: Optional[Path] = None
-        if isinstance(file_path, (Path, str)):
-            self.file_path = Path(file_path).resolve()
+        self.file_path: Path = file_path.resolve()

         if not hasattr(self, "_logger"):
             self._logger: logging.Logger = logging.getLogger("SHPCore.Reader")

@@ -75,7 +73,7 @@ class Reader:
             self._logger.setLevel(logging.DEBUG if verbose else logging.INFO)

         if not hasattr(self, "samplerate_sps"):
-            self.samplerate_sps: int =
+            self.samplerate_sps: int = SAMPLERATE_SPS_DEFAULT
         self.sample_interval_ns: int = round(10**9 // self.samplerate_sps)
         self.sample_interval_s: float = 1 / self.samplerate_sps

@@ -155,7 +153,7 @@ class Reader:

     def __exit__(
         self,
-        typ: Optional[
+        typ: Optional[type[BaseException]] = None,
         exc: Optional[BaseException] = None,
         tb: Optional[TracebackType] = None,
         extra_arg: int = 0,

@@ -183,7 +181,7 @@ class Reader:
         self.sample_interval_ns = round(10**9 * self.sample_interval_s)
         self.samplerate_sps = max(round((sample_count - 1) / duration_s), 1)
         self.runtime_s = round(self.ds_voltage.shape[0] / self.samplerate_sps, 1)
-        self.buffers_n = int(self.ds_voltage.shape[0] // self.
+        self.buffers_n = int(self.ds_voltage.shape[0] // self.BUFFER_SAMPLES_N)
         if isinstance(self.file_path, Path):
             self.file_size = self.file_path.stat().st_size
         else:

@@ -194,6 +192,7 @@
         self,
         start_n: int = 0,
         end_n: Optional[int] = None,
+        n_samples_per_buffer: Optional[int] = None,
         *,
         is_raw: bool = False,
         omit_ts: bool = False,

@@ -201,27 +200,28 @@
         """Read the specified range of buffers from the hdf5 file.

         Generator - can be configured on first call
-        TODO: reconstruct - start/end mark samples &
-              each call can request a certain number of samples.

         Args:
         ----
         :param start_n: (int) Index of first buffer to be read
         :param end_n: (int) Index of last buffer to be read
+        :param n_samples_per_buffer: (int) allows changing
         :param is_raw: (bool) output original data, not transformed to SI-Units
         :param omit_ts: (bool) optimize reading if timestamp is never used
         Yields: Buffers between start and end (tuple with time, voltage, current)

         """
-        if
-
+        if n_samples_per_buffer is None:
+            n_samples_per_buffer = self.BUFFER_SAMPLES_N
+        end_max = int(self.ds_voltage.shape[0] // n_samples_per_buffer)
+        end_n = end_max if end_n is None else min(end_n, end_max)
         self._logger.debug("Reading blocks %d to %d from source-file", start_n, end_n)
         _raw = is_raw
         _wts = not omit_ts

         for i in range(start_n, end_n):
-            idx_start = i *
-            idx_end = idx_start +
+            idx_start = i * n_samples_per_buffer
+            idx_end = idx_start + n_samples_per_buffer
             if _raw:
                 yield (
                     self.ds_time[idx_start:idx_end] if _wts else None,

@@ -252,7 +252,7 @@
             return self.h5file.attrs["mode"]
         return ""

-    def get_config(self) ->
+    def get_config(self) -> dict:
         if "config" in self.h5file["data"].attrs:
             return yaml.safe_load(self.h5file["data"].attrs["config"])
         return {}

@@ -327,7 +327,7 @@
                 self.file_path.name,
             )
             return False
-        if self.h5file.attrs["mode"] not in self.
+        if self.h5file.attrs["mode"] not in self.MODE_TO_DTYPE:
            self._logger.error(
                 "[FileValidation] unsupported mode '%s' in '%s'",
                 attr,

@@ -359,7 +359,7 @@
                 self.file_path.name,
             )
             return False
-        if self.get_datatype() not in self.
+        if self.get_datatype() not in self.MODE_TO_DTYPE[self.get_mode()]:
             self._logger.error(
                 "[FileValidation] unsupported type '%s' for mode '%s' in '%s'",
                 self.get_datatype(),

@@ -397,7 +397,7 @@
                 self.file_path.name,
             )
         # dataset-length should be multiple of buffersize
-        remaining_size = ds_volt_size % self.
+        remaining_size = ds_volt_size % self.BUFFER_SAMPLES_N
         if remaining_size != 0:
             self._logger.warning(
                 "[FileValidation] datasets are not aligned with buffer-size in '%s'",

@@ -476,7 +476,7 @@

     def _dset_statistics(
         self, dset: h5py.Dataset, cal: Optional[CalibrationPair] = None
-    ) ->
+    ) -> dict[str, float]:
         """Create basic stats for a provided dataset.

         :param dset: dataset to evaluate

@@ -509,7 +509,7 @@
         if len(stats_list) < 1:
             return {}
         stats_nd = np.stack(stats_list)
-        stats:
+        stats: dict[str, float] = {
             # TODO: wrong calculation for ndim-datasets with n>1
             "mean": float(stats_nd[:, 0].mean()),
             "min": float(stats_nd[:, 1].min()),

@@ -519,7 +519,7 @@
         }
         return stats

-    def _data_timediffs(self) ->
+    def _data_timediffs(self) -> list[float]:
         """Calculate list of unique time-deltas [s] between buffers.

         Optimized version that only looks at the start of each buffer.

@@ -538,14 +538,14 @@

         def calc_timediffs(idx_start: int) -> list:
             ds_time = self.ds_time[
-                idx_start : (idx_start + self.max_elements) : self.
+                idx_start : (idx_start + self.max_elements) : self.BUFFER_SAMPLES_N
             ]
             diffs_np = np.unique(ds_time[1:] - ds_time[0:-1], return_counts=False)
             return list(np.array(diffs_np))

         diffs_ll = [calc_timediffs(i) for i in job_iter]
         diffs = {
-            round(self._cal.time.raw_to_si(j) / self.
+            round(self._cal.time.raw_to_si(j) / self.BUFFER_SAMPLES_N, 6)
             for i in diffs_ll
             for j in i
         }

@@ -563,7 +563,7 @@
             self._logger.warning(
                 "Time-jumps detected -> expected equal steps, but got: %s s", diffs
             )
-        return (len(diffs) <= 1) and diffs[0] == round(0.1 / self.
+        return (len(diffs) <= 1) and diffs[0] == round(0.1 / self.BUFFER_SAMPLES_N, 6)

     def count_errors_in_log(self, group_name: str = "sheep", min_level: int = 40) -> int:
         if group_name not in self.h5file:

@@ -581,7 +581,7 @@
         node: Union[h5py.Dataset, h5py.Group, None] = None,
         *,
         minimal: bool = False,
-    ) ->
+    ) -> dict[str, dict]:
         """Recursive FN to capture the structure of the file.

         :param node: starting node, leave free to go through whole file

@@ -592,7 +592,7 @@
             self._refresh_file_stats()
             return self.get_metadata(self.h5file, minimal=minimal)

-        metadata:
+        metadata: dict[str, dict] = {}
         if isinstance(node, h5py.Dataset) and not minimal:
             metadata["_dataset_info"] = {
                 "datatype": str(node.dtype),

@@ -614,7 +614,7 @@
                 with contextlib.suppress(yaml.YAMLError):
                     attr_value = yaml.safe_load(attr_value)
             elif "int" in str(type(attr_value)):
-                # TODO: why not isinstance? can it be
+                # TODO: why not isinstance? can it be list[int] other complex type?
                 attr_value = int(attr_value)
             else:
                 attr_value = float(attr_value)
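The Reader now requires a pathlib.Path (the old str/None fallback is gone), exposes its buffer size as BUFFER_SAMPLES_N, and keeps the mode-to-datatype table in an immutable MODE_TO_DTYPE mapping. A minimal sketch under assumptions: Reader is imported from shepherd_core.reader (per the file header above), it is used as a context manager (its __exit__ is shown; the matching __enter__ is assumed), and ./harvest.h5 is a hypothetical recording.

# Sketch only; the file name is made up.
from pathlib import Path

from shepherd_core.reader import Reader

print("buffer size:", Reader.BUFFER_SAMPLES_N)   # named class-level constant
print("modes:", dict(Reader.MODE_TO_DTYPE))      # immutable MappingProxyType

with Reader(Path("./harvest.h5"), verbose=False) as reader:
    print("mode:", reader.get_mode())            # e.g. "harvester" or "emulator"
    print("config:", reader.get_config())        # -> dict (annotation tightened above)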
shepherd_core/testbed_client/client_abc_fix.py CHANGED

@@ -17,11 +17,12 @@ TODO: Comfort functions missing

 from abc import ABC
 from abc import abstractmethod
-from typing import
+from typing import Any
 from typing import Optional

-from
-from
+from shepherd_core.data_models.base.shepherd import ShpModel
+from shepherd_core.data_models.base.wrapper import Wrapper
+
 from .fixtures import Fixtures


@@ -40,11 +41,11 @@ class AbcClient(ABC):
     """

     @abstractmethod
-    def query_ids(self, model_type: str) ->
+    def query_ids(self, model_type: str) -> list[int]:
         pass

     @abstractmethod
-    def query_names(self, model_type: str) ->
+    def query_names(self, model_type: str) -> list[str]:
         pass

     @abstractmethod

@@ -54,11 +55,15 @@ class AbcClient(ABC):
         pass

     @abstractmethod
-    def try_inheritance(
+    def try_inheritance(
+        self, model_type: str, values: dict[str, Any]
+    ) -> tuple[dict[str, Any], list[str]]:
         # TODO: maybe internal? yes
         pass

-    def try_completing_model(
+    def try_completing_model(
+        self, model_type: str, values: dict[str, Any]
+    ) -> tuple[dict[str, Any], list[str]]:
         """Init by name/id, for none existing instances raise Exception.

         This is the main entry-point for querying a model (used be the core-lib).

@@ -73,7 +78,7 @@ class AbcClient(ABC):
         return self.try_inheritance(model_type, values)

     @abstractmethod
-    def fill_in_user_data(self, values: dict) -> dict:
+    def fill_in_user_data(self, values: dict[str, Any]) -> dict[str, Any]:
         # TODO: is it really helpful and needed?
         pass


@@ -83,7 +88,7 @@ class FixturesClient(AbcClient):

     def __init__(self) -> None:
         super().__init__()
-        self._fixtures:
+        self._fixtures: Fixtures = Fixtures()

     def insert(self, data: ShpModel) -> bool:
         wrap = Wrapper(

@@ -93,10 +98,10 @@ class FixturesClient(AbcClient):
         self._fixtures.insert_model(wrap)
         return True

-    def query_ids(self, model_type: str) ->
+    def query_ids(self, model_type: str) -> list[int]:
         return list(self._fixtures[model_type].elements_by_id.keys())

-    def query_names(self, model_type: str) ->
+    def query_names(self, model_type: str) -> list[str]:
         return list(self._fixtures[model_type].elements_by_name.keys())

     def query_item(

@@ -108,10 +113,12 @@ class FixturesClient(AbcClient):
             return self._fixtures[model_type].query_name(name)
         raise ValueError("Query needs either uid or name of object")

-    def try_inheritance(
+    def try_inheritance(
+        self, model_type: str, values: dict[str, Any]
+    ) -> tuple[dict[str, Any], list[str]]:
         return self._fixtures[model_type].inheritance(values)

-    def fill_in_user_data(self, values: dict) -> dict:
+    def fill_in_user_data(self, values: dict[str, Any]) -> dict[str, Any]:
         """Add fake user-data when offline-client is used.

         Hotfix until WebClient is working.
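A short sketch of the offline client whose annotations were tightened above. The import path follows the file list; the model-type string "target" is only an assumed example, since the exact fixture keys depend on the shipped fixtures.

# Sketch, not shepherd-core docs: FixturesClient is the offline fallback defined above.
from shepherd_core.testbed_client.client_abc_fix import FixturesClient

client = FixturesClient()
ids = client.query_ids("target")      # -> list[int], per the new annotation; key is assumed
names = client.query_names("target")  # -> list[str]
print(len(ids), names[:3])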
shepherd_core/testbed_client/client_web.py CHANGED

@@ -2,15 +2,16 @@

 from importlib import import_module
 from pathlib import Path
-from typing import
+from typing import Any
 from typing import Optional
 from typing import Union

 from pydantic import validate_call

-from
-from
-from
+from shepherd_core.commons import TESTBED_SERVER_URI
+from shepherd_core.data_models.base.shepherd import ShpModel
+from shepherd_core.data_models.base.wrapper import Wrapper
+
 from .client_abc_fix import AbcClient
 from .user_model import User


@@ -37,7 +38,7 @@ class WebClient(AbcClient):
         if not hasattr(self, "_token"):
             # add default values
             self._token: str = "basic_public_access" # noqa: S105
-            self._server: str =
+            self._server: str = TESTBED_SERVER_URI
             self._user: Optional[User] = None
             self._key: Optional[str] = None
             self._connected: bool = False

@@ -49,6 +50,8 @@ class WebClient(AbcClient):
     # ABC Functions below

     def insert(self, data: ShpModel) -> bool:
+        if self._req is None:
+            return False
         wrap = Wrapper(
             datatype=type(data).__name__,
             parameters=data.model_dump(),

@@ -57,10 +60,10 @@ class WebClient(AbcClient):
         r.raise_for_status()
         return True

-    def query_ids(self, model_type: str) ->
+    def query_ids(self, model_type: str) -> list[int]:
         raise NotImplementedError("TODO")

-    def query_names(self, model_type: str) ->
+    def query_names(self, model_type: str) -> list[str]:
         raise NotImplementedError("TODO")

     def query_item(

@@ -68,10 +71,14 @@ class WebClient(AbcClient):
     ) -> dict:
         raise NotImplementedError("TODO")

-    def try_inheritance(
+    def try_inheritance(
+        self, model_type: str, values: dict[str, Any]
+    ) -> tuple[dict[str, Any], list[str]]:
         raise NotImplementedError("TODO")

-    def fill_in_user_data(self, values: dict) -> dict:
+    def fill_in_user_data(self, values: dict[str, Any]) -> dict[str, Any]:
+        if self._user is None:
+            return values
         if values.get("owner") is None:
             values["owner"] = self._user.name
         if values.get("group") is None:

@@ -105,7 +112,7 @@ class WebClient(AbcClient):
         return self._query_user_data()

     def _query_session_key(self) -> bool:
-        if self._server:
+        if self._server and self._req is not None:
            r = self._req.get(self._server + "/session_key", timeout=2)
            r.raise_for_status()
            self._key = r.json()["value"] # TODO: not finished

@@ -113,7 +120,7 @@ class WebClient(AbcClient):
             return False

     def _query_user_data(self) -> bool:
-        if self._server:
+        if self._server and self._req is not None:
             r = self._req.get(self._server + "/user?token=" + self._token, timeout=2)
             # TODO: possibly a security nightmare (send via json or encrypted via public key?)
             r.raise_for_status()