pyxcp 0.22.33__cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyxcp might be problematic. Click here for more details.

Files changed (131)
  1. pyxcp/__init__.py +20 -0
  2. pyxcp/aml/EtasCANMonitoring.a2l +82 -0
  3. pyxcp/aml/EtasCANMonitoring.aml +67 -0
  4. pyxcp/aml/XCP_Common.aml +408 -0
  5. pyxcp/aml/XCPonCAN.aml +78 -0
  6. pyxcp/aml/XCPonEth.aml +33 -0
  7. pyxcp/aml/XCPonFlx.aml +113 -0
  8. pyxcp/aml/XCPonSxI.aml +66 -0
  9. pyxcp/aml/XCPonUSB.aml +106 -0
  10. pyxcp/aml/ifdata_CAN.a2l +20 -0
  11. pyxcp/aml/ifdata_Eth.a2l +11 -0
  12. pyxcp/aml/ifdata_Flx.a2l +94 -0
  13. pyxcp/aml/ifdata_SxI.a2l +13 -0
  14. pyxcp/aml/ifdata_USB.a2l +81 -0
  15. pyxcp/asam/__init__.py +0 -0
  16. pyxcp/asam/types.py +131 -0
  17. pyxcp/asamkeydll.c +116 -0
  18. pyxcp/asamkeydll.sh +2 -0
  19. pyxcp/checksum.py +732 -0
  20. pyxcp/cmdline.py +52 -0
  21. pyxcp/config/__init__.py +1102 -0
  22. pyxcp/config/legacy.py +120 -0
  23. pyxcp/constants.py +47 -0
  24. pyxcp/cpp_ext/__init__.py +0 -0
  25. pyxcp/cpp_ext/bin.hpp +104 -0
  26. pyxcp/cpp_ext/blockmem.hpp +58 -0
  27. pyxcp/cpp_ext/cpp_ext.cpython-310-aarch64-linux-gnu.so +0 -0
  28. pyxcp/cpp_ext/cpp_ext.cpython-311-aarch64-linux-gnu.so +0 -0
  29. pyxcp/cpp_ext/cpp_ext.cpython-312-aarch64-linux-gnu.so +0 -0
  30. pyxcp/cpp_ext/daqlist.hpp +206 -0
  31. pyxcp/cpp_ext/event.hpp +67 -0
  32. pyxcp/cpp_ext/extension_wrapper.cpp +100 -0
  33. pyxcp/cpp_ext/helper.hpp +280 -0
  34. pyxcp/cpp_ext/mcobject.hpp +246 -0
  35. pyxcp/cpp_ext/tsqueue.hpp +46 -0
  36. pyxcp/daq_stim/__init__.py +232 -0
  37. pyxcp/daq_stim/optimize/__init__.py +67 -0
  38. pyxcp/daq_stim/optimize/binpacking.py +41 -0
  39. pyxcp/daq_stim/scheduler.cpp +28 -0
  40. pyxcp/daq_stim/scheduler.hpp +75 -0
  41. pyxcp/daq_stim/stim.cpp +13 -0
  42. pyxcp/daq_stim/stim.cpython-310-aarch64-linux-gnu.so +0 -0
  43. pyxcp/daq_stim/stim.cpython-311-aarch64-linux-gnu.so +0 -0
  44. pyxcp/daq_stim/stim.cpython-312-aarch64-linux-gnu.so +0 -0
  45. pyxcp/daq_stim/stim.hpp +604 -0
  46. pyxcp/daq_stim/stim_wrapper.cpp +50 -0
  47. pyxcp/dllif.py +95 -0
  48. pyxcp/errormatrix.py +878 -0
  49. pyxcp/examples/conf_can.toml +19 -0
  50. pyxcp/examples/conf_can_user.toml +16 -0
  51. pyxcp/examples/conf_can_vector.json +11 -0
  52. pyxcp/examples/conf_can_vector.toml +11 -0
  53. pyxcp/examples/conf_eth.toml +9 -0
  54. pyxcp/examples/conf_nixnet.json +20 -0
  55. pyxcp/examples/conf_socket_can.toml +12 -0
  56. pyxcp/examples/conf_sxi.json +9 -0
  57. pyxcp/examples/conf_sxi.toml +7 -0
  58. pyxcp/examples/run_daq.py +163 -0
  59. pyxcp/examples/xcp_policy.py +60 -0
  60. pyxcp/examples/xcp_read_benchmark.py +38 -0
  61. pyxcp/examples/xcp_skel.py +49 -0
  62. pyxcp/examples/xcp_unlock.py +38 -0
  63. pyxcp/examples/xcp_user_supplied_driver.py +54 -0
  64. pyxcp/examples/xcphello.py +79 -0
  65. pyxcp/examples/xcphello_recorder.py +107 -0
  66. pyxcp/master/__init__.py +9 -0
  67. pyxcp/master/errorhandler.py +442 -0
  68. pyxcp/master/master.py +2046 -0
  69. pyxcp/py.typed +0 -0
  70. pyxcp/recorder/__init__.py +101 -0
  71. pyxcp/recorder/build_clang.cmd +1 -0
  72. pyxcp/recorder/build_clang.sh +2 -0
  73. pyxcp/recorder/build_gcc.cmd +1 -0
  74. pyxcp/recorder/build_gcc.sh +2 -0
  75. pyxcp/recorder/build_gcc_arm.sh +2 -0
  76. pyxcp/recorder/converter/__init__.py +451 -0
  77. pyxcp/recorder/lz4.c +2829 -0
  78. pyxcp/recorder/lz4.h +879 -0
  79. pyxcp/recorder/lz4hc.c +2041 -0
  80. pyxcp/recorder/lz4hc.h +413 -0
  81. pyxcp/recorder/mio.hpp +1714 -0
  82. pyxcp/recorder/reader.hpp +139 -0
  83. pyxcp/recorder/reco.py +277 -0
  84. pyxcp/recorder/recorder.rst +0 -0
  85. pyxcp/recorder/rekorder.cpp +59 -0
  86. pyxcp/recorder/rekorder.cpython-310-aarch64-linux-gnu.so +0 -0
  87. pyxcp/recorder/rekorder.cpython-311-aarch64-linux-gnu.so +0 -0
  88. pyxcp/recorder/rekorder.cpython-312-aarch64-linux-gnu.so +0 -0
  89. pyxcp/recorder/rekorder.hpp +274 -0
  90. pyxcp/recorder/setup.py +41 -0
  91. pyxcp/recorder/test_reko.py +34 -0
  92. pyxcp/recorder/unfolder.hpp +1332 -0
  93. pyxcp/recorder/wrap.cpp +189 -0
  94. pyxcp/recorder/writer.hpp +302 -0
  95. pyxcp/scripts/__init__.py +0 -0
  96. pyxcp/scripts/pyxcp_probe_can_drivers.py +20 -0
  97. pyxcp/scripts/xcp_examples.py +64 -0
  98. pyxcp/scripts/xcp_fetch_a2l.py +40 -0
  99. pyxcp/scripts/xcp_id_scanner.py +19 -0
  100. pyxcp/scripts/xcp_info.py +146 -0
  101. pyxcp/scripts/xcp_profile.py +27 -0
  102. pyxcp/scripts/xmraw_converter.py +31 -0
  103. pyxcp/stim/__init__.py +0 -0
  104. pyxcp/tests/test_asam_types.py +24 -0
  105. pyxcp/tests/test_binpacking.py +186 -0
  106. pyxcp/tests/test_can.py +1324 -0
  107. pyxcp/tests/test_checksum.py +95 -0
  108. pyxcp/tests/test_daq.py +193 -0
  109. pyxcp/tests/test_daq_opt.py +426 -0
  110. pyxcp/tests/test_frame_padding.py +156 -0
  111. pyxcp/tests/test_master.py +2006 -0
  112. pyxcp/tests/test_transport.py +81 -0
  113. pyxcp/tests/test_utils.py +30 -0
  114. pyxcp/timing.py +60 -0
  115. pyxcp/transport/__init__.py +10 -0
  116. pyxcp/transport/base.py +440 -0
  117. pyxcp/transport/base_transport.hpp +0 -0
  118. pyxcp/transport/can.py +441 -0
  119. pyxcp/transport/eth.py +219 -0
  120. pyxcp/transport/sxi.py +135 -0
  121. pyxcp/transport/transport_wrapper.cpp +0 -0
  122. pyxcp/transport/usb_transport.py +213 -0
  123. pyxcp/types.py +1000 -0
  124. pyxcp/utils.py +127 -0
  125. pyxcp/vector/__init__.py +0 -0
  126. pyxcp/vector/map.py +82 -0
  127. pyxcp-0.22.33.dist-info/LICENSE +165 -0
  128. pyxcp-0.22.33.dist-info/METADATA +107 -0
  129. pyxcp-0.22.33.dist-info/RECORD +131 -0
  130. pyxcp-0.22.33.dist-info/WHEEL +6 -0
  131. pyxcp-0.22.33.dist-info/entry_points.txt +9 -0
pyxcp/py.typed ADDED (empty marker file)
pyxcp/recorder/__init__.py ADDED
@@ -0,0 +1,101 @@
1
+ #!/usr/bin/env python
2
+ """XCP Frame Recording Facility.
3
+ """
4
+
5
+ from dataclasses import dataclass
6
+ from typing import Union
7
+
8
+ from pyxcp.types import FrameCategory
9
+
10
+
11
+ try:
12
+ import pandas as pd
13
+ except ImportError:
14
+ HAS_PANDAS = False
15
+ else:
16
+ HAS_PANDAS = True
17
+
18
+ from pyxcp.recorder.rekorder import DaqOnlinePolicy # noqa: F401
19
+ from pyxcp.recorder.rekorder import (
20
+ DaqRecorderPolicy, # noqa: F401
21
+ Deserializer, # noqa: F401
22
+ MeasurementParameters, # noqa: F401
23
+ ValueHolder, # noqa: F401
24
+ )
25
+ from pyxcp.recorder.rekorder import XcpLogFileDecoder as _XcpLogFileDecoder
26
+ from pyxcp.recorder.rekorder import _PyXcpLogFileReader, _PyXcpLogFileWriter, data_types
27
+
28
+
29
+ DATA_TYPES = data_types()
30
+
31
+
32
@dataclass
class XcpLogFileHeader:
    """Header meta-data of an .xmraw log file.

    Field order mirrors the tuple returned by the C++ reader extension's
    ``get_header_as_tuple()`` (see ``XcpLogFileReader.get_header``).
    """

    version: int  # File format version.
    options: int  # Option flags -- semantics defined by the C++ extension; TODO confirm.
    num_containers: int  # Number of frame containers in the file.
    record_count: int  # Total number of recorded frames.
    size_uncompressed: int  # Payload size before compression (bytes).
    size_compressed: int  # Payload size after compression (bytes).
    compression_ratio: float  # NOTE(review): presumably uncompressed/compressed -- confirm in writer.
43
+
44
+
45
# Frame counters are 16-bit: XcpLogFileWriter.add_frame() wraps them modulo COUNTER_MAX + 1.
COUNTER_MAX = 0xFFFF
46
+
47
+
48
class XcpLogFileReader:
    """High-level read access to an .xmraw log file.

    Thin wrapper around the C++ extension reader ``_PyXcpLogFileReader``;
    iterating yields ``(category, counter, timestamp, payload)`` tuples.
    """

    def __init__(self, file_name):
        self._reader = _PyXcpLogFileReader(file_name)

    @property
    def header(self):
        """Raw header object as provided by the underlying reader."""
        return self._reader.get_header()

    def get_header(self):
        """Header unpacked into an :class:`XcpLogFileHeader` instance."""
        return XcpLogFileHeader(*self._reader.get_header_as_tuple())

    def get_metadata(self):
        """Opaque meta-data blob stored alongside the frames."""
        return self._reader.get_metadata()

    def __iter__(self):
        """Iterate over all frames, block by block, until the reader is exhausted."""
        while (block := self._reader.next_block()) is not None:
            for category, counter, timestamp, _, payload in block:
                yield FrameCategory(category), counter, timestamp, payload

    def reset_iter(self):
        """Rewind the underlying reader so iteration can start over."""
        self._reader.reset()

    def as_dataframe(self):
        """Return all frames as a pandas DataFrame indexed by timestamp (requires 'pandas')."""
        if not HAS_PANDAS:
            raise NotImplementedError("method as_dataframe() requires 'pandas' package")
        df = pd.DataFrame(iter(self), columns=["category", "counter", "timestamp", "payload"])
        df = df.set_index("timestamp")
        df.counter = df.counter.astype("uint16")
        category_names = {v: k for k, v in FrameCategory.__members__.items()}
        df.category = df.category.map(category_names).astype("category")
        return df
84
+
85
+
86
class XcpLogFileWriter:
    """Write XCP frames to an .xmraw log file.

    Thin wrapper around the C++ extension writer ``_PyXcpLogFileWriter``.
    ``finalize()`` is idempotent and is invoked from ``__del__`` as a safety net.
    """

    def __init__(self, file_name: str, prealloc=500, chunk_size=1):
        self._writer = _PyXcpLogFileWriter(file_name, prealloc, chunk_size)
        self._finalized = False

    def __del__(self):
        # BUG FIX: guard with getattr() -- if __init__ raised before
        # `_finalized` was assigned, the original __del__ crashed with
        # AttributeError during interpreter cleanup.
        if not getattr(self, "_finalized", True):
            self.finalize()

    def add_frame(self, category: FrameCategory, counter: int, timestamp: float, payload: Union[bytes, bytearray]):
        # Counter wraps at 16 bit (see COUNTER_MAX).
        self._writer.add_frame(category, counter % (COUNTER_MAX + 1), timestamp, len(payload), payload)

    def finalize(self):
        """Flush and close the underlying writer. Safe to call more than once."""
        # BUG FIX: the original never set `_finalized = True`, so an explicit
        # finalize() followed by garbage collection finalized the writer twice.
        if not self._finalized:
            self._writer.finalize()
            self._finalized = True
@@ -0,0 +1 @@
1
rem Debug build (O0, C++20) of the standalone recorder with clang++, loop-vectorization remarks enabled.
rem NOTE(review): references lz4.cpp, but the package ships lz4.c -- confirm the file name.
clang++ -std=c++20 -O0 -fvectorize -fexceptions -Rpass=loop-vectorize -ggdb -Wall -Wextra -Weffc++ -DLZ4_DEBUG=1 -DSTANDALONE_REKORDER=1 lz4.cpp rekorder.cpp -o rekorder.exe
@@ -0,0 +1,2 @@
1
+ #!/bin/bash
2
+ clang++ -std=c++20 -O0 -fvectorize -Rpass=loop-vectorize -ggdb -Wall -Wextra -Weffc++ -lpthread -DLZ4_DEBUG=1 -DSTANDALONE_REKORDER=1 lz4.cpp rekorder.cpp -o rekorder
@@ -0,0 +1 @@
1
rem Debug/profiling build (O0, -pg, C++20 coroutines) of the standalone recorder with g++, SSE4 vectorization.
g++ -std=c++20 -O0 -ggdb -march=native -fexceptions -ffast-math -ftree-vectorize -msse4 -mfpmath=sse -pg -fopt-info-vec-all -Wall -Wextra -Weffc++ -fcoroutines -DLZ4_DEBUG=1 -DSTANDALONE_REKORDER=1 lz4.cpp rekorder.cpp -o rekorder.exe
@@ -0,0 +1,2 @@
1
+ #!/bin/sh
2
+ g++ -std=c++20 -O0 -ffast-math -ftree-vectorize -ftree-vectorizer-verbose=9 -fcoroutines -ggdb -Wall -Wextra -Weffc++ -DLZ4_DEBUG=1 -DSTANDALONE_REKORDER=1 lz4.cpp rekorder.cpp -o rekorder -lpthread
@@ -0,0 +1,2 @@
1
+ #!/bin/bash
2
+ g++ -std=c++20 -O3 -mcpu=cortex-a7 -mfpu=neon-vfpv4 -mfloat-abi=hard -ffast-math -ftree-vectorize -ftree-vectorizer-verbose=9 -ggdb -Wall -Wextra -Weffc++ -DLZ4_DEBUG=1 -DSTANDALONE_REKORDER=1 lz4.cpp rekorder.cpp -o rekorder
@@ -0,0 +1,451 @@
1
+ """Convert pyXCPs .xmraw files to common data formats.
2
+ """
3
+
4
+ import csv
5
+ import logging
6
+ import os
7
+ import sqlite3
8
+ from array import array
9
+ from dataclasses import dataclass, field
10
+ from mmap import PAGESIZE
11
+ from pathlib import Path
12
+ from typing import Any, List
13
+
14
+ import numpy as np
15
+ from rich.logging import RichHandler
16
+
17
+
18
+ try:
19
+ import pyarrow as pa
20
+ import pyarrow.parquet as pq
21
+
22
+ has_arrow = True
23
+ except ImportError:
24
+ has_arrow = False
25
+
26
+ try:
27
+ import h5py
28
+
29
+ has_h5py = True
30
+ except ImportError:
31
+ has_h5py = False
32
+
33
+ try:
34
+ from asammdf import MDF, Signal
35
+ from asammdf.blocks.v4_blocks import HeaderBlock
36
+ from asammdf.blocks.v4_constants import FLAG_HD_TIME_OFFSET_VALID
37
+
38
+ has_asammdf = True
39
+ except ImportError:
40
+ has_asammdf = False
41
+
42
+ try:
43
+ import xlsxwriter
44
+
45
+ has_xlsxwriter = True
46
+
47
+ except ImportError:
48
+ has_xlsxwriter = False
49
+
50
+ from pyxcp import console
51
+ from pyxcp.recorder.rekorder import XcpLogFileDecoder as _XcpLogFileDecoder
52
+
53
+
54
+ FORMAT = "%(message)s"
55
+ logging.basicConfig(level="NOTSET", format=FORMAT, datefmt="[%X]", handlers=[RichHandler()])
56
+
57
+ log = logging.getLogger("rich")
58
+
59
# Maps recorder type tags to array.array type codes used to buffer samples.
# F16/BF16 are widened to 32-bit float ("f"): array.array has no 16-bit float code.
# NOTE(review): "L"/"l" are platform-width (>= 4 bytes), so U32/I32 buffers may be wider than 32 bit.
MAP_TO_ARRAY = {
    "U8": "B",
    "I8": "b",
    "U16": "H",
    "I16": "h",
    "U32": "L",
    "I32": "l",
    "U64": "Q",
    "I64": "q",
    "F32": "f",
    "F64": "d",
    "F16": "f",
    "BF16": "f",
}
73
+
74
# Maps recorder type tags to numpy dtypes (used as target types by MdfConverter).
# NOTE(review): BF16 is mapped to float16 -- bfloat16 has a different bit layout;
# confirm the upstream decoder already converted the values.
MAP_TO_NP = {
    "U8": np.uint8,
    "I8": np.int8,
    "U16": np.uint16,
    "I16": np.int16,
    "U32": np.uint32,
    "I32": np.int32,
    "U64": np.uint64,
    "I64": np.int64,
    "F32": np.float32,
    "F64": np.float64,
    "F16": np.float16,
    "BF16": np.float16,
}
88
+
89
+
90
@dataclass
class Storage:
    """Sample buffer for a single measurement."""

    name: str  # Measurement (signal) name.
    target_type: Any  # Target-format type (converter specific: numpy dtype, SQL type name, ...).
    arr: array  # Raw sample buffer; type code taken from MAP_TO_ARRAY.


@dataclass
class StorageContainer:
    """Per-DAQ-list collection of measurement buffers plus their two timestamp streams."""

    name: str  # DAQ-list name; also used as table/sheet/file name by the converters.
    # BUG FIX: `default_factory` must be a *callable*; the original passed a list
    # instance (`field(default_factory=[])`), which raises
    # "TypeError: 'list' object is not callable" as soon as `arr` is omitted.
    arr: List[Storage] = field(default_factory=list)
    timestamp0: List[int] = field(default_factory=lambda: array("Q"))
    timestamp1: List[int] = field(default_factory=lambda: array("Q"))
103
+
104
+
105
class XcpLogFileDecoder(_XcpLogFileDecoder):
    """Common base of all .xmraw converters.

    Derives the output file name from the recording file name, builds one
    :class:`StorageContainer` per DAQ list, and exposes ``on_initialize`` /
    ``on_container`` / ``on_finalize`` template hooks for subclasses.
    """

    def __init__(
        self,
        recording_file_name: str,
        out_file_suffix: str,
        remove_file: bool = True,
        target_type_map: dict = None,
        target_file_name: str = "",
    ):
        super().__init__(recording_file_name)
        self.logger = logging.getLogger("PyXCP")
        self.logger.setLevel(logging.DEBUG)
        self.out_file_name = Path(recording_file_name).with_suffix(out_file_suffix)
        self.out_file_suffix = out_file_suffix
        self.target_type_map = target_type_map or {}
        if remove_file:
            # Best effort: a stale output file from a previous run may not exist.
            try:
                os.unlink(self.out_file_name)
            except FileNotFoundError:
                pass

    def initialize(self) -> None:
        self.on_initialize()

    def on_initialize(self) -> None:
        self.setup_containers()

    def finalize(self) -> None:
        self.on_finalize()

    def on_finalize(self) -> None:
        pass

    def setup_containers(self) -> None:
        """Create one StorageContainer per DAQ list and hand each to on_container()."""
        self.tables = []
        for daq_list in self.daq_lists:
            buffers = []
            for name, type_str in daq_list.headers:
                type_code = MAP_TO_ARRAY[type_str]
                target_type = self.target_type_map.get(type_str)
                buffers.append(Storage(name, target_type, array(type_code)))
            container = StorageContainer(daq_list.name, buffers)
            self.tables.append(container)
            self.on_container(container)

    def on_container(self, sc: StorageContainer) -> None:
        pass
155
+
156
+
157
class CollectRows:
    """Mixin: buffer each incoming DAQ row into ``self.tables`` for batch output."""

    def on_daq_list(self, daq_list_num: int, timestamp0: int, timestamp1: int, measurements: list) -> None:
        container = self.tables[daq_list_num]
        container.timestamp0.append(timestamp0)
        container.timestamp1.append(timestamp1)
        # One value per Storage, in declaration order.
        for pos in range(len(measurements)):
            container.arr[pos].arr.append(measurements[pos])
166
+
167
+
168
class ArrowConverter(CollectRows, XcpLogFileDecoder):
    """Convert an .xmraw recording to one Apache Parquet file per DAQ list."""

    # BUG FIX: only build the pyarrow type map when pyarrow is importable.
    # The original evaluated `pa.uint8()` etc. unconditionally at
    # class-definition time, so importing this module crashed with NameError
    # whenever pyarrow was missing -- defeating the `has_arrow` feature guard.
    if has_arrow:
        MAP_TO_ARROW = {
            "U8": pa.uint8(),
            "I8": pa.int8(),
            "U16": pa.uint16(),
            "I16": pa.int16(),
            "U32": pa.uint32(),
            "I32": pa.int32(),
            "U64": pa.uint64(),
            "I64": pa.int64(),
            "F32": pa.float32(),
            "F64": pa.float64(),
            "F16": pa.float16(),
            "BF16": pa.float16(),
        }
    else:
        MAP_TO_ARROW = {}

    def __init__(self, recording_file_name: str, target_file_name: str = ""):
        super().__init__(
            recording_file_name=recording_file_name,
            out_file_suffix=".parquet",
            remove_file=False,
            target_type_map=self.MAP_TO_ARROW,
            target_file_name=target_file_name,
        )

    def on_finalize(self) -> list:
        """Write one Parquet file per DAQ list.

        Returns the list of ``pyarrow.Table`` objects that were written
        (the original returned them too, despite its ``-> None`` annotation).
        """
        result = []
        for container in self.tables:
            names = ["timestamp0", "timestamp1"]
            data = [container.timestamp0, container.timestamp1]
            for sd in container.arr:
                names.append(sd.name)
                data.append(pa.array(sd.arr, type=sd.target_type))
            table = pa.Table.from_arrays(data, names=names)
            fname = f"{container.name}{self.out_file_suffix}"
            self.logger.info(f"Writing file {fname!r}")
            pq.write_table(table, fname)
            result.append(table)
        return result
215
+
216
+
217
class CsvConverter(XcpLogFileDecoder):
    """Convert an .xmraw recording to one CSV file per DAQ list."""

    def __init__(self, recording_file_name: str, target_file_name: str = ""):
        super().__init__(
            recording_file_name=recording_file_name, out_file_suffix=".csv", remove_file=False, target_file_name=target_file_name
        )

    def on_initialize(self) -> None:
        self.csv_writers = []
        self.csv_files = []  # File handles, kept so they can be closed in on_finalize().
        super().on_initialize()

    def on_container(self, sc: StorageContainer) -> None:
        fname = f"{sc.name}{self.out_file_suffix}"
        self.logger.info(f"Creating file {fname!r}.")
        # BUG FIX: the original passed open(...) straight into csv.writer and
        # dropped the file object, so the CSV files were never explicitly
        # closed or flushed (resource leak / possible truncated output).
        out_file = open(fname, "w", newline="")
        self.csv_files.append(out_file)
        writer = csv.writer(out_file, dialect="excel")
        headers = ["timestamp0", "timestamp1"] + [e.name for e in sc.arr]
        writer.writerow(headers)
        self.csv_writers.append(writer)

    def on_finalize(self) -> None:
        for out_file in self.csv_files:
            out_file.close()
        self.logger.info("Done.")

    def on_daq_list(self, daq_list_num: int, timestamp0: int, timestamp1: int, measurements: list) -> None:
        writer = self.csv_writers[daq_list_num]
        data = [timestamp0, timestamp1, *measurements]
        writer.writerow(data)
243
+
244
+
245
class ExcelConverter(XcpLogFileDecoder):
    """Convert an .xmraw recording to a single .xlsx workbook, one sheet per DAQ list."""

    def __init__(self, recording_file_name: str, target_file_name: str = ""):
        super().__init__(recording_file_name=recording_file_name, out_file_suffix=".xlsx", target_file_name=target_file_name)

    def on_initialize(self) -> None:
        self.logger.info(f"Creating file {str(self.out_file_name)!r}.")
        self.xls_workbook = xlsxwriter.Workbook(self.out_file_name)
        self.xls_sheets = []
        self.rows = []  # Next free row index, one entry per sheet.
        super().on_initialize()

    def on_container(self, sc: StorageContainer) -> None:
        worksheet = self.xls_workbook.add_worksheet(sc.name)
        self.xls_sheets.append(worksheet)
        column_names = ["timestamp0", "timestamp1"] + [storage.name for storage in sc.arr]
        worksheet.write_row(0, 0, column_names)
        self.rows.append(1)  # Row 0 holds the header.

    def on_finalize(self) -> None:
        self.xls_workbook.close()
        self.logger.info("Done.")

    def on_daq_list(self, daq_list_num: int, timestamp0: int, timestamp1: int, measurements: list) -> None:
        current_row = self.rows[daq_list_num]
        self.xls_sheets[daq_list_num].write_row(current_row, 0, [timestamp0, timestamp1] + measurements)
        self.rows[daq_list_num] = current_row + 1
274
+
275
+
276
class HdfConverter(CollectRows, XcpLogFileDecoder):
    """Convert an .xmraw recording to a single HDF5 file, one group per DAQ list."""

    def __init__(self, recording_file_name: str, target_file_name: str = ""):
        super().__init__(recording_file_name=recording_file_name, out_file_suffix=".h5", target_file_name=target_file_name)

    def on_initialize(self) -> None:
        self.logger.info(f"Creating file {str(self.out_file_name)!r}")
        self.out_file = h5py.File(self.out_file_name, "w")
        super().on_initialize()

    def on_finalize(self) -> None:
        for container in self.tables:
            group = f"/{container.name}"
            self.out_file[f"{group}/timestamp0"] = container.timestamp0
            self.out_file[f"{group}/timestamp1"] = container.timestamp1
            for storage in container.arr:
                self.out_file[f"{group}/{storage.name}"] = storage.arr
            self.logger.info(f"Writing table {container.name!r}")
        self.logger.info("Done.")
        self.out_file.close()
297
+
298
+
299
class MdfConverter(CollectRows, XcpLogFileDecoder):
    """Convert an .xmraw recording to an ASAM MDF4 (.mf4) file via asammdf."""

    def __init__(self, recording_file_name: str, target_file_name: str = ""):
        super().__init__(
            recording_file_name=recording_file_name,
            out_file_suffix=".mf4",
            target_type_map=MAP_TO_NP,
            target_file_name=target_file_name,
        )

    def on_finalize(self) -> None:
        timestamp_info = self.parameters.timestamp_info
        header = HeaderBlock(
            abs_time=timestamp_info.timestamp_ns,
            tz_offset=timestamp_info.utc_offset,
            daylight_save_time=timestamp_info.dst_offset,
            time_flags=FLAG_HD_TIME_OFFSET_VALID,
        )
        header.comment = f"""<HDcomment><TX>Timezone: {timestamp_info.timezone}</TX></HDcomment>"""  # Test-Comment.
        mdf4 = MDF(version="4.10")
        mdf4.header = header
        for container in self.tables:
            timestamps = container.timestamp0
            signals = [
                Signal(samples=storage.arr, name=storage.name, timestamps=timestamps)
                for storage in container.arr
            ]
            self.logger.info(f"Appending data-group {container.name!r}")
            mdf4.append(signals, acq_name=container.name, comment="Created by pyXCP recorder")
        self.logger.info(f"Writing {str(self.out_file_name)!r}")
        mdf4.save(self.out_file_name, compression=2, overwrite=True)
        self.logger.info("Done.")
334
+
335
+
336
class SqliteConverter(XcpLogFileDecoder):
    """Convert an .xmraw recording to a SQLite3 database, one table per DAQ list."""

    # Recorder type tags -> SQLite column types (SQLite has no width-specific ints).
    MAP_TO_SQL = {
        "U8": "INTEGER",
        "I8": "INTEGER",
        "U16": "INTEGER",
        "I16": "INTEGER",
        "U32": "INTEGER",
        "I32": "INTEGER",
        "U64": "INTEGER",
        "I64": "INTEGER",
        "F32": "FLOAT",
        "F64": "FLOAT",
        "F16": "FLOAT",
        "BF16": "FLOAT",
    }

    def __init__(self, recording_file_name: str, target_file_name: str = ""):
        super().__init__(
            recording_file_name=recording_file_name,
            out_file_suffix=".sq3",
            target_type_map=self.MAP_TO_SQL,
            target_file_name=target_file_name,
        )

    def on_initialize(self) -> None:
        self.logger.info(f"Creating database {str(self.out_file_name)!r}.")
        self.create_database(self.out_file_name)
        self.insert_stmt = {}  # Per-table INSERT statement cache.
        super().on_initialize()

    def on_container(self, sc: StorageContainer) -> None:
        self.create_table(sc)
        self.logger.info(f"Creating table {sc.name!r}.")
        column_names = ", ".join(["timestamp0", "timestamp1"] + [r.name for r in sc.arr])
        placeholders = ", ".join("?" for _ in range(len(sc.arr) + 2))
        self.insert_stmt[sc.name] = f"INSERT INTO {sc.name}({column_names}) VALUES({placeholders})"

    def on_finalize(self) -> None:
        self.conn.commit()
        self.conn.close()
        # CONSISTENCY FIX: use the logger like every other converter (was print()).
        self.logger.info("Done.")

    def on_daq_list(self, daq_list_num: int, timestamp0: int, timestamp1: int, measurements: list) -> None:
        sc = self.tables[daq_list_num]
        insert_stmt = self.insert_stmt[sc.name]
        data = [timestamp0, timestamp1, *measurements]
        self.execute(insert_stmt, data)

    def create_database(self, db_name: str) -> None:
        """Create/open the database, tune PRAGMAs, and store the timestamp meta-data."""
        # Use the documented factory function instead of instantiating Connection directly.
        self.conn = sqlite3.connect(db_name)
        self.cursor = self.conn.cursor()
        self.execute("PRAGMA FOREIGN_KEYS=ON")
        self.execute(f"PRAGMA PAGE_SIZE={PAGESIZE}")
        self.execute("PRAGMA SYNCHRONOUS=OFF")
        self.execute("PRAGMA LOCKING_MODE=EXCLUSIVE")
        self.execute("PRAGMA TEMP_STORE=MEMORY")

        timestamp_info = self.parameters.timestamp_info
        self.execute(
            "CREATE TABLE timestamp_info(timestamp_ns INTEGER, utc_offset INTEGER, dst_offset INTEGER, timezone VARCHAR(255))"
        )
        self.execute("CREATE TABLE table_names(name VARCHAR(255))")
        self.execute(
            "INSERT INTO timestamp_info VALUES(?, ?, ?, ?)",
            [timestamp_info.timestamp_ns, timestamp_info.utc_offset, timestamp_info.dst_offset, timestamp_info.timezone],
        )

    def create_table(self, sc: StorageContainer) -> None:
        """Create the per-DAQ-list table and register its name in table_names."""
        columns = ["timestamp0 INTEGER", "timestamp1 INTEGER"]
        for elem in sc.arr:
            columns.append(f"{elem.name} {elem.target_type}")
        ddl = f"CREATE TABLE {sc.name}({', '.join(columns)})"
        self.execute(ddl)
        self.execute("INSERT INTO table_names VALUES(?)", [sc.name])

    def execute(self, *args) -> None:
        """Best-effort execute: a failing statement is logged, not raised (by design)."""
        try:
            self.cursor.execute(*args)
        except Exception as e:
            # CONSISTENCY FIX: log via the class logger instead of bare print().
            self.logger.error(f"SQL error: {e}")
418
+
419
+
420
# Registry: converter name (as accepted by convert_xmraw, lower-case) -> implementation class.
CONVERTERS = {
    "arrow": ArrowConverter,
    "csv": CsvConverter,
    "excel": ExcelConverter,
    "hdf5": HdfConverter,
    "mdf": MdfConverter,
    "sqlite3": SqliteConverter,
}
428
+
429
# Per-converter availability: name -> (dependency importable?, required package name).
# Entries hard-coded to True rely only on the standard library.
CONVERTER_REQUIREMENTS = {
    "arrow": (has_arrow, "pyarrow"),
    "csv": (True, "csv"),
    "excel": (has_xlsxwriter, "xlsxwriter"),
    "hdf5": (has_h5py, "h5py"),
    "mdf": (has_asammdf, "asammdf"),
    # BUG FIX: the package name was "csv" (copy/paste error); the sqlite3
    # converter depends on the stdlib sqlite3 module.
    "sqlite3": (True, "sqlite3"),
}
437
+
438
+
439
def convert_xmraw(converter_name: str, recording_file_name: str, target_file_name: str, *args, **kwargs) -> None:
    """Convert an .xmraw recording with the named converter.

    Parameters:
        converter_name: one of the keys of CONVERTERS (case-insensitive).
        recording_file_name: path of the .xmraw input file.
        target_file_name: forwarded to the converter
            (NOTE(review): currently ignored by XcpLogFileDecoder.__init__ -- confirm intent).

    Prints an error via the rich console and returns early when the converter
    name is unknown or its required package is not installed.
    """
    key = converter_name.lower()
    converter_class = CONVERTERS.get(key)
    if converter_class is None:
        console.print(f"Invalid converter name: {converter_name!r}")
        return
    available, pck_name = CONVERTER_REQUIREMENTS.get(key, (True, ""))
    if not available:
        console.print(f"Converter {converter_name!r} requires package {pck_name!r}.")
        console.print(f"Please run [green]pip install {pck_name}[/green] to install it.")
        return
    # BUG FIX: target_file_name was accepted but silently dropped; forward it
    # to the converter (every converter accepts the keyword).
    converter = converter_class(recording_file_name, target_file_name=target_file_name)
    converter.run()