pyxcp 0.22.25__cp313-cp313-win_amd64.whl → 0.22.26__cp313-cp313-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (35)
  1. pyxcp/__init__.py +1 -1
  2. pyxcp/config/__init__.py +3 -1
  3. pyxcp/cpp_ext/cpp_ext.cp310-win_amd64.pyd +0 -0
  4. pyxcp/cpp_ext/cpp_ext.cp311-win_amd64.pyd +0 -0
  5. pyxcp/cpp_ext/cpp_ext.cp312-win_amd64.pyd +0 -0
  6. pyxcp/cpp_ext/cpp_ext.cp313-win_amd64.pyd +0 -0
  7. pyxcp/cpp_ext/cpp_ext.cp38-win_amd64.pyd +0 -0
  8. pyxcp/cpp_ext/cpp_ext.cp39-win_amd64.pyd +0 -0
  9. pyxcp/daq_stim/stim.cp310-win_amd64.pyd +0 -0
  10. pyxcp/daq_stim/stim.cp311-win_amd64.pyd +0 -0
  11. pyxcp/daq_stim/stim.cp312-win_amd64.pyd +0 -0
  12. pyxcp/daq_stim/stim.cp313-win_amd64.pyd +0 -0
  13. pyxcp/daq_stim/stim.cp38-win_amd64.pyd +0 -0
  14. pyxcp/daq_stim/stim.cp39-win_amd64.pyd +0 -0
  15. pyxcp/examples/run_daq.py +0 -2
  16. pyxcp/master/errorhandler.py +7 -1
  17. pyxcp/recorder/__init__.py +7 -8
  18. pyxcp/recorder/converter/__init__.py +416 -2
  19. pyxcp/recorder/rekorder.cp310-win_amd64.pyd +0 -0
  20. pyxcp/recorder/rekorder.cp311-win_amd64.pyd +0 -0
  21. pyxcp/recorder/rekorder.cp312-win_amd64.pyd +0 -0
  22. pyxcp/recorder/rekorder.cp313-win_amd64.pyd +0 -0
  23. pyxcp/recorder/rekorder.cp38-win_amd64.pyd +0 -0
  24. pyxcp/recorder/rekorder.cp39-win_amd64.pyd +0 -0
  25. pyxcp/types.py +7 -1
  26. {pyxcp-0.22.25.dist-info → pyxcp-0.22.26.dist-info}/METADATA +1 -1
  27. {pyxcp-0.22.25.dist-info → pyxcp-0.22.26.dist-info}/RECORD +30 -35
  28. pyxcp/examples/ex_arrow.py +0 -109
  29. pyxcp/examples/ex_csv.py +0 -85
  30. pyxcp/examples/ex_excel.py +0 -95
  31. pyxcp/examples/ex_mdf.py +0 -124
  32. pyxcp/examples/ex_sqlite.py +0 -128
  33. {pyxcp-0.22.25.dist-info → pyxcp-0.22.26.dist-info}/LICENSE +0 -0
  34. {pyxcp-0.22.25.dist-info → pyxcp-0.22.26.dist-info}/WHEEL +0 -0
  35. {pyxcp-0.22.25.dist-info → pyxcp-0.22.26.dist-info}/entry_points.txt +0 -0
pyxcp/__init__.py CHANGED
@@ -17,4 +17,4 @@ tb_install(show_locals=True, max_frames=3) # Install custom exception handler.
 
  # if you update this manually, do not forget to update
  # .bumpversion.cfg and pyproject.toml.
- __version__ = "0.22.25"
+ __version__ = "0.22.26"
pyxcp/config/__init__.py CHANGED
@@ -822,9 +822,11 @@ if there is no response to a command.""",
  class General(Configurable):
      """ """
 
-     # loglevel = Unicode("INFO", help="Set the log level by value or name.").tag(config=True)
      disable_error_handling = Bool(False, help="Disable XCP error-handler for performance reasons.").tag(config=True)
      disconnect_response_optional = Bool(False, help="Ignore missing response on DISCONNECT request.").tag(config=True)
+     connect_retries = Integer(help="Number of CONNECT retries (None for infinite retries).", allow_none=True, default_value=3).tag(
+         config=True
+     )
      seed_n_key_dll = Unicode("", allow_none=False, help="Dynamic library used for slave resource unlocking.").tag(config=True)
      seed_n_key_dll_same_bit_width = Bool(False, help="").tag(config=True)
      seed_n_key_function = Callable(
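
Note: the new General.connect_retries option defaults to 3 and can be set like any other pyxcp trait. A minimal sketch, assuming the traitlets-style Python configuration file that pyxcp's Configurable classes read (the file name and values are illustrative only):

# hypothetical pyxcp_conf.py
c = get_config()  # provided by traitlets when the config file is loaded

c.General.connect_retries = 5       # give up after five failed CONNECT attempts
# c.General.connect_retries = None  # retry forever, per the trait's help text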
Binary file
Binary file
Binary file
Binary file
Binary file
Binary file
Binary file
Binary file
Binary file
Binary file
Binary file
Binary file
pyxcp/examples/run_daq.py CHANGED
@@ -137,8 +137,6 @@ with ap.run(policy=daq_parser) as x:
 
      x.cond_unlock("DAQ")  # DAQ resource is locked in many cases.
 
-     DAQ_LISTS[1].event_num = 0
-
      print("setup DAQ lists.")
      daq_parser.setup()  # Execute setup procedures.
      print("start DAQ lists.")
pyxcp/master/errorhandler.py CHANGED
@@ -356,6 +356,7 @@ class Executor(SingletonBase):
          self.arguments = arguments
          handler = Handler(inst, func, arguments)
          self.handlerStack.push(handler)
+         connect_retries = inst.config.connect_retries
          try:
              while True:
                  try:
@@ -366,9 +367,14 @@ class Executor(SingletonBase):
                      self.error_code = e.get_error_code()
                      handler.error_code = self.error_code
                  except XcpTimeoutError:
-                     # self.logger.error(f"XcpTimeoutError [{str(e)}]")
+                     is_connect = func.__name__ == "connect"
+                     self.logger.warning(f"XcpTimeoutError -- Service: {func.__name__!r}")
                      self.error_code = XcpError.ERR_TIMEOUT
                      handler.error_code = self.error_code
+                     if is_connect and connect_retries is not None:
+                         if connect_retries == 0:
+                             raise XcpTimeoutError("Maximum CONNECT retries reached.")
+                         connect_retries -= 1
                  except TimeoutError:
                      raise
                  except can.CanError:
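
Together with the new trait, the timeout path above bounds how often a failing CONNECT is retried before the error escapes to the caller. A hedged sketch of the caller-side effect, using the ArgumentParser/run() pattern from the bundled examples (the description string is a placeholder):

from pyxcp.cmdline import ArgumentParser

ap = ArgumentParser(description="CONNECT with bounded retries")

with ap.run() as x:
    # With the default connect_retries=3, a silent slave now raises
    # XcpTimeoutError("Maximum CONNECT retries reached.") instead of
    # looping indefinitely.
    x.connect()
    x.disconnect()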
pyxcp/recorder/__init__.py CHANGED
@@ -1,6 +1,7 @@
  #!/usr/bin/env python
  """XCP Frame Recording Facility.
  """
+
  from dataclasses import dataclass
  from typing import Union
 
@@ -16,15 +17,13 @@ else:
 
      from pyxcp.recorder.rekorder import DaqOnlinePolicy  # noqa: F401
      from pyxcp.recorder.rekorder import (
-         DaqRecorderPolicy,
-         Deserializer,
-         MeasurementParameters,
-         ValueHolder,
-         XcpLogFileDecoder,
-         _PyXcpLogFileReader,
-         _PyXcpLogFileWriter,
-         data_types,
+         DaqRecorderPolicy,  # noqa: F401
+         Deserializer,  # noqa: F401
+         MeasurementParameters,  # noqa: F401
+         ValueHolder,  # noqa: F401
      )
+     from pyxcp.recorder.rekorder import XcpLogFileDecoder as _XcpLogFileDecoder
+     from pyxcp.recorder.rekorder import _PyXcpLogFileReader, _PyXcpLogFileWriter, data_types
 
 
  DATA_TYPES = data_types()
pyxcp/recorder/converter/__init__.py CHANGED
@@ -1,8 +1,60 @@
1
+ """Convert pyXCPs .xmraw files to common data formats.
2
+ """
3
+
4
+ import csv
1
5
  import logging
6
+ import os
7
+ import sqlite3
2
8
  from array import array
3
9
  from dataclasses import dataclass, field
10
+ from mmap import PAGESIZE
11
+ from pathlib import Path
4
12
  from typing import Any, List
5
13
 
14
+ import numpy as np
15
+ from rich.logging import RichHandler
16
+
17
+
18
+ try:
19
+ import pyarrow as pa
20
+ import pyarrow.parquet as pq
21
+
22
+ has_arrow = True
23
+ except ImportError:
24
+ has_arrow = False
25
+
26
+ try:
27
+ import h5py
28
+
29
+ has_h5py = True
30
+ except ImportError:
31
+ has_h5py = False
32
+
33
+ try:
34
+ from asammdf import MDF, Signal
35
+ from asammdf.blocks.v4_blocks import HeaderBlock
36
+ from asammdf.blocks.v4_constants import FLAG_HD_TIME_OFFSET_VALID
37
+
38
+ has_asammdf = True
39
+ except ImportError:
40
+ has_asammdf = False
41
+
42
+ try:
43
+ import xlsxwriter
44
+
45
+ has_xlsxwriter = True
46
+
47
+ except ImportError:
48
+ has_xlsxwriter = False
49
+
50
+ from pyxcp import console
51
+ from pyxcp.recorder.rekorder import XcpLogFileDecoder as _XcpLogFileDecoder
52
+
53
+
54
+ FORMAT = "%(message)s"
55
+ logging.basicConfig(level="NOTSET", format=FORMAT, datefmt="[%X]", handlers=[RichHandler()])
56
+
57
+ log = logging.getLogger("rich")
6
58
 
7
59
  MAP_TO_ARRAY = {
8
60
  "U8": "B",
@@ -19,13 +71,26 @@ MAP_TO_ARRAY = {
19
71
  "BF16": "f",
20
72
  }
21
73
 
22
- logger = logging.getLogger("PyXCP")
74
+ MAP_TO_NP = {
75
+ "U8": np.uint8,
76
+ "I8": np.int8,
77
+ "U16": np.uint16,
78
+ "I16": np.int16,
79
+ "U32": np.uint32,
80
+ "I32": np.int32,
81
+ "U64": np.uint64,
82
+ "I64": np.int64,
83
+ "F32": np.float32,
84
+ "F64": np.float64,
85
+ "F16": np.float16,
86
+ "BF16": np.float16,
87
+ }
23
88
 
24
89
 
25
90
  @dataclass
26
91
  class Storage:
27
92
  name: str
28
- arrow_type: Any
93
+ target_type: Any
29
94
  arr: array
30
95
 
31
96
 
@@ -35,3 +100,352 @@ class StorageContainer:
35
100
  arr: List[Storage] = field(default_factory=[])
36
101
  ts0: List[int] = field(default_factory=lambda: array("Q"))
37
102
  ts1: List[int] = field(default_factory=lambda: array("Q"))
103
+
104
+
105
+ class XcpLogFileDecoder(_XcpLogFileDecoder):
106
+ """"""
107
+
108
+ def __init__(
109
+ self,
110
+ recording_file_name: str,
111
+ out_file_suffix: str,
112
+ remove_file: bool = True,
113
+ target_type_map: dict = None,
114
+ target_file_name: str = "",
115
+ ):
116
+ super().__init__(recording_file_name)
117
+ self.logger = logging.getLogger("PyXCP")
118
+ self.logger.setLevel(logging.DEBUG)
119
+ self.out_file_name = Path(recording_file_name).with_suffix(out_file_suffix)
120
+ self.out_file_suffix = out_file_suffix
121
+ self.target_type_map = target_type_map or {}
122
+ if remove_file:
123
+ try:
124
+ os.unlink(self.out_file_name)
125
+ except FileNotFoundError:
126
+ pass
127
+
128
+ def initialize(self) -> None:
129
+ self.on_initialize()
130
+
131
+ def on_initialize(self) -> None:
132
+ self.setup_containers()
133
+
134
+ def finalize(self) -> None:
135
+ self.on_finalize()
136
+
137
+ def on_finalize(self) -> None:
138
+ pass
139
+
140
+ def setup_containers(self) -> None:
141
+ self.tables = []
142
+ for dl in self.daq_lists:
143
+ result = []
144
+ for name, type_str in dl.headers:
145
+ array_txpe = MAP_TO_ARRAY[type_str]
146
+ target_type = self.target_type_map.get(type_str)
147
+ sd = Storage(name, target_type, array(array_txpe))
148
+ result.append(sd)
149
+ sc = StorageContainer(dl.name, result)
150
+ self.tables.append(sc)
151
+ self.on_container(sc)
152
+
153
+ def on_container(self, sc: StorageContainer) -> None:
154
+ pass
155
+
156
+
157
+ class CollectRows:
158
+
159
+ def on_daq_list(self, daq_list_num: int, timestamp0: int, timestamp1: int, measurements: list) -> None:
160
+ storage_container = self.tables[daq_list_num]
161
+ storage_container.ts0.append(timestamp0)
162
+ storage_container.ts1.append(timestamp1)
163
+ for idx, elem in enumerate(measurements):
164
+ storage = storage_container.arr[idx]
165
+ storage.arr.append(elem)
166
+
167
+
168
+ class ArrowConverter(CollectRows, XcpLogFileDecoder):
169
+ """"""
170
+
171
+ MAP_TO_ARROW = {
172
+ "U8": pa.uint8(),
173
+ "I8": pa.int8(),
174
+ "U16": pa.uint16(),
175
+ "I16": pa.int16(),
176
+ "U32": pa.uint32(),
177
+ "I32": pa.int32(),
178
+ "U64": pa.uint64(),
179
+ "I64": pa.int64(),
180
+ "F32": pa.float32(),
181
+ "F64": pa.float64(),
182
+ "F16": pa.float16(),
183
+ "BF16": pa.float16(),
184
+ }
185
+
186
+ def __init__(self, recording_file_name: str, target_file_name: str = ""):
187
+ super().__init__(
188
+ recording_file_name=recording_file_name,
189
+ out_file_suffix=".parquet",
190
+ remove_file=False,
191
+ target_type_map=self.MAP_TO_ARROW,
192
+ target_file_name=target_file_name,
193
+ )
194
+
195
+ def on_initialize(self) -> None:
196
+ super().on_initialize()
197
+
198
+ def on_finalize(self) -> None:
199
+ result = []
200
+ for arr in self.tables:
201
+ timestamp0 = arr.ts0
202
+ timestamp1 = arr.ts1
203
+ names = ["timestamp0", "timestamp1"]
204
+ data = [timestamp0, timestamp1]
205
+ for sd in arr.arr:
206
+ adt = pa.array(sd.arr, type=sd.target_type)
207
+ names.append(sd.name)
208
+ data.append(adt)
209
+ table = pa.Table.from_arrays(data, names=names)
210
+ fname = f"{arr.name}{self.out_file_suffix}"
211
+ self.logger.info(f"Writing file {fname!r}")
212
+ pq.write_table(table, fname)
213
+ result.append(table)
214
+ return result
215
+
216
+
217
+ class CsvConverter(XcpLogFileDecoder):
218
+
219
+ def __init__(self, recording_file_name: str, target_file_name: str = ""):
220
+ super().__init__(
221
+ recording_file_name=recording_file_name, out_file_suffix=".csv", remove_file=False, target_file_name=target_file_name
222
+ )
223
+
224
+ def on_initialize(self) -> None:
225
+ self.csv_writers = []
226
+ super().on_initialize()
227
+
228
+ def on_container(self, sc: StorageContainer) -> None:
229
+ fname = f"{sc.name}{self.out_file_suffix}"
230
+ self.logger.info(f"Creating file {fname!r}.")
231
+ writer = csv.writer(open(fname, "w", newline=""), dialect="excel")
232
+ headers = ["ts0", "ts1"] + [e.name for e in sc.arr]
233
+ writer.writerow(headers)
234
+ self.csv_writers.append(writer)
235
+
236
+ def on_finalize(self) -> None:
237
+ self.logger.info("Done.")
238
+
239
+ def on_daq_list(self, daq_list_num: int, timestamp0: int, timestamp1: int, measurements: list) -> None:
240
+ writer = self.csv_writers[daq_list_num]
241
+ data = [timestamp0, timestamp1, *measurements]
242
+ writer.writerow(data)
243
+
244
+
245
+ class ExcelConverter(XcpLogFileDecoder):
246
+
247
+ def __init__(self, recording_file_name: str, target_file_name: str = ""):
248
+ super().__init__(recording_file_name=recording_file_name, out_file_suffix=".xlsx", target_file_name=target_file_name)
249
+
250
+ def on_initialize(self) -> None:
251
+ self.logger.info(f"Creating file {str(self.out_file_name)!r}.")
252
+ self.xls_workbook = xlsxwriter.Workbook(self.out_file_name)
253
+ self.xls_sheets = []
254
+ self.rows = []
255
+ super().on_initialize()
256
+
257
+ def on_container(self, sc: StorageContainer) -> None:
258
+ sheet = self.xls_workbook.add_worksheet(sc.name)
259
+ self.xls_sheets.append(sheet)
260
+ headers = ["ts0", "ts1"] + [e.name for e in sc.arr]
261
+ sheet.write_row(0, 0, headers)
262
+ self.rows.append(1)
263
+
264
+ def on_finalize(self) -> None:
265
+ self.xls_workbook.close()
266
+ self.logger.info("Done.")
267
+
268
+ def on_daq_list(self, daq_list_num: int, timestamp0: int, timestamp1: int, measurements: list) -> None:
269
+ sheet = self.xls_sheets[daq_list_num]
270
+ row = self.rows[daq_list_num]
271
+ data = [timestamp0, timestamp1] + measurements
272
+ sheet.write_row(row, 0, data)
273
+ self.rows[daq_list_num] += 1
274
+
275
+
276
+ class HdfConverter(CollectRows, XcpLogFileDecoder):
277
+
278
+ def __init__(self, recording_file_name: str, target_file_name: str = ""):
279
+ super().__init__(recording_file_name=recording_file_name, out_file_suffix=".h5", target_file_name=target_file_name)
280
+
281
+ def on_initialize(self) -> None:
282
+ self.logger.info(f"Creating file {str(self.out_file_name)!r}")
283
+ self.out_file = h5py.File(self.out_file_name, "w")
284
+ super().on_initialize()
285
+
286
+ def on_finalize(self) -> None:
287
+ for arr in self.tables:
288
+ timestamp0 = arr.ts0
289
+ timestamp1 = arr.ts1
290
+ self.out_file[f"/{arr.name}/timestamp0"] = timestamp0
291
+ self.out_file[f"/{arr.name}/timestamp1"] = timestamp1
292
+ for sd in arr.arr:
293
+ self.out_file[f"/{arr.name}/{sd.name}"] = sd.arr
294
+ self.logger.info(f"Writing table {arr.name!r}")
295
+ self.logger.info("Done.")
296
+ self.out_file.close()
297
+
298
+
299
+ class MdfConverter(CollectRows, XcpLogFileDecoder):
300
+
301
+ def __init__(self, recording_file_name: str, target_file_name: str = ""):
302
+ super().__init__(
303
+ recording_file_name=recording_file_name,
304
+ out_file_suffix=".mf4",
305
+ target_type_map=MAP_TO_NP,
306
+ target_file_name=target_file_name,
307
+ )
308
+
309
+ def on_initialize(self) -> None:
310
+ super().on_initialize()
311
+
312
+ def on_finalize(self) -> None:
313
+ timestamp_info = self.parameters.timestamp_info
314
+ hdr = HeaderBlock(
315
+ abs_time=timestamp_info.timestamp_ns,
316
+ tz_offset=timestamp_info.utc_offset,
317
+ daylight_save_time=timestamp_info.dst_offset,
318
+ time_flags=FLAG_HD_TIME_OFFSET_VALID,
319
+ )
320
+ hdr.comment = f"""<HDcomment><TX>Timezone: {timestamp_info.timezone}</TX></HDcomment>""" # Test-Comment.
321
+ mdf4 = MDF(version="4.10")
322
+ mdf4.header = hdr
323
+ for idx, arr in enumerate(self.tables):
324
+ signals = []
325
+ timestamps = arr.ts0
326
+ for sd in arr.arr:
327
+ signal = Signal(samples=sd.arr, name=sd.name, timestamps=timestamps)
328
+ signals.append(signal)
329
+ self.logger.info(f"Appending data-group {arr.name!r}")
330
+ mdf4.append(signals, acq_name=arr.name, comment="Created by pyXCP recorder")
331
+ self.logger.info(f"Writing {str(self.out_file_name)!r}")
332
+ mdf4.save(self.out_file_name, compression=2, overwrite=True)
333
+ self.logger.info("Done.")
334
+
335
+
336
+ class SqliteConverter(XcpLogFileDecoder):
337
+ """ """
338
+
339
+ MAP_TO_SQL = {
340
+ "U8": "INTEGER",
341
+ "I8": "INTEGER",
342
+ "U16": "INTEGER",
343
+ "I16": "INTEGER",
344
+ "U32": "INTEGER",
345
+ "I32": "INTEGER",
346
+ "U64": "INTEGER",
347
+ "I64": "INTEGER",
348
+ "F32": "FLOAT",
349
+ "F64": "FLOAT",
350
+ "F16": "FLOAT",
351
+ "BF16": "FLOAT",
352
+ }
353
+
354
+ def __init__(self, recording_file_name: str, target_file_name: str = ""):
355
+ super().__init__(
356
+ recording_file_name=recording_file_name,
357
+ out_file_suffix=".sq3",
358
+ target_type_map=self.MAP_TO_SQL,
359
+ target_file_name=target_file_name,
360
+ )
361
+
362
+ def on_initialize(self) -> None:
363
+ self.logger.info(f"Creating database {str(self.out_file_name)!r}.")
364
+ self.create_database(self.out_file_name)
365
+ self.insert_stmt = {}
366
+ super().on_initialize()
367
+
368
+ def on_container(self, sc: StorageContainer) -> None:
369
+ self.create_table(sc)
370
+ self.logger.info(f"Creating table {sc.name!r}.")
371
+ self.insert_stmt[sc.name] = (
372
+ f"""INSERT INTO {sc.name}({', '.join(['ts0', 'ts1'] + [r.name for r in sc.arr])}) VALUES({', '.join(["?" for _ in range(len(sc.arr) + 2)])})"""
373
+ )
374
+
375
+ def on_finalize(self) -> None:
376
+ self.conn.commit()
377
+ self.conn.close()
378
+ print("Done.")
379
+
380
+ def on_daq_list(self, daq_list_num: int, timestamp0: int, timestamp1: int, measurements: list) -> None:
381
+ sc = self.tables[daq_list_num]
382
+ insert_stmt = self.insert_stmt[sc.name]
383
+ data = [timestamp0, timestamp1, *measurements]
384
+ self.execute(insert_stmt, data)
385
+
386
+ def create_database(self, db_name: str) -> None:
387
+ self.conn = sqlite3.Connection(db_name)
388
+ self.cursor = self.conn.cursor()
389
+ self.execute("PRAGMA FOREIGN_KEYS=ON")
390
+ self.execute(f"PRAGMA PAGE_SIZE={PAGESIZE}")
391
+ self.execute("PRAGMA SYNCHRONOUS=OFF")
392
+ self.execute("PRAGMA LOCKING_MODE=EXCLUSIVE")
393
+ self.execute("PRAGMA TEMP_STORE=MEMORY")
394
+
395
+ timestamp_info = self.parameters.timestamp_info
396
+ self.execute(
397
+ "CREATE TABLE timestamp_info(timestamp_ns INTEGER, utc_offset INTEGER, dst_offset INTEGER, timezone VARCHAR(255))"
398
+ )
399
+ self.execute("CREATE TABLE table_names(name VARCHAR(255))")
400
+ self.execute(
401
+ "INSERT INTO timestamp_info VALUES(?, ?, ?, ?)",
402
+ [timestamp_info.timestamp_ns, timestamp_info.utc_offset, timestamp_info.dst_offset, timestamp_info.timezone],
403
+ )
404
+
405
+ def create_table(self, sc: StorageContainer) -> None:
406
+ columns = ["ts0 INTEGER", "ts1 INTEGER"]
407
+ for elem in sc.arr:
408
+ columns.append(f"{elem.name} {elem.target_type}")
409
+ ddl = f"CREATE TABLE {sc.name}({', '.join(columns)})"
410
+ self.execute(ddl)
411
+ self.execute("INSERT INTO table_names VALUES(?)", [sc.name])
412
+
413
+ def execute(self, *args: List[str]) -> None:
414
+ try:
415
+ self.cursor.execute(*args)
416
+ except Exception as e:
417
+ print(e)
418
+
419
+
420
+ CONVERTERS = {
421
+ "arrow": ArrowConverter,
422
+ "csv": CsvConverter,
423
+ "excel": ExcelConverter,
424
+ "hdf5": HdfConverter,
425
+ "mdf": MdfConverter,
426
+ "sqlite3": SqliteConverter,
427
+ }
428
+
429
+ CONVERTER_REQUIREMENTS = {
430
+ "arrow": (has_arrow, "pyarrow"),
431
+ "csv": (True, "csv"),
432
+ "excel": (has_xlsxwriter, "xlsxwriter"),
433
+ "hdf5": (has_h5py, "h5py"),
434
+ "mdf": (has_asammdf, "asammdf"),
435
+ "sqlite3": (True, "csv"),
436
+ }
437
+
438
+
439
+ def convert_xmraw(converter_name: str, recording_file_name: str, target_file_name: str, *args, **kwargs) -> None:
440
+ converter_class = CONVERTERS.get(converter_name.lower())
441
+ if converter_class is None:
442
+ console.print(f"Invalid converter name: {converter_name!r}")
443
+ return
444
+ available, pck_name = CONVERTER_REQUIREMENTS.get(converter_name.lower(), (True, ""))
445
+ if not available:
446
+ console.print(f"Converter {converter_name!r} requires package {pck_name!r}.")
447
+ console.print(f"Please run [green]pip install {pck_name}[/green] to install it.")
448
+ return
449
+ # Path(*p.parts[:-1], p.stem)
450
+ converter = converter_class(recording_file_name)
451
+ converter.run()
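
The converter module replaces the standalone example scripts removed further below; every format is reachable through the CONVERTERS registry or the convert_xmraw() helper added above. A usage sketch (the .xmraw file name is a placeholder, and optional back-ends such as pyarrow must be installed separately):

from pyxcp.recorder.converter import convert_xmraw

# One .csv file per DAQ list, written next to the recording.
convert_xmraw("csv", "measurement.xmraw", "")

# Parquet via pyarrow; if the package is missing, convert_xmraw()
# prints an install hint instead of raising.
convert_xmraw("arrow", "measurement.xmraw", "")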
Binary file
Binary file
Binary file
Binary file
Binary file
Binary file
pyxcp/types.py CHANGED
@@ -491,9 +491,15 @@ SetCalPageMode = BitStruct(
      "ecu" / Flag,
  )
 
+ PagProperties = BitStruct(
+     Padding(7),
+     "freezeSupported" / Flag,
+ )
+
+
  GetPagProcessorInfoResponse = Struct(
      "maxSegments" / Int8ul,
-     "pagProperties" / Int8ul,
+     "pagProperties" / PagProperties,
  )
 
  GetSegmentInfoMode0Response = Struct(
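
The reworked response now decodes the PAG_PROPERTIES byte into a named flag instead of a raw integer. A self-contained sketch using the same construct declarations (the example payload bytes are made up):

from construct import BitStruct, Flag, Int8ul, Padding, Struct

PagProperties = BitStruct(
    Padding(7),
    "freezeSupported" / Flag,  # bit 0: FREEZE_SUPPORTED
)

GetPagProcessorInfoResponse = Struct(
    "maxSegments" / Int8ul,
    "pagProperties" / PagProperties,
)

info = GetPagProcessorInfoResponse.parse(bytes([2, 0x01]))
print(info.maxSegments)                    # 2
print(info.pagProperties.freezeSupported)  # True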
{pyxcp-0.22.25.dist-info → pyxcp-0.22.26.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: pyxcp
- Version: 0.22.25
+ Version: 0.22.26
  Summary: Universal Calibration Protocol for Python
  License: LGPLv3
  Keywords: automotive,ecu,xcp,asam,autosar
{pyxcp-0.22.25.dist-info → pyxcp-0.22.26.dist-info}/RECORD CHANGED
@@ -1,4 +1,4 @@
1
- pyxcp/__init__.py,sha256=NDu785DrhpFkKcUgVKRBeBhp6NC9yp9DVxG_MGx_3Dg,548
1
+ pyxcp/__init__.py,sha256=RJc-8q7P408Jmlwt5OMDoDop9h53YvraQnGC0mdVmtY,548
2
2
  pyxcp/aml/EtasCANMonitoring.a2l,sha256=EJYwe3Z3H24vyWAa6lUgcdKnQY8pwFxjyCN6ZU1ST8w,1509
3
3
  pyxcp/aml/EtasCANMonitoring.aml,sha256=xl0DdyeiIaLW0mmmJNAyJS0CQdOLSxt9dxfgrdSlU8Y,2405
4
4
  pyxcp/aml/ifdata_CAN.a2l,sha256=NCUnCUEEgRbZYSLGtUGwL2e7zJ8hrp0SbmLHGv8uY58,612
@@ -18,18 +18,18 @@ pyxcp/asamkeydll.c,sha256=dVEvU0S1kgIo62S0La-T8xHSw668LM_DYc_fiQ0No6g,2952
18
18
  pyxcp/asamkeydll.sh,sha256=DC2NKUMwvi39OQgJ6514Chr4wc1LYbTmQHmMq9jAHHs,59
19
19
  pyxcp/checksum.py,sha256=alze1JiZ2JmdRul9QzP_-fuAqJcNyYBbo35zBwEKqHk,11535
20
20
  pyxcp/cmdline.py,sha256=na3ZbWQ-5ezsi1MrkuxMTCAXonUF3X6-LutoneyE3dU,1529
21
- pyxcp/config/__init__.py,sha256=u9v-eUAJd8amEvkJsns5DCMhvYD3Lp3Ct0posmuqOec,41635
21
+ pyxcp/config/__init__.py,sha256=nToBy_zPFQZmCUgRZ1RUjlw-OulCcvoFuGFvPw242z8,41701
22
22
  pyxcp/config/legacy.py,sha256=4QdDheX8DbBKv5JVT72_C_cjCgKvZmhN3tJ6hsvBEtI,5220
23
23
  pyxcp/constants.py,sha256=9yGfujC0ImTYQWfn41wyw8pluJTSrhMGWIVeIZTgsLg,1160
24
24
  pyxcp/cpp_ext/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
25
25
  pyxcp/cpp_ext/bin.hpp,sha256=PwJloZek21la-RBSda2Hc0u_6gID0sfTduPeplaAyR4,2561
26
26
  pyxcp/cpp_ext/blockmem.hpp,sha256=ysaJwmTWGTfE54Outk3gJYOfAVFd_QaonBMtXLcXwCc,1242
27
- pyxcp/cpp_ext/cpp_ext.cp310-win_amd64.pyd,sha256=VWNTkyiub_-mX4OXAsEROyUfJz3fOSuyz_clzlwaYTo,279552
28
- pyxcp/cpp_ext/cpp_ext.cp311-win_amd64.pyd,sha256=2iSOT9Ulo4unK1j9kVlUw6RXxqxgcVrCgEYo3aXNPRM,281600
29
- pyxcp/cpp_ext/cpp_ext.cp312-win_amd64.pyd,sha256=LKQ6C5uI4Nc9ccjht56ZS3mA-9zL8TYRJ-TCLp9An18,285696
30
- pyxcp/cpp_ext/cpp_ext.cp313-win_amd64.pyd,sha256=7iNxDwc_INR372xxIpCDzFYH06X7pkTMaibShEsqj80,285696
31
- pyxcp/cpp_ext/cpp_ext.cp38-win_amd64.pyd,sha256=FvxLTjwiAyCQBEoqKOZ-fci6t1caxfVchOpwv7ruPzY,279552
32
- pyxcp/cpp_ext/cpp_ext.cp39-win_amd64.pyd,sha256=Jx5MDTdx6bNKSEvJjd_Wa-TnVHmexbmdjPqQARmVYx8,260096
27
+ pyxcp/cpp_ext/cpp_ext.cp310-win_amd64.pyd,sha256=6QbsSXVMsmE8L8Uo3JIojb_U9EOrdJc-MZk-2GI3Y5c,279552
28
+ pyxcp/cpp_ext/cpp_ext.cp311-win_amd64.pyd,sha256=BD3qqAHgl5HJP--VT_C5fbtSODPobAGd1ks0co8kq9E,281600
29
+ pyxcp/cpp_ext/cpp_ext.cp312-win_amd64.pyd,sha256=XhGYmwgFsebGsoIgPLJlRd9Mls0RfSZ9tOKXSSEHBLE,285696
30
+ pyxcp/cpp_ext/cpp_ext.cp313-win_amd64.pyd,sha256=gBnrRVydI4p8Xx0Bcq9Nx6JXnG20JTDMlKvwG0mCgic,285696
31
+ pyxcp/cpp_ext/cpp_ext.cp38-win_amd64.pyd,sha256=dAWWWsucJ22tqPC_06-Kr6nhz5nNe4m6YYeMhh384AY,279552
32
+ pyxcp/cpp_ext/cpp_ext.cp39-win_amd64.pyd,sha256=te0D-4CZw619YbwRrIxS8jmdS_UcPS2gQPCSb0Jx8Vg,260096
33
33
  pyxcp/cpp_ext/daqlist.hpp,sha256=g2hlxgoQorAGKHedZFZ0c2FQh1APMIA9sVB6M6hD_n8,7277
34
34
  pyxcp/cpp_ext/event.hpp,sha256=Z-1yxsEKsr81NnLVEWJ2ANA8FV7YsM7EbNxaw-elheE,1200
35
35
  pyxcp/cpp_ext/extension_wrapper.cpp,sha256=FXFjyruBjQYqjCYZZcajdYv6dvNnCggMAbWLqJmfuTM,4756
@@ -41,12 +41,12 @@ pyxcp/daq_stim/optimize/__init__.py,sha256=FUWK0GkNpNT-sUlhibp7xa2aSYpm6Flh5yA2w
41
41
  pyxcp/daq_stim/optimize/binpacking.py,sha256=Iltho5diKlJG-ltbmx053U2vOFRlCISolXK61T14l_I,1257
42
42
  pyxcp/daq_stim/scheduler.cpp,sha256=a7VK7kP2Hs8yMlcDAkXwJ0bH88lr_yz156sphcHS7Z4,715
43
43
  pyxcp/daq_stim/scheduler.hpp,sha256=U_6tUbebmzX5vVZS0EFSgTaPsyxMg6yRXHG_aPWA0x4,1884
44
- pyxcp/daq_stim/stim.cp310-win_amd64.pyd,sha256=DD7ufKxaaRW5DmBBsAe4whg6qoi8GQ62VT7IS7U8KlA,189440
45
- pyxcp/daq_stim/stim.cp311-win_amd64.pyd,sha256=5HWuBjjAG1IaP90UK3IeOMkBADYyCMMa_U9QyuPQxrk,192000
46
- pyxcp/daq_stim/stim.cp312-win_amd64.pyd,sha256=-LlfCB7oJoIOXTefEHFIwSTVgLLLnR-3JVSP2XLpop0,193536
47
- pyxcp/daq_stim/stim.cp313-win_amd64.pyd,sha256=PAr4aoDuqAtZujV3BOquY---8orQpfOujw8ez0nLsmA,193536
48
- pyxcp/daq_stim/stim.cp38-win_amd64.pyd,sha256=X_ftCGxwZ4slT40A21gSUwJq8J-vB8Q_VUvZgQkRB2c,189440
49
- pyxcp/daq_stim/stim.cp39-win_amd64.pyd,sha256=nM37yO0Em1w_FxpT4mL5a8IVkYT5wsKLON1b1lI6C6w,183296
44
+ pyxcp/daq_stim/stim.cp310-win_amd64.pyd,sha256=Ipaa8gxNZetfWUs8SHP-uQ-3bQ6aJzv7queAOKmcZBA,189440
45
+ pyxcp/daq_stim/stim.cp311-win_amd64.pyd,sha256=PeX2RT5L-kBqyfvg3AazPSrD1Fd93K9b1hB8fVzzTns,192000
46
+ pyxcp/daq_stim/stim.cp312-win_amd64.pyd,sha256=V1Zj_B8HRGVNOK7VvQ7A8FXao1pLZdJ68FAb2mZlQWA,193536
47
+ pyxcp/daq_stim/stim.cp313-win_amd64.pyd,sha256=s3-yxRXfcGmn6H07YmDAiihLJS1BHoWIqNa2DjVuph0,193536
48
+ pyxcp/daq_stim/stim.cp38-win_amd64.pyd,sha256=Sh3DMZHjlD4EOKom7gyLPp2yMyIcVZK02msoFsFdCrg,189440
49
+ pyxcp/daq_stim/stim.cp39-win_amd64.pyd,sha256=cfMM0LKRqcjWx-I0ijRk85aN9YEPlsToeNJU-dLBMec,183296
50
50
  pyxcp/daq_stim/stim.cpp,sha256=F2OG67W4KKwTTiUCxm-9egIv3TLFdOkRunX6xf7YOtc,177
51
51
  pyxcp/daq_stim/stim.hpp,sha256=U-uInRrA6OCdMl1l1SWbQ_KEPpnNYrWut924IvbW6R0,18508
52
52
  pyxcp/daq_stim/stim_wrapper.cpp,sha256=iT2yxJ3LRG7HoYC1bwhM3tCAxF9X_HHierBNsLRmTJg,1995
@@ -61,12 +61,7 @@ pyxcp/examples/conf_nixnet.json,sha256=BvXPrljPGzaRTNPch3K0XfU3KSBP1sVDDNP7yY850
61
61
  pyxcp/examples/conf_socket_can.toml,sha256=gTacQGm0p6fhPCMWC3ScLq9Xj-xJmNbjNXkjO4o7r8k,269
62
62
  pyxcp/examples/conf_sxi.json,sha256=cXwNGoOpvqhdjXBQcE8lKgTs50wi9beosWKskZGJ-nI,158
63
63
  pyxcp/examples/conf_sxi.toml,sha256=t-XsgRljcMdj0f3_CGRT60c77LeQPNbjIT17YxDK3Yg,125
64
- pyxcp/examples/ex_arrow.py,sha256=HvY5Lc7rL87-FgTTcZSQJLjSiTdfjfjMLu0mMmLpW10,3020
65
- pyxcp/examples/ex_csv.py,sha256=GNWQ3IatXj3Kg5MUX6p8tzJRUppGreON9dkrNiqdTtk,2461
66
- pyxcp/examples/ex_excel.py,sha256=VpoqRTv-rHz-MnaFKt5f7MqDrK9OLYyRJvVWzCFsayc,2828
67
- pyxcp/examples/ex_mdf.py,sha256=zfivlNkbbsfvwqsISttaoQk1R888r7UUtwSqocE60sU,3759
68
- pyxcp/examples/ex_sqlite.py,sha256=ludD0EIziBhBNnC3MOrQTGs06cl7iNyL2yefwe53zNc,4268
69
- pyxcp/examples/run_daq.py,sha256=KXL9myK3w7RCIgjZblU2i1XOP5l4V0YRQKNRXF7ISIo,5518
64
+ pyxcp/examples/run_daq.py,sha256=gSKkJ_S4QgP6Pgeu-gNoh_hGzxtrAgcbQ1AEEC2QWHE,5484
70
65
  pyxcp/examples/xcp_policy.py,sha256=io9tS2W-7PvR8ZzU203KolFrDp67eorUlwNWvA4kC5k,1921
71
66
  pyxcp/examples/xcp_read_benchmark.py,sha256=zOG0Yrji10vA0vhHa27KK7zgc3JDpzXzXsFnIU4C_AM,956
72
67
  pyxcp/examples/xcp_skel.py,sha256=YXLQC8nn8aAwYSVuBGhr1dvmdMBjmO1Ee1w3e5sy16s,1159
@@ -75,16 +70,16 @@ pyxcp/examples/xcp_user_supplied_driver.py,sha256=bL-6HDvPjgRErvYVdaRL_Pg1GDxu02
75
70
  pyxcp/examples/xcphello.py,sha256=xbcWq8StRJyUZBLUvknsXv7VkEBD5SU0SJjlZTHsSzs,2630
76
71
  pyxcp/examples/xcphello_recorder.py,sha256=QHWJsq5h5CI9t5qEmMSorZyzirTpoXz4nzuKTMzbZCA,3409
77
72
  pyxcp/master/__init__.py,sha256=QQbkUJM1WQ-5p2MiNFYxLAmHhNsCQLzDp-S4aoOFxoA,318
78
- pyxcp/master/errorhandler.py,sha256=U5QuvGRDM9kRNwY5kbkTOnthp19RHoXEGCsaBNiFTps,14973
73
+ pyxcp/master/errorhandler.py,sha256=ulL6WiraZbVZnM2pfR8S9vlWAAP5UXwXqmbjjxH9rgc,15359
79
74
  pyxcp/master/master.py,sha256=y5i5HTw5g4HKHW8tXfpNc0hmaQjle-3dsUkVFqPjUXc,78365
80
75
  pyxcp/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
81
- pyxcp/recorder/__init__.py,sha256=pg-cdOaoj-D-woFxFb2p6SpFTNTdpQEIknHdDaQ9ROE,2695
76
+ pyxcp/recorder/__init__.py,sha256=jeTmKvfjIenxHxt7zn6HMjnDpuPQU0d9SdnYK_t3gdE,2850
82
77
  pyxcp/recorder/build_clang.cmd,sha256=JvFngSnb28XcBGXxC6MGrcOCGYfahOIvHpgRpqbA6HQ,175
83
78
  pyxcp/recorder/build_clang.sh,sha256=zmU3nZxaNH1pxGWMyQ-S541TuVqxS00p3iPR9NUP4Ec,181
84
79
  pyxcp/recorder/build_gcc.cmd,sha256=zj732DdvqDzGAFg7dvF83DUpf8Qf6rQ0cqEaID15Z80,238
85
80
  pyxcp/recorder/build_gcc.sh,sha256=nCSh7G8xtxWtDNrMqNUxcjnm_CFpMeduIF0X-RSJtHA,211
86
81
  pyxcp/recorder/build_gcc_arm.sh,sha256=jEo6Mgt_aVDL3nHtffecXOrN6gRsEoaA3S4pPrAzpCE,240
87
- pyxcp/recorder/converter/__init__.py,sha256=avxsOiQJ-zrmQaS7qocp5yebAD-b5Lq4ehOzISUJuFw,700
82
+ pyxcp/recorder/converter/__init__.py,sha256=7gP3JmI5aPjVcoVqO7EqwdDKqzzidytH8a8pPKyRvAU,14779
88
83
  pyxcp/recorder/lz4.c,sha256=rOy3JE2SsOXvJ8a9pgGEfGpbDJnJR03dSVej0CwPmjg,120974
89
84
  pyxcp/recorder/lz4.h,sha256=Kz_2V6kvOunNHoPl9-EqxWDVCvYXbU0J-pkSnCeXubs,46483
90
85
  pyxcp/recorder/lz4hc.c,sha256=E56iE5CQ6fhQIVi3qNpxiIIP2sTGeC80JtVPyhidV6Q,88870
@@ -93,12 +88,12 @@ pyxcp/recorder/mio.hpp,sha256=5ASJLKSEykH0deAQD5uak-_yAgd5p2n8t06315GSGrg,63346
93
88
  pyxcp/recorder/reader.hpp,sha256=rr9XZ_ciL6eF2_xEqyt9XYNqTIze9ytAsnf8uYukO9U,5201
94
89
  pyxcp/recorder/reco.py,sha256=6N6FIwfCEVMpi5dr3eUOQa1lowcg2LCnS_sy_-b-UiQ,8725
95
90
  pyxcp/recorder/recorder.rst,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
96
- pyxcp/recorder/rekorder.cp310-win_amd64.pyd,sha256=CGdvShmnyjesXbPTQ8rM_bqpftDtXRZ1A7NjsGhkrDc,377856
97
- pyxcp/recorder/rekorder.cp311-win_amd64.pyd,sha256=_OCKW_sm_heL1rvvMqM3HiAb8oH9on2lvy42vroQFcM,380416
98
- pyxcp/recorder/rekorder.cp312-win_amd64.pyd,sha256=OVrH53mF32JztlRAIJAXYRL1FRWuUJjr0Z3IwrwnWDM,382976
99
- pyxcp/recorder/rekorder.cp313-win_amd64.pyd,sha256=pUzjWdVAhlQpdKQ6bH7mvNCxRFR4LXdZRlLAibEjINI,382976
100
- pyxcp/recorder/rekorder.cp38-win_amd64.pyd,sha256=_XW8GKdICxi0mzTRHwZ7Fc_afbCMOPrCQpNa-EozKnE,377856
101
- pyxcp/recorder/rekorder.cp39-win_amd64.pyd,sha256=IlUPHXqh86T7fBmgHC8EE7nTMuCkJMYTggM9u52LSwU,364032
91
+ pyxcp/recorder/rekorder.cp310-win_amd64.pyd,sha256=kdZ8tFAEG6DYBZhh6gq353ciHfADV_tr-4xODSm0Xtw,377856
92
+ pyxcp/recorder/rekorder.cp311-win_amd64.pyd,sha256=KY_0mU2ZWOtaTZohpLAlXPOBQ5ueyXlkDCqrh1Gjcwo,380416
93
+ pyxcp/recorder/rekorder.cp312-win_amd64.pyd,sha256=1G6rlgiKpjBNf5_Ipt7xsvDPrOZc2ErOiGu0Vd-Syj4,382976
94
+ pyxcp/recorder/rekorder.cp313-win_amd64.pyd,sha256=x9Q5QlCnynjavnRbOAcb6XzX5ArXez4N9HFojWK1IeU,382976
95
+ pyxcp/recorder/rekorder.cp38-win_amd64.pyd,sha256=QXsP963LBZg8uxjZqmFgxBlxxjFKiKKrvja-v7asr1c,377856
96
+ pyxcp/recorder/rekorder.cp39-win_amd64.pyd,sha256=MHWy_5Em0uDyGq39Rt3kmwMZRG9qYwcGN5tWug_i-Js,364032
102
97
  pyxcp/recorder/rekorder.cpp,sha256=U0LMyk8pZXx9emgS_WPVthvn_9IpgE7JGrh4kg-8CX4,1900
103
98
  pyxcp/recorder/rekorder.hpp,sha256=sWvRch9bVt6mmgrFHp5mwWhap7HoFG4geeb7UqEIzio,7638
104
99
  pyxcp/recorder/setup.py,sha256=_99XFPQAd5V4LcJaSGJwdnbxgxJ7kl8DEXfHsnKO1Yg,998
@@ -132,12 +127,12 @@ pyxcp/transport/eth.py,sha256=xPzN2oSALoPKJVvZpBljPSV1AxfpjRusOzymO-TD1Rw,8711
132
127
  pyxcp/transport/sxi.py,sha256=vM8WZIKuu_dNuqkxZM_1n6iQkQCCzo4ykWpiG6ba8Fk,4695
133
128
  pyxcp/transport/transport_wrapper.cpp,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
134
129
  pyxcp/transport/usb_transport.py,sha256=JuYrwkWsUdibdVNA57LBEQT3a3ykOgWPdWcfqj96nDE,8343
135
- pyxcp/types.py,sha256=wm3tOocuAln4jpH_mDguPRYtIzHSQ_KQBYNATFB2WXc,26068
130
+ pyxcp/types.py,sha256=mjp3FhsTTbS3D5VuC-dfdbMql0lJwEfbZjf8a2pHi1o,26158
136
131
  pyxcp/utils.py,sha256=unlg0CoNwcWYfd-BE0hZJ93uhlAoW_nryv9tS_R3C44,2969
137
132
  pyxcp/vector/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
138
133
  pyxcp/vector/map.py,sha256=7Gnhvr79geMeqqGVIJPxODXGwABdNDinnqzhpooN5TE,2306
139
- pyxcp-0.22.25.dist-info/entry_points.txt,sha256=2JbL-pWn9UxpBrS64aWiFFkq9x2A7y-dkrxYlfQqIJU,307
140
- pyxcp-0.22.25.dist-info/LICENSE,sha256=fTqV5eBpeAZO0_jit8j4Ref9ikBSlHJ8xwj5TLg7gFk,7817
141
- pyxcp-0.22.25.dist-info/METADATA,sha256=oUrQTmW0BB6APl8u-j-RKbfm75jwXGBVnr72VmiaOjo,4088
142
- pyxcp-0.22.25.dist-info/WHEEL,sha256=CAXnmTOdevtyHhGbx1uQxRlUnaXo4-LVkad_lFClKZs,98
143
- pyxcp-0.22.25.dist-info/RECORD,,
134
+ pyxcp-0.22.26.dist-info/entry_points.txt,sha256=2JbL-pWn9UxpBrS64aWiFFkq9x2A7y-dkrxYlfQqIJU,307
135
+ pyxcp-0.22.26.dist-info/LICENSE,sha256=fTqV5eBpeAZO0_jit8j4Ref9ikBSlHJ8xwj5TLg7gFk,7817
136
+ pyxcp-0.22.26.dist-info/METADATA,sha256=_lhdH3MGmewLKRJk-lBDNRuqSu2n9VJs_hbwWsuSUD8,4088
137
+ pyxcp-0.22.26.dist-info/WHEEL,sha256=CAXnmTOdevtyHhGbx1uQxRlUnaXo4-LVkad_lFClKZs,98
138
+ pyxcp-0.22.26.dist-info/RECORD,,
pyxcp/examples/ex_arrow.py DELETED
@@ -1,109 +0,0 @@
1
- import argparse
2
- import logging
3
- from array import array
4
- from dataclasses import dataclass, field
5
- from typing import Any, List
6
-
7
- import pyarrow as pa
8
- import pyarrow.parquet as pq
9
-
10
- from pyxcp.recorder import XcpLogFileDecoder
11
-
12
-
13
- MAP_TO_ARROW = {
14
- "U8": pa.uint8(),
15
- "I8": pa.int8(),
16
- "U16": pa.uint16(),
17
- "I16": pa.int16(),
18
- "U32": pa.uint32(),
19
- "I32": pa.int32(),
20
- "U64": pa.uint64(),
21
- "I64": pa.int64(),
22
- "F32": pa.float32(),
23
- "F64": pa.float64(),
24
- "F16": pa.float16(),
25
- "BF16": pa.float16(),
26
- }
27
-
28
- MAP_TO_ARRAY = {
29
- "U8": "B",
30
- "I8": "b",
31
- "U16": "H",
32
- "I16": "h",
33
- "U32": "L",
34
- "I32": "l",
35
- "U64": "Q",
36
- "I64": "q",
37
- "F32": "f",
38
- "F64": "d",
39
- "F16": "f",
40
- "BF16": "f",
41
- }
42
-
43
- logger = logging.getLogger("PyXCP")
44
-
45
- parser = argparse.ArgumentParser(description="Use .xmraw files in an Apache Arrow application.")
46
- parser.add_argument("xmraw_file", help=".xmraw file")
47
- args = parser.parse_args()
48
-
49
-
50
- @dataclass
51
- class Storage:
52
- name: str
53
- arrow_type: Any
54
- arr: array
55
-
56
-
57
- @dataclass
58
- class StorageContainer:
59
- name: str
60
- arr: List[Storage] = field(default_factory=[])
61
- ts0: List[int] = field(default_factory=lambda: array("Q"))
62
- ts1: List[int] = field(default_factory=lambda: array("Q"))
63
-
64
-
65
- class Decoder(XcpLogFileDecoder):
66
-
67
- def initialize(self) -> None:
68
- self.arrow_tables = []
69
- for dl in self.daq_lists:
70
- result = []
71
- for name, type_str in dl.headers:
72
- array_txpe = MAP_TO_ARRAY[type_str]
73
- arrow_type = MAP_TO_ARROW[type_str]
74
- sd = Storage(name, arrow_type, array(array_txpe))
75
- print(f"\t{name!r} {array_txpe} {arrow_type}", sd)
76
- result.append(sd)
77
- sc = StorageContainer(dl.name, result)
78
- self.arrow_tables.append(sc)
79
-
80
- def finalize(self) -> Any:
81
- result = []
82
- for arr in self.arrow_tables:
83
- timestamp0 = arr.ts0
84
- timestamp1 = arr.ts1
85
- names = ["timestamp0", "timestamp1"]
86
- data = [timestamp0, timestamp1]
87
- for sd in arr.arr:
88
- adt = pa.array(sd.arr, type=sd.arrow_type)
89
- names.append(sd.name)
90
- data.append(adt)
91
- table = pa.Table.from_arrays(data, names=names)
92
- fname = f"{arr.name}.parquet"
93
- print("Writing table", fname)
94
- pq.write_table(table, fname)
95
- print("done.", table.shape)
96
- result.append(table)
97
- return result
98
-
99
- def on_daq_list(self, daq_list_num: int, timestamp0: int, timestamp1: int, measurements: list) -> None:
100
- sc = self.arrow_tables[daq_list_num]
101
- sc.ts0.append(timestamp0)
102
- sc.ts1.append(timestamp1)
103
- for idx, elem in enumerate(measurements):
104
- sto = sc.arr[idx]
105
- sto.arr.append(elem)
106
-
107
-
108
- decoder = Decoder(args.xmraw_file)
109
- res = decoder.run()
pyxcp/examples/ex_csv.py DELETED
@@ -1,85 +0,0 @@
1
- import argparse
2
- import logging
3
- import os
4
- import csv
5
- from array import array
6
- from dataclasses import dataclass, field
7
- from mmap import PAGESIZE
8
- from pathlib import Path
9
- from typing import Any, List
10
-
11
- from pyxcp.recorder import XcpLogFileDecoder
12
- from pyxcp.recorder.converter import MAP_TO_ARRAY
13
-
14
-
15
- MAP_TO_SQL = {
16
- "U8": "INTEGER",
17
- "I8": "INTEGER",
18
- "U16": "INTEGER",
19
- "I16": "INTEGER",
20
- "U32": "INTEGER",
21
- "I32": "INTEGER",
22
- "U64": "INTEGER",
23
- "I64": "INTEGER",
24
- "F32": "FLOAT",
25
- "F64": "FLOAT",
26
- "F16": "FLOAT",
27
- "BF16": "FLOAT",
28
- }
29
-
30
- logger = logging.getLogger("PyXCP")
31
-
32
- parser = argparse.ArgumentParser(description="Use .xmraw files in an Apache Arrow application.")
33
- parser.add_argument("xmraw_file", help=".xmraw file")
34
- args = parser.parse_args()
35
-
36
-
37
- @dataclass
38
- class Storage:
39
- name: str
40
- arrow_type: Any
41
- arr: array
42
-
43
-
44
- @dataclass
45
- class StorageContainer:
46
- name: str
47
- arr: List[Storage] = field(default_factory=[])
48
- ts0: List[int] = field(default_factory=lambda: array("Q"))
49
- ts1: List[int] = field(default_factory=lambda: array("Q"))
50
-
51
-
52
- class Decoder(XcpLogFileDecoder):
53
-
54
- def __init__(self, recording_file_name: str):
55
- super().__init__(recording_file_name)
56
-
57
- def initialize(self) -> None:
58
- self.arrow_tables = []
59
- self.csv_writers = []
60
- for dl in self.daq_lists:
61
- result = []
62
- for name, type_str in dl.headers:
63
- array_txpe = MAP_TO_ARRAY[type_str]
64
- sql_type = MAP_TO_SQL[type_str]
65
- sd = Storage(name, sql_type, array(array_txpe))
66
- result.append(sd)
67
- sc = StorageContainer(dl.name, result)
68
- writer = csv.writer(open(f"{sc.name}.csv", "w", newline=""), dialect="excel")
69
- headers = ["ts0", "ts1"] + [e.name for e in sc.arr]
70
- writer.writerow(headers)
71
- self.csv_writers.append(writer)
72
- self.arrow_tables.append(sc)
73
- print("\nInserting data...")
74
-
75
- def finalize(self) -> None:
76
- print("Done.")
77
-
78
- def on_daq_list(self, daq_list_num: int, timestamp0: int, timestamp1: int, measurements: list) -> None:
79
- sc = self.arrow_tables[daq_list_num]
80
- writer = self.csv_writers[daq_list_num]
81
- data = [timestamp0, timestamp1, *measurements]
82
- writer.writerow(data)
83
-
84
- decoder = Decoder(args.xmraw_file)
85
- decoder.run()
pyxcp/examples/ex_excel.py DELETED
@@ -1,95 +0,0 @@
1
- import argparse
2
- import logging
3
- import os
4
- import xlsxwriter
5
- from array import array
6
- from dataclasses import dataclass, field
7
- from mmap import PAGESIZE
8
- from pathlib import Path
9
- from typing import Any, List
10
-
11
- from pyxcp.recorder import XcpLogFileDecoder
12
- from pyxcp.recorder.converter import MAP_TO_ARRAY
13
-
14
-
15
- MAP_TO_SQL = {
16
- "U8": "INTEGER",
17
- "I8": "INTEGER",
18
- "U16": "INTEGER",
19
- "I16": "INTEGER",
20
- "U32": "INTEGER",
21
- "I32": "INTEGER",
22
- "U64": "INTEGER",
23
- "I64": "INTEGER",
24
- "F32": "FLOAT",
25
- "F64": "FLOAT",
26
- "F16": "FLOAT",
27
- "BF16": "FLOAT",
28
- }
29
-
30
- logger = logging.getLogger("PyXCP")
31
-
32
- parser = argparse.ArgumentParser(description="Use .xmraw files in an Apache Arrow application.")
33
- parser.add_argument("xmraw_file", help=".xmraw file")
34
- args = parser.parse_args()
35
-
36
-
37
- @dataclass
38
- class Storage:
39
- name: str
40
- arrow_type: Any
41
- arr: array
42
-
43
-
44
- @dataclass
45
- class StorageContainer:
46
- name: str
47
- arr: List[Storage] = field(default_factory=[])
48
- ts0: List[int] = field(default_factory=lambda: array("Q"))
49
- ts1: List[int] = field(default_factory=lambda: array("Q"))
50
-
51
-
52
- class Decoder(XcpLogFileDecoder):
53
-
54
- def __init__(self, recording_file_name: str):
55
- super().__init__(recording_file_name)
56
- self.xls_file_name = Path(recording_file_name).with_suffix(".xlsx")
57
- try:
58
- os.unlink(self.xls_file_name)
59
- except Exception as e:
60
- print(e)
61
-
62
- def initialize(self) -> None:
63
- self.arrow_tables = []
64
- self.xls_workbook = xlsxwriter.Workbook(self.xls_file_name)
65
- self.xls_sheets = []
66
- self.rows = []
67
- for dl in self.daq_lists:
68
- result = []
69
- for name, type_str in dl.headers:
70
- array_txpe = MAP_TO_ARRAY[type_str]
71
- sql_type = MAP_TO_SQL[type_str]
72
- sd = Storage(name, sql_type, array(array_txpe))
73
- result.append(sd)
74
- sc = StorageContainer(dl.name, result)
75
- sheet = self.xls_workbook.add_worksheet(sc.name)
76
- self.xls_sheets.append(sheet)
77
- headers = ["ts0", "ts1"] + [e.name for e in sc.arr]
78
- sheet.write_row(0, 0, headers)
79
- self.rows.append(1)
80
- self.arrow_tables.append(sc)
81
- print("\nInserting data...")
82
-
83
- def finalize(self) -> None:
84
- self.xls_workbook.close()
85
- print("Done.")
86
-
87
- def on_daq_list(self, daq_list_num: int, timestamp0: int, timestamp1: int, measurements: list) -> None:
88
- sheet = self.xls_sheets[daq_list_num]
89
- row = self.rows[daq_list_num]
90
- data = [timestamp0, timestamp1] + measurements
91
- sheet.write_row(row, 0, data)
92
- self.rows[daq_list_num] += 1
93
-
94
- decoder = Decoder(args.xmraw_file)
95
- decoder.run()
pyxcp/examples/ex_mdf.py DELETED
@@ -1,124 +0,0 @@
1
- import argparse
2
- import logging
3
- from array import array
4
- from dataclasses import dataclass, field
5
- from pathlib import Path
6
- from typing import Any, List
7
-
8
- import numpy as np
9
- from asammdf import MDF, Signal
10
- from asammdf.blocks.v4_blocks import HeaderBlock # ChannelGroup
11
- from asammdf.blocks.v4_constants import FLAG_HD_TIME_OFFSET_VALID # FLAG_HD_LOCAL_TIME,
12
-
13
- from pyxcp.recorder import XcpLogFileDecoder
14
-
15
-
16
- MAP_TO_NP = {
17
- "U8": np.uint8,
18
- "I8": np.int8,
19
- "U16": np.uint16,
20
- "I16": np.int16,
21
- "U32": np.uint32,
22
- "I32": np.int32,
23
- "U64": np.uint64,
24
- "I64": np.int64,
25
- "F32": np.float32,
26
- "F64": np.float64,
27
- "F16": np.float16,
28
- "BF16": np.float16,
29
- }
30
-
31
- MAP_TO_ARRAY = {
32
- "U8": "B",
33
- "I8": "b",
34
- "U16": "H",
35
- "I16": "h",
36
- "U32": "L",
37
- "I32": "l",
38
- "U64": "Q",
39
- "I64": "q",
40
- "F32": "f",
41
- "F64": "d",
42
- "F16": "f",
43
- # "BF16"
44
- }
45
-
46
- logger = logging.getLogger("PyXCP")
47
-
48
- parser = argparse.ArgumentParser(description="Use .xmraw files in an Apache Arrow application.")
49
- parser.add_argument("xmraw_file", help=".xmraw file")
50
- args = parser.parse_args()
51
-
52
-
53
- @dataclass
54
- class Storage:
55
- name: str
56
- arrow_type: Any
57
- arr: array
58
-
59
-
60
- @dataclass
61
- class StorageContainer:
62
- name: str
63
- arr: list[Storage] = field(default_factory=[])
64
- ts0: List[int] = field(default_factory=lambda: array("Q"))
65
- ts1: List[int] = field(default_factory=lambda: array("Q"))
66
-
67
-
68
- class Decoder(XcpLogFileDecoder):
69
-
70
- def __init__(self, recording_file_name: str):
71
- super().__init__(recording_file_name)
72
- self.mdf_file_name = Path(recording_file_name).with_suffix(".mf4")
73
-
74
- def initialize(self) -> None:
75
- self.tables = []
76
- for dl in self.daq_lists:
77
- result = []
78
- for name, type_str in dl.headers:
79
- array_txpe = MAP_TO_ARRAY[type_str]
80
- arrow_type = MAP_TO_NP[type_str]
81
- sd = Storage(name, arrow_type, array(array_txpe))
82
- result.append(sd)
83
- sc = StorageContainer(dl.name, result)
84
- self.tables.append(sc)
85
- print("Extracting DAQ lists...")
86
-
87
- def finalize(self) -> None:
88
- print("Creating MDF result...")
89
- timestamp_info = self.parameters.timestamp_info
90
- hdr = HeaderBlock(
91
- abs_time=timestamp_info.timestamp_ns,
92
- tz_offset=timestamp_info.utc_offset,
93
- daylight_save_time=timestamp_info.dst_offset,
94
- time_flags=FLAG_HD_TIME_OFFSET_VALID,
95
- )
96
- hdr.comment = f"""<HDcomment><TX>Timezone: {timestamp_info.timezone}</TX></HDcomment>""" # Test-Comment.
97
- mdf4 = MDF(version="4.10")
98
- mdf4.header = hdr
99
- # result = []
100
- for idx, arr in enumerate(self.tables):
101
- signals = []
102
- timestamps = arr.ts0
103
- for sd in arr.arr:
104
-
105
- signal = Signal(samples=sd.arr, name=sd.name, timestamps=timestamps)
106
- signals.append(signal)
107
- print(f"Appending data-group {arr.name!r}")
108
- mdf4.append(signals, acq_name=arr.name, comment="Created by pyXCP recorder")
109
- print(f"Writing '{self.mdf_file_name!s}'")
110
- mdf4.save(self.mdf_file_name, compression=2, overwrite=True)
111
- print("Done.")
112
- return mdf4
113
-
114
- def on_daq_list(self, daq_list_num: int, timestamp0: int, timestamp1: int, measurements: list) -> None:
115
- sc = self.tables[daq_list_num]
116
- sc.ts0.append(timestamp0)
117
- sc.ts1.append(timestamp1)
118
- for idx, elem in enumerate(measurements):
119
- sto = sc.arr[idx]
120
- sto.arr.append(elem)
121
-
122
-
123
- decoder = Decoder(args.xmraw_file)
124
- res = decoder.run()
pyxcp/examples/ex_sqlite.py DELETED
@@ -1,128 +0,0 @@
1
- import argparse
2
- import logging
3
- import os
4
- import sqlite3
5
- from array import array
6
- from dataclasses import dataclass, field
7
- from mmap import PAGESIZE
8
- from pathlib import Path
9
- from typing import Any, List
10
-
11
- from pyxcp.recorder import XcpLogFileDecoder
12
- from pyxcp.recorder.converter import MAP_TO_ARRAY
13
-
14
-
15
- MAP_TO_SQL = {
16
- "U8": "INTEGER",
17
- "I8": "INTEGER",
18
- "U16": "INTEGER",
19
- "I16": "INTEGER",
20
- "U32": "INTEGER",
21
- "I32": "INTEGER",
22
- "U64": "INTEGER",
23
- "I64": "INTEGER",
24
- "F32": "FLOAT",
25
- "F64": "FLOAT",
26
- "F16": "FLOAT",
27
- "BF16": "FLOAT",
28
- }
29
-
30
- logger = logging.getLogger("PyXCP")
31
-
32
- parser = argparse.ArgumentParser(description="Use .xmraw files in an Apache Arrow application.")
33
- parser.add_argument("xmraw_file", help=".xmraw file")
34
- args = parser.parse_args()
35
-
36
-
37
- @dataclass
38
- class Storage:
39
- name: str
40
- arrow_type: Any
41
- arr: array
42
-
43
-
44
- @dataclass
45
- class StorageContainer:
46
- name: str
47
- arr: List[Storage] = field(default_factory=[])
48
- ts0: List[int] = field(default_factory=lambda: array("Q"))
49
- ts1: List[int] = field(default_factory=lambda: array("Q"))
50
-
51
-
52
- class Decoder(XcpLogFileDecoder):
53
-
54
- def __init__(self, recording_file_name: str):
55
- super().__init__(recording_file_name)
56
- self.sq3_file_name = Path(recording_file_name).with_suffix(".sq3")
57
- try:
58
- os.unlink(self.sq3_file_name)
59
- except Exception as e:
60
- print(e)
61
-
62
- def initialize(self) -> None:
63
- self.create_database(self.sq3_file_name)
64
- self.arrow_tables = []
65
- self.insert_stmt = {}
66
- for dl in self.daq_lists:
67
- result = []
68
- for name, type_str in dl.headers:
69
- array_txpe = MAP_TO_ARRAY[type_str]
70
- sql_type = MAP_TO_SQL[type_str]
71
- sd = Storage(name, sql_type, array(array_txpe))
72
- result.append(sd)
73
- sc = StorageContainer(dl.name, result)
74
- print(f"Creating table {sc.name!r}.")
75
- self.create_table(sc)
76
- self.insert_stmt[sc.name] = (
77
- f"""INSERT INTO {sc.name}({', '.join(['ts0', 'ts1'] + [r.name for r in sc.arr])}) VALUES({', '.join(["?" for _ in range(len(sc.arr) + 2)])})"""
78
- )
79
- self.arrow_tables.append(sc)
80
- print("\nInserting data...")
81
-
82
- def create_database(self, db_name: str) -> None:
83
- self.conn = sqlite3.Connection(db_name)
84
- self.cursor = self.conn.cursor()
85
- self.execute("PRAGMA FOREIGN_KEYS=ON")
86
- self.execute(f"PRAGMA PAGE_SIZE={PAGESIZE}")
87
- self.execute("PRAGMA SYNCHRONOUS=OFF")
88
- self.execute("PRAGMA LOCKING_MODE=EXCLUSIVE")
89
- self.execute("PRAGMA TEMP_STORE=MEMORY")
90
-
91
- timestamp_info = self.parameters.timestamp_info
92
- self.execute(
93
- "CREATE TABLE timestamp_info(timestamp_ns INTEGER, utc_offset INTEGER, dst_offset INTEGER, timezone VARCHAR(255))"
94
- )
95
- self.execute("CREATE TABLE table_names(name VARCHAR(255))")
96
- self.execute(
97
- "INSERT INTO timestamp_info VALUES(?, ?, ?, ?)",
98
- [timestamp_info.timestamp_ns, timestamp_info.utc_offset, timestamp_info.dst_offset, timestamp_info.timezone],
99
- )
100
-
101
- def create_table(self, sc: StorageContainer) -> None:
102
- columns = ["ts0 INTEGER", "ts1 INTEGER"]
103
- for elem in sc.arr:
104
- columns.append(f"{elem.name} {elem.arrow_type}")
105
- ddl = f"CREATE TABLE {sc.name}({', '.join(columns)})"
106
- self.execute(ddl)
107
- self.execute("INSERT INTO table_names VALUES(?)", [sc.name])
108
-
109
- def execute(self, *args: List[str]) -> None:
110
- try:
111
- self.cursor.execute(*args)
112
- except Exception as e:
113
- print(e)
114
-
115
- def finalize(self) -> None:
116
- self.conn.commit()
117
- self.conn.close()
118
- print("Done.")
119
-
120
- def on_daq_list(self, daq_list_num: int, timestamp0: int, timestamp1: int, measurements: list) -> None:
121
- sc = self.arrow_tables[daq_list_num]
122
- insert_stmt = self.insert_stmt[sc.name]
123
- data = [timestamp0, timestamp1, *measurements]
124
- self.execute(insert_stmt, data)
125
-
126
-
127
- decoder = Decoder(args.xmraw_file)
128
- decoder.run()