pyxcp 0.25.2__cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyxcp/__init__.py +20 -0
- pyxcp/aml/EtasCANMonitoring.a2l +82 -0
- pyxcp/aml/EtasCANMonitoring.aml +67 -0
- pyxcp/aml/XCP_Common.aml +408 -0
- pyxcp/aml/XCPonCAN.aml +78 -0
- pyxcp/aml/XCPonEth.aml +33 -0
- pyxcp/aml/XCPonFlx.aml +113 -0
- pyxcp/aml/XCPonSxI.aml +66 -0
- pyxcp/aml/XCPonUSB.aml +106 -0
- pyxcp/aml/ifdata_CAN.a2l +20 -0
- pyxcp/aml/ifdata_Eth.a2l +11 -0
- pyxcp/aml/ifdata_Flx.a2l +94 -0
- pyxcp/aml/ifdata_SxI.a2l +13 -0
- pyxcp/aml/ifdata_USB.a2l +81 -0
- pyxcp/asam/__init__.py +0 -0
- pyxcp/asam/types.py +131 -0
- pyxcp/asamkeydll +0 -0
- pyxcp/asamkeydll.c +116 -0
- pyxcp/asamkeydll.sh +2 -0
- pyxcp/checksum.py +732 -0
- pyxcp/cmdline.py +83 -0
- pyxcp/config/__init__.py +1257 -0
- pyxcp/config/legacy.py +120 -0
- pyxcp/constants.py +47 -0
- pyxcp/cpp_ext/__init__.py +0 -0
- pyxcp/cpp_ext/aligned_buffer.hpp +168 -0
- pyxcp/cpp_ext/bin.hpp +105 -0
- pyxcp/cpp_ext/blockmem.hpp +58 -0
- pyxcp/cpp_ext/cpp_ext.cpython-310-x86_64-linux-gnu.so +0 -0
- pyxcp/cpp_ext/cpp_ext.cpython-311-x86_64-linux-gnu.so +0 -0
- pyxcp/cpp_ext/cpp_ext.cpython-312-x86_64-linux-gnu.so +0 -0
- pyxcp/cpp_ext/cpp_ext.cpython-313-x86_64-linux-gnu.so +0 -0
- pyxcp/cpp_ext/daqlist.hpp +374 -0
- pyxcp/cpp_ext/event.hpp +67 -0
- pyxcp/cpp_ext/extension_wrapper.cpp +131 -0
- pyxcp/cpp_ext/framing.hpp +360 -0
- pyxcp/cpp_ext/helper.hpp +280 -0
- pyxcp/cpp_ext/mcobject.hpp +248 -0
- pyxcp/cpp_ext/sxi_framing.hpp +332 -0
- pyxcp/cpp_ext/tsqueue.hpp +46 -0
- pyxcp/daq_stim/__init__.py +306 -0
- pyxcp/daq_stim/optimize/__init__.py +67 -0
- pyxcp/daq_stim/optimize/binpacking.py +41 -0
- pyxcp/daq_stim/scheduler.cpp +62 -0
- pyxcp/daq_stim/scheduler.hpp +75 -0
- pyxcp/daq_stim/stim.cpp +13 -0
- pyxcp/daq_stim/stim.cpython-310-x86_64-linux-gnu.so +0 -0
- pyxcp/daq_stim/stim.cpython-311-x86_64-linux-gnu.so +0 -0
- pyxcp/daq_stim/stim.cpython-312-x86_64-linux-gnu.so +0 -0
- pyxcp/daq_stim/stim.cpython-313-x86_64-linux-gnu.so +0 -0
- pyxcp/daq_stim/stim.hpp +604 -0
- pyxcp/daq_stim/stim_wrapper.cpp +50 -0
- pyxcp/dllif.py +100 -0
- pyxcp/errormatrix.py +878 -0
- pyxcp/examples/conf_can.toml +19 -0
- pyxcp/examples/conf_can_user.toml +16 -0
- pyxcp/examples/conf_can_vector.json +11 -0
- pyxcp/examples/conf_can_vector.toml +11 -0
- pyxcp/examples/conf_eth.toml +9 -0
- pyxcp/examples/conf_nixnet.json +20 -0
- pyxcp/examples/conf_socket_can.toml +12 -0
- pyxcp/examples/run_daq.py +165 -0
- pyxcp/examples/xcp_policy.py +60 -0
- pyxcp/examples/xcp_read_benchmark.py +38 -0
- pyxcp/examples/xcp_skel.py +48 -0
- pyxcp/examples/xcp_unlock.py +38 -0
- pyxcp/examples/xcp_user_supplied_driver.py +43 -0
- pyxcp/examples/xcphello.py +79 -0
- pyxcp/examples/xcphello_recorder.py +107 -0
- pyxcp/master/__init__.py +10 -0
- pyxcp/master/errorhandler.py +677 -0
- pyxcp/master/master.py +2645 -0
- pyxcp/py.typed +0 -0
- pyxcp/recorder/.idea/.gitignore +8 -0
- pyxcp/recorder/.idea/misc.xml +4 -0
- pyxcp/recorder/.idea/modules.xml +8 -0
- pyxcp/recorder/.idea/recorder.iml +6 -0
- pyxcp/recorder/.idea/sonarlint/issuestore/3/8/3808afc69ac1edb9d760000a2f137335b1b99728 +7 -0
- pyxcp/recorder/.idea/sonarlint/issuestore/9/a/9a2aa4db38d3115ed60da621e012c0efc0172aae +0 -0
- pyxcp/recorder/.idea/sonarlint/issuestore/b/4/b49006702b459496a8e8c94ebe60947108361b91 +0 -0
- pyxcp/recorder/.idea/sonarlint/issuestore/index.pb +7 -0
- pyxcp/recorder/.idea/sonarlint/securityhotspotstore/3/8/3808afc69ac1edb9d760000a2f137335b1b99728 +0 -0
- pyxcp/recorder/.idea/sonarlint/securityhotspotstore/9/a/9a2aa4db38d3115ed60da621e012c0efc0172aae +0 -0
- pyxcp/recorder/.idea/sonarlint/securityhotspotstore/b/4/b49006702b459496a8e8c94ebe60947108361b91 +0 -0
- pyxcp/recorder/.idea/sonarlint/securityhotspotstore/index.pb +7 -0
- pyxcp/recorder/.idea/vcs.xml +10 -0
- pyxcp/recorder/__init__.py +96 -0
- pyxcp/recorder/build_clang.cmd +1 -0
- pyxcp/recorder/build_clang.sh +2 -0
- pyxcp/recorder/build_gcc.cmd +1 -0
- pyxcp/recorder/build_gcc.sh +2 -0
- pyxcp/recorder/build_gcc_arm.sh +2 -0
- pyxcp/recorder/converter/__init__.py +445 -0
- pyxcp/recorder/lz4.c +2829 -0
- pyxcp/recorder/lz4.h +879 -0
- pyxcp/recorder/lz4hc.c +2041 -0
- pyxcp/recorder/lz4hc.h +413 -0
- pyxcp/recorder/mio.hpp +1714 -0
- pyxcp/recorder/reader.hpp +138 -0
- pyxcp/recorder/reco.py +278 -0
- pyxcp/recorder/recorder.rst +0 -0
- pyxcp/recorder/rekorder.cpp +59 -0
- pyxcp/recorder/rekorder.cpython-310-x86_64-linux-gnu.so +0 -0
- pyxcp/recorder/rekorder.cpython-311-x86_64-linux-gnu.so +0 -0
- pyxcp/recorder/rekorder.cpython-312-x86_64-linux-gnu.so +0 -0
- pyxcp/recorder/rekorder.cpython-313-x86_64-linux-gnu.so +0 -0
- pyxcp/recorder/rekorder.hpp +274 -0
- pyxcp/recorder/setup.py +41 -0
- pyxcp/recorder/test_reko.py +34 -0
- pyxcp/recorder/unfolder.hpp +1354 -0
- pyxcp/recorder/wrap.cpp +184 -0
- pyxcp/recorder/writer.hpp +302 -0
- pyxcp/scripts/__init__.py +0 -0
- pyxcp/scripts/pyxcp_probe_can_drivers.py +20 -0
- pyxcp/scripts/xcp_examples.py +64 -0
- pyxcp/scripts/xcp_fetch_a2l.py +40 -0
- pyxcp/scripts/xcp_id_scanner.py +18 -0
- pyxcp/scripts/xcp_info.py +144 -0
- pyxcp/scripts/xcp_profile.py +26 -0
- pyxcp/scripts/xmraw_converter.py +31 -0
- pyxcp/stim/__init__.py +0 -0
- pyxcp/tests/test_asam_types.py +24 -0
- pyxcp/tests/test_binpacking.py +186 -0
- pyxcp/tests/test_can.py +1324 -0
- pyxcp/tests/test_checksum.py +95 -0
- pyxcp/tests/test_daq.py +193 -0
- pyxcp/tests/test_daq_opt.py +426 -0
- pyxcp/tests/test_frame_padding.py +156 -0
- pyxcp/tests/test_framing.py +262 -0
- pyxcp/tests/test_master.py +2116 -0
- pyxcp/tests/test_transport.py +177 -0
- pyxcp/tests/test_utils.py +30 -0
- pyxcp/timing.py +60 -0
- pyxcp/transport/__init__.py +13 -0
- pyxcp/transport/base.py +484 -0
- pyxcp/transport/base_transport.hpp +0 -0
- pyxcp/transport/can.py +660 -0
- pyxcp/transport/eth.py +254 -0
- pyxcp/transport/sxi.py +209 -0
- pyxcp/transport/transport_ext.hpp +214 -0
- pyxcp/transport/transport_wrapper.cpp +249 -0
- pyxcp/transport/usb_transport.py +229 -0
- pyxcp/types.py +987 -0
- pyxcp/utils.py +127 -0
- pyxcp/vector/__init__.py +0 -0
- pyxcp/vector/map.py +82 -0
- pyxcp-0.25.2.dist-info/METADATA +341 -0
- pyxcp-0.25.2.dist-info/RECORD +151 -0
- pyxcp-0.25.2.dist-info/WHEEL +6 -0
- pyxcp-0.25.2.dist-info/entry_points.txt +9 -0
- pyxcp-0.25.2.dist-info/licenses/LICENSE +165 -0
pyxcp/py.typed
ADDED
|
File without changes
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
|
|
2
|
+
T cpp:S5028"8Replace this macro by "const", "constexpr" or an "enum".(����8�1
|
|
3
|
+
cpp:S3806""^non-portable path to file '<windows.h>'; specified path differs in case from file name on disk(��������8�����1
|
|
4
|
+
Tcpp:S954�"8Move these 3 #include directives to the top of the file.(���8ﴔ��1
|
|
5
|
+
T cpp:S5028"8Replace this macro by "const", "constexpr" or an "enum".(����8帔��1
|
|
6
|
+
cpp:S3806""^non-portable path to file '<windows.h>'; specified path differs in case from file name on disk(��������8�Ӕ��1
|
|
7
|
+
Tcpp:S954�"8Move these 3 #include directives to the top of the file.(���8�Ԕ��1
|
|
File without changes
|
|
File without changes
|
pyxcp/recorder/.idea/sonarlint/securityhotspotstore/3/8/3808afc69ac1edb9d760000a2f137335b1b99728
ADDED
|
File without changes
|
pyxcp/recorder/.idea/sonarlint/securityhotspotstore/9/a/9a2aa4db38d3115ed60da621e012c0efc0172aae
ADDED
|
File without changes
|
pyxcp/recorder/.idea/sonarlint/securityhotspotstore/b/4/b49006702b459496a8e8c94ebe60947108361b91
ADDED
|
File without changes
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
<?xml version="1.0" encoding="UTF-8"?>
|
|
2
|
+
<project version="4">
|
|
3
|
+
<component name="VcsDirectoryMappings">
|
|
4
|
+
<mapping directory="$PROJECT_DIR$/../.." vcs="Git" />
|
|
5
|
+
<mapping directory="$PROJECT_DIR$/lz4" vcs="Git" />
|
|
6
|
+
<mapping directory="$PROJECT_DIR$/mio" vcs="Git" />
|
|
7
|
+
<mapping directory="$PROJECT_DIR$/simde" vcs="Git" />
|
|
8
|
+
<mapping directory="$PROJECT_DIR$/simdjson" vcs="Git" />
|
|
9
|
+
</component>
|
|
10
|
+
</project>
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
"""XCP Frame Recording Facility."""
|
|
3
|
+
|
|
4
|
+
from dataclasses import dataclass
|
|
5
|
+
from typing import Union
|
|
6
|
+
|
|
7
|
+
from pyxcp.transport.base import FrameCategory
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
try:
|
|
11
|
+
import pandas as pd
|
|
12
|
+
except ImportError:
|
|
13
|
+
HAS_PANDAS = False
|
|
14
|
+
else:
|
|
15
|
+
HAS_PANDAS = True
|
|
16
|
+
|
|
17
|
+
from pyxcp.recorder.rekorder import DaqOnlinePolicy # noqa: F401
|
|
18
|
+
from pyxcp.recorder.rekorder import DaqRecorderPolicy # noqa: F401
|
|
19
|
+
from pyxcp.recorder.rekorder import Deserializer # noqa: F401
|
|
20
|
+
from pyxcp.recorder.rekorder import MeasurementParameters # noqa: F401
|
|
21
|
+
from pyxcp.recorder.rekorder import _PyXcpLogFileReader, _PyXcpLogFileWriter, data_types
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
DATA_TYPES = data_types()
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@dataclass
class XcpLogFileHeader:
    """Summary information read from the header of an .xmraw log file."""

    version: int
    options: int
    num_containers: int
    record_count: int
    size_uncompressed: int
    size_compressed: int
    compression_ratio: float
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
COUNTER_MAX = 0xFFFF
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
class XcpLogFileReader:
    """High-level reader for .xmraw XCP frame recordings.

    Thin convenience wrapper around the C++ ``_PyXcpLogFileReader``
    extension class.
    """

    def __init__(self, file_name):
        self._reader = _PyXcpLogFileReader(file_name)

    @property
    def header(self):
        # Raw header object as provided by the extension module.
        return self._reader.get_header()

    def get_header(self):
        """Return the file header as an :class:`XcpLogFileHeader` instance."""
        return XcpLogFileHeader(*self._reader.get_header_as_tuple())

    def get_metadata(self):
        return self._reader.get_metadata()

    def __iter__(self):
        """Yield ``(category, counter, timestamp, payload)`` tuples."""
        # Stream block-by-block until the underlying reader is exhausted.
        while (frames := self._reader.next_block()) is not None:
            for category, counter, timestamp, _length, payload in frames:
                yield FrameCategory(category), counter, timestamp, payload

    def reset_iter(self):
        """Rewind the reader so the file can be iterated again."""
        self._reader.reset()

    def as_dataframe(self):
        """Return the whole recording as a pandas DataFrame indexed by timestamp."""
        if not HAS_PANDAS:
            raise NotImplementedError("method as_dataframe() requires 'pandas' package")
        frame = pd.DataFrame(iter(self), columns=["category", "counter", "timestamp", "payload"])
        frame = frame.set_index("timestamp")
        frame.counter = frame.counter.astype("uint16")
        # Map the numeric enum values back to their names for readability.
        reverse_map = {value: name for name, value in FrameCategory.__members__.items()}
        frame.category = frame.category.map(reverse_map).astype("category")
        return frame
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
class XcpLogFileWriter:
    """High-level writer for .xmraw XCP frame recordings.

    Thin convenience wrapper around the C++ ``_PyXcpLogFileWriter``
    extension class.
    """

    def __init__(self, file_name: str, prealloc=500, chunk_size=1):
        # Set the flag *before* constructing the writer so that __del__ is
        # safe even if _PyXcpLogFileWriter() raises.
        self._finalized = False
        self._writer = _PyXcpLogFileWriter(file_name, prealloc, chunk_size)

    def __del__(self):
        # Best-effort finalization if the user forgot to call finalize().
        # getattr guards against a partially constructed instance.
        if not getattr(self, "_finalized", True):
            self.finalize()

    def add_frame(self, category: FrameCategory, counter: int, timestamp: float, payload: Union[bytes, bytearray]):
        """Append a single frame; the counter wraps around at COUNTER_MAX."""
        self._writer.add_frame(category, counter % (COUNTER_MAX + 1), timestamp, len(payload), payload)

    def finalize(self):
        """Flush and close the underlying writer (idempotent).

        BUG FIX: the original never set ``_finalized`` here, so ``__del__``
        called finalize() on the C++ writer a second time.
        """
        if not self._finalized:
            self._finalized = True
            self._writer.finalize()
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
clang++ -std=c++20 -O0 -fvectorize -fexceptions -Rpass=loop-vectorize -ggdb -Wall -Wextra -Weffc++ -DLZ4_DEBUG=1 -DSTANDALONE_REKORDER=1 lz4.cpp rekorder.cpp -o rekorder.exe
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
g++ -std=c++20 -O0 -ggdb -march=native -fexceptions -ffast-math -ftree-vectorize -msse4 -mfpmath=sse -pg -fopt-info-vec-all -Wall -Wextra -Weffc++ -fcoroutines -DLZ4_DEBUG=1 -DSTANDALONE_REKORDER=1 lz4.cpp rekorder.cpp -o rekorder.exe
|
|
@@ -0,0 +1,445 @@
|
|
|
1
|
+
"""Convert pyXCPs .xmraw files to common data formats."""
|
|
2
|
+
|
|
3
|
+
import csv
|
|
4
|
+
import logging
|
|
5
|
+
import os
|
|
6
|
+
import sqlite3
|
|
7
|
+
from array import array
|
|
8
|
+
from dataclasses import dataclass, field
|
|
9
|
+
from mmap import PAGESIZE
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from typing import Any, List
|
|
12
|
+
|
|
13
|
+
import numpy as np
|
|
14
|
+
from rich.logging import RichHandler
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
try:
|
|
18
|
+
import pyarrow as pa
|
|
19
|
+
import pyarrow.parquet as pq
|
|
20
|
+
|
|
21
|
+
has_arrow = True
|
|
22
|
+
except ImportError:
|
|
23
|
+
has_arrow = False
|
|
24
|
+
|
|
25
|
+
try:
|
|
26
|
+
import h5py
|
|
27
|
+
|
|
28
|
+
has_h5py = True
|
|
29
|
+
except ImportError:
|
|
30
|
+
has_h5py = False
|
|
31
|
+
|
|
32
|
+
try:
|
|
33
|
+
from asammdf import MDF, Signal
|
|
34
|
+
from asammdf.blocks.v4_blocks import HeaderBlock
|
|
35
|
+
from asammdf.blocks.v4_constants import FLAG_HD_TIME_OFFSET_VALID
|
|
36
|
+
|
|
37
|
+
has_asammdf = True
|
|
38
|
+
except ImportError:
|
|
39
|
+
has_asammdf = False
|
|
40
|
+
|
|
41
|
+
try:
|
|
42
|
+
import xlsxwriter
|
|
43
|
+
|
|
44
|
+
has_xlsxwriter = True
|
|
45
|
+
|
|
46
|
+
except ImportError:
|
|
47
|
+
has_xlsxwriter = False
|
|
48
|
+
|
|
49
|
+
from pyxcp import console
|
|
50
|
+
from pyxcp.recorder.rekorder import XcpLogFileDecoder as _XcpLogFileDecoder
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
FORMAT = "%(message)s"
|
|
54
|
+
logging.basicConfig(level="NOTSET", format=FORMAT, datefmt="[%X]", handlers=[RichHandler()])
|
|
55
|
+
|
|
56
|
+
log = logging.getLogger("rich")
|
|
57
|
+
|
|
58
|
+
MAP_TO_ARRAY = {
|
|
59
|
+
"U8": "B",
|
|
60
|
+
"I8": "b",
|
|
61
|
+
"U16": "H",
|
|
62
|
+
"I16": "h",
|
|
63
|
+
"U32": "L",
|
|
64
|
+
"I32": "l",
|
|
65
|
+
"U64": "Q",
|
|
66
|
+
"I64": "q",
|
|
67
|
+
"F32": "f",
|
|
68
|
+
"F64": "d",
|
|
69
|
+
"F16": "f",
|
|
70
|
+
"BF16": "f",
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
MAP_TO_NP = {
|
|
74
|
+
"U8": np.uint8,
|
|
75
|
+
"I8": np.int8,
|
|
76
|
+
"U16": np.uint16,
|
|
77
|
+
"I16": np.int16,
|
|
78
|
+
"U32": np.uint32,
|
|
79
|
+
"I32": np.int32,
|
|
80
|
+
"U64": np.uint64,
|
|
81
|
+
"I64": np.int64,
|
|
82
|
+
"F32": np.float32,
|
|
83
|
+
"F64": np.float64,
|
|
84
|
+
"F16": np.float16,
|
|
85
|
+
"BF16": np.float16,
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
@dataclass
class Storage:
    """Column buffer for a single measurement (one DAQ-list element)."""

    name: str
    target_type: Any  # converter-specific target type (e.g. numpy dtype, SQL affinity)
    arr: array


@dataclass
class StorageContainer:
    """Per-DAQ-list collection of measurement columns plus timestamp columns."""

    name: str
    # BUG FIX: default_factory must be a *callable*; the original passed the
    # list instance `[]`, which raised TypeError when the field was defaulted.
    arr: List[Storage] = field(default_factory=list)
    # Timestamps are stored as unsigned 64-bit integers ("Q" typecode).
    timestamp0: List[int] = field(default_factory=lambda: array("Q"))
    timestamp1: List[int] = field(default_factory=lambda: array("Q"))
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
class XcpLogFileDecoder(_XcpLogFileDecoder):
    """Base class for .xmraw converters.

    Template-method design: the C++ base class drives decoding and calls
    ``initialize()`` / ``finalize()``; subclasses customize behavior via the
    ``on_initialize`` / ``on_container`` / ``on_finalize`` hooks.
    """

    def __init__(
        self,
        recording_file_name: str,
        out_file_suffix: str,
        remove_file: bool = True,
        target_type_map: dict = None,
        target_file_name: str = "",
    ):
        # NOTE(review): target_file_name is accepted but not used here —
        # the output name is always derived from the recording file name.
        super().__init__(recording_file_name)
        self.logger = logging.getLogger("PyXCP")
        self.logger.setLevel(logging.DEBUG)
        # Output file: same path as the recording, with the converter's suffix.
        self.out_file_name = Path(recording_file_name).with_suffix(out_file_suffix)
        self.out_file_suffix = out_file_suffix
        self.target_type_map = target_type_map or {}
        if remove_file:
            # Remove a stale output file from a previous run, if any.
            try:
                os.unlink(self.out_file_name)
            except FileNotFoundError:
                pass

    def initialize(self) -> None:
        # Called by the decoding machinery before frames are processed.
        self.on_initialize()

    def on_initialize(self) -> None:
        # Hook: subclasses should call super().on_initialize() to get containers.
        self.setup_containers()

    def finalize(self) -> None:
        # Called by the decoding machinery after all frames were processed.
        self.on_finalize()

    def on_finalize(self) -> None:
        # Hook: override to write/close output resources.
        pass

    def setup_containers(self) -> None:
        """Create one StorageContainer per DAQ list, with one Storage per column."""
        self.tables = []
        for dl in self.daq_lists:
            result = []
            for name, type_str in dl.headers:
                # NOTE(review): "txpe" is a typo for "type"; kept as-is here.
                array_txpe = MAP_TO_ARRAY[type_str]
                # Converter-specific target type (may be None if unmapped).
                target_type = self.target_type_map.get(type_str)
                sd = Storage(name, target_type, array(array_txpe))
                result.append(sd)
            sc = StorageContainer(dl.name, result)
            self.tables.append(sc)
            self.on_container(sc)

    def on_container(self, sc: StorageContainer) -> None:
        # Hook: called once per created container (e.g. to create a table/sheet).
        pass
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
class CollectRows:
    """Mixin that buffers decoded DAQ rows in memory (into ``self.tables``)."""

    def on_daq_list(self, daq_list_num: int, timestamp0: int, timestamp1: int, measurements: list) -> None:
        """Append one row of measurements to the container of the given DAQ list."""
        container = self.tables[daq_list_num]
        container.timestamp0.append(timestamp0)
        container.timestamp1.append(timestamp1)
        # Column-wise append: measurement i goes into the i-th Storage buffer.
        for idx, value in enumerate(measurements):
            container.arr[idx].arr.append(value)
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
class ArrowConverter(CollectRows, XcpLogFileDecoder):
    """Convert a recording to one Apache Parquet file per DAQ list."""

    # BUG FIX: guard with `has_arrow` — the original evaluated pa.uint8() etc.
    # unconditionally at class-creation time, so importing this module crashed
    # when pyarrow was not installed, defeating the optional-dependency
    # try/except at the top of the file.
    MAP_TO_ARROW = (
        {
            "U8": pa.uint8(),
            "I8": pa.int8(),
            "U16": pa.uint16(),
            "I16": pa.int16(),
            "U32": pa.uint32(),
            "I32": pa.int32(),
            "U64": pa.uint64(),
            "I64": pa.int64(),
            "F32": pa.float32(),
            "F64": pa.float64(),
            "F16": pa.float16(),
            "BF16": pa.float16(),
        }
        if has_arrow
        else {}
    )

    def __init__(self, recording_file_name: str, target_file_name: str = ""):
        super().__init__(
            recording_file_name=recording_file_name,
            out_file_suffix=".parquet",
            remove_file=False,
            target_type_map=self.MAP_TO_ARROW,
            target_file_name=target_file_name,
        )

    def on_initialize(self) -> None:
        super().on_initialize()

    def on_finalize(self) -> None:
        """Write one .parquet file per DAQ list; returns the pa.Table objects."""
        result = []
        for container in self.tables:
            names = ["timestamp0", "timestamp1"]
            data = [container.timestamp0, container.timestamp1]
            for sd in container.arr:
                names.append(sd.name)
                data.append(pa.array(sd.arr, type=sd.target_type))
            table = pa.Table.from_arrays(data, names=names)
            # Output files are named after the DAQ list, not the recording.
            fname = f"{container.name}{self.out_file_suffix}"
            self.logger.info(f"Writing file {fname!r}")
            pq.write_table(table, fname)
            result.append(table)
        return result
|
|
213
|
+
|
|
214
|
+
|
|
215
|
+
class CsvConverter(XcpLogFileDecoder):
    """Convert a recording to one CSV file per DAQ list."""

    def __init__(self, recording_file_name: str, target_file_name: str = ""):
        super().__init__(
            recording_file_name=recording_file_name, out_file_suffix=".csv", remove_file=False, target_file_name=target_file_name
        )

    def on_initialize(self) -> None:
        self.csv_writers = []
        # Keep the file handles so they can be closed deterministically.
        self.csv_files = []
        super().on_initialize()

    def on_container(self, sc: StorageContainer) -> None:
        fname = f"{sc.name}{self.out_file_suffix}"
        self.logger.info(f"Creating file {fname!r}.")
        # BUG FIX: the original passed `open(...)` directly to csv.writer and
        # never closed the file, leaking the handle and risking loss of
        # buffered rows at interpreter exit.
        out_file = open(fname, "w", newline="")
        self.csv_files.append(out_file)
        writer = csv.writer(out_file, dialect="excel")
        headers = ["timestamp0", "timestamp1"] + [e.name for e in sc.arr]
        writer.writerow(headers)
        self.csv_writers.append(writer)

    def on_finalize(self) -> None:
        for out_file in self.csv_files:
            out_file.close()
        self.logger.info("Done.")

    def on_daq_list(self, daq_list_num: int, timestamp0: int, timestamp1: int, measurements: list) -> None:
        writer = self.csv_writers[daq_list_num]
        writer.writerow([timestamp0, timestamp1, *measurements])
|
|
240
|
+
|
|
241
|
+
|
|
242
|
+
class ExcelConverter(XcpLogFileDecoder):
    """Convert a recording to a single .xlsx workbook (one worksheet per DAQ list)."""

    def __init__(self, recording_file_name: str, target_file_name: str = ""):
        # NOTE(review): remove_file defaults to True here (unlike CsvConverter),
        # so a pre-existing output file is deleted first.
        super().__init__(recording_file_name=recording_file_name, out_file_suffix=".xlsx", target_file_name=target_file_name)

    def on_initialize(self) -> None:
        self.logger.info(f"Creating file {str(self.out_file_name)!r}.")
        self.xls_workbook = xlsxwriter.Workbook(self.out_file_name)
        self.xls_sheets = []  # one worksheet per DAQ list, indexed by DAQ-list number
        self.rows = []  # next free row index per worksheet
        super().on_initialize()

    def on_container(self, sc: StorageContainer) -> None:
        sheet = self.xls_workbook.add_worksheet(sc.name)
        self.xls_sheets.append(sheet)
        headers = ["timestamp0", "timestamp1"] + [e.name for e in sc.arr]
        sheet.write_row(0, 0, headers)
        # Row 0 holds the headers; data starts at row 1.
        self.rows.append(1)

    def on_finalize(self) -> None:
        self.xls_workbook.close()
        self.logger.info("Done.")

    def on_daq_list(self, daq_list_num: int, timestamp0: int, timestamp1: int, measurements: list) -> None:
        sheet = self.xls_sheets[daq_list_num]
        row = self.rows[daq_list_num]
        data = [timestamp0, timestamp1] + measurements
        sheet.write_row(row, 0, data)
        self.rows[daq_list_num] += 1
|
|
270
|
+
|
|
271
|
+
|
|
272
|
+
class HdfConverter(CollectRows, XcpLogFileDecoder):
    """Convert a recording to an HDF5 file (one group per DAQ list)."""

    def __init__(self, recording_file_name: str, target_file_name: str = ""):
        super().__init__(recording_file_name=recording_file_name, out_file_suffix=".h5", target_file_name=target_file_name)

    def on_initialize(self) -> None:
        self.logger.info(f"Creating file {str(self.out_file_name)!r}")
        self.out_file = h5py.File(self.out_file_name, "w")
        super().on_initialize()

    def on_finalize(self) -> None:
        # Rows were buffered in memory by the CollectRows mixin; write each
        # table as one HDF5 group with a dataset per column.
        for arr in self.tables:
            timestamp0 = arr.timestamp0
            timestamp1 = arr.timestamp1
            self.out_file[f"/{arr.name}/timestamp0"] = timestamp0
            self.out_file[f"/{arr.name}/timestamp1"] = timestamp1
            for sd in arr.arr:
                self.out_file[f"/{arr.name}/{sd.name}"] = sd.arr
            self.logger.info(f"Writing table {arr.name!r}")
        self.logger.info("Done.")
        self.out_file.close()
|
|
292
|
+
|
|
293
|
+
|
|
294
|
+
class MdfConverter(CollectRows, XcpLogFileDecoder):
    """Convert a recording to an ASAM MDF4 (.mf4) measurement file."""

    def __init__(self, recording_file_name: str, target_file_name: str = ""):
        super().__init__(
            recording_file_name=recording_file_name,
            out_file_suffix=".mf4",
            target_type_map=MAP_TO_NP,
            target_file_name=target_file_name,
        )

    def on_initialize(self) -> None:
        super().on_initialize()

    def on_finalize(self) -> None:
        # Build an MDF4 header carrying the recording's absolute start time
        # and timezone information.
        timestamp_info = self.parameters.timestamp_info
        hdr = HeaderBlock(
            abs_time=timestamp_info.timestamp_ns,
            tz_offset=timestamp_info.utc_offset,
            daylight_save_time=timestamp_info.dst_offset,
            time_flags=FLAG_HD_TIME_OFFSET_VALID,
        )
        hdr.comment = f"""<HDcomment><TX>Timezone: {timestamp_info.timezone}</TX></HDcomment>"""  # Test-Comment.
        mdf4 = MDF(version="4.10", header=hdr)
        # One data group (channel group) per DAQ list; timestamp0 is the time base.
        for idx, arr in enumerate(self.tables):
            # NOTE(review): `idx` is unused.
            signals = []
            timestamps = arr.timestamp0
            for sd in arr.arr:
                signal = Signal(samples=sd.arr, name=sd.name, timestamps=timestamps)
                signals.append(signal)
            self.logger.info(f"Appending data-group {arr.name!r}")
            mdf4.append(signals, acq_name=arr.name, comment="Created by pyXCP recorder")
        self.logger.info(f"Writing {str(self.out_file_name)!r}")
        mdf4.save(self.out_file_name, compression=2, overwrite=True)
        self.logger.info("Done.")
|
|
327
|
+
|
|
328
|
+
|
|
329
|
+
class SqliteConverter(XcpLogFileDecoder):
    """Convert a recording to a SQLite3 database (one table per DAQ list)."""

    # Maps pyXCP scalar type names to SQLite column affinities.
    MAP_TO_SQL = {
        "U8": "INTEGER",
        "I8": "INTEGER",
        "U16": "INTEGER",
        "I16": "INTEGER",
        "U32": "INTEGER",
        "I32": "INTEGER",
        "U64": "INTEGER",
        "I64": "INTEGER",
        "F32": "FLOAT",
        "F64": "FLOAT",
        "F16": "FLOAT",
        "BF16": "FLOAT",
    }

    def __init__(self, recording_file_name: str, target_file_name: str = ""):
        super().__init__(
            recording_file_name=recording_file_name,
            out_file_suffix=".sq3",
            target_type_map=self.MAP_TO_SQL,
            target_file_name=target_file_name,
        )

    def on_initialize(self) -> None:
        self.logger.info(f"Creating database {str(self.out_file_name)!r}.")
        self.create_database(self.out_file_name)
        # Pre-built INSERT statement per table name (filled in on_container).
        self.insert_stmt = {}
        super().on_initialize()

    def on_container(self, sc: StorageContainer) -> None:
        self.create_table(sc)
        self.logger.info(f"Creating table {sc.name!r}.")
        # NOTE(review): table/column names are interpolated into SQL here;
        # acceptable only because they come from the recording's DAQ-list
        # definitions, not from untrusted input.
        self.insert_stmt[sc.name] = (
            f"""INSERT INTO {sc.name}({", ".join(["timestamp0", "timestamp1"] + [r.name for r in sc.arr])})"""
            f""" VALUES({", ".join(["?" for _ in range(len(sc.arr) + 2)])})"""
        )

    def on_finalize(self) -> None:
        self.conn.commit()
        self.conn.close()
        print("Done.")

    def on_daq_list(self, daq_list_num: int, timestamp0: int, timestamp1: int, measurements: list) -> None:
        sc = self.tables[daq_list_num]
        insert_stmt = self.insert_stmt[sc.name]
        data = [timestamp0, timestamp1, *measurements]
        self.execute(insert_stmt, data)

    def create_database(self, db_name: str) -> None:
        """Create the output database and tune PRAGMAs for bulk insertion."""
        self.conn = sqlite3.Connection(db_name)
        self.cursor = self.conn.cursor()
        self.execute("PRAGMA FOREIGN_KEYS=ON")
        self.execute(f"PRAGMA PAGE_SIZE={PAGESIZE}")
        # Speed over durability: acceptable because the file is regenerated
        # from the recording on failure.
        self.execute("PRAGMA SYNCHRONOUS=OFF")
        self.execute("PRAGMA LOCKING_MODE=EXCLUSIVE")
        self.execute("PRAGMA TEMP_STORE=MEMORY")

        timestamp_info = self.parameters.timestamp_info
        self.execute(
            "CREATE TABLE timestamp_info(timestamp_ns INTEGER, utc_offset INTEGER, dst_offset INTEGER, timezone VARCHAR(255))"
        )
        self.execute("CREATE TABLE table_names(name VARCHAR(255))")
        self.execute(
            "INSERT INTO timestamp_info VALUES(?, ?, ?, ?)",
            [timestamp_info.timestamp_ns, timestamp_info.utc_offset, timestamp_info.dst_offset, timestamp_info.timezone],
        )

    def create_table(self, sc: StorageContainer) -> None:
        """Create one data table for the given container, and register its name."""
        columns = ["timestamp0 INTEGER", "timestamp1 INTEGER"]
        for elem in sc.arr:
            columns.append(f"{elem.name} {elem.target_type}")
        ddl = f"CREATE TABLE {sc.name}({', '.join(columns)})"
        self.execute(ddl)
        self.execute("INSERT INTO table_names VALUES(?)", [sc.name])

    def execute(self, *args: List[str]) -> None:
        # NOTE(review): deliberately best-effort — any SQL error is printed
        # and swallowed so that conversion continues; consider logging via
        # self.logger instead of print.
        try:
            self.cursor.execute(*args)
        except Exception as e:
            print(e)
|
|
412
|
+
|
|
413
|
+
|
|
414
|
+
# Registry of available converters, keyed by lower-case name.
CONVERTERS = {
    "arrow": ArrowConverter,
    "csv": CsvConverter,
    "excel": ExcelConverter,
    "hdf5": HdfConverter,
    "mdf": MdfConverter,
    "sqlite3": SqliteConverter,
}

# Maps converter name to (dependency available?, pip package to install if not).
CONVERTER_REQUIREMENTS = {
    "arrow": (has_arrow, "pyarrow"),
    "csv": (True, "csv"),
    "excel": (has_xlsxwriter, "xlsxwriter"),
    "hdf5": (has_h5py, "h5py"),
    "mdf": (has_asammdf, "asammdf"),
    "sqlite3": (True, "sqlite3"),  # BUG FIX: package name was wrongly listed as "csv"
}


def convert_xmraw(converter_name: str, recording_file_name: str, target_file_name: str, *args, **kwargs) -> None:
    """Convert an .xmraw recording using the named converter.

    Prints a diagnostic and returns (without raising) when the converter name
    is unknown or its optional dependency is missing.
    """
    converter_class = CONVERTERS.get(converter_name.lower())
    if converter_class is None:
        console.print(f"Invalid converter name: {converter_name!r}")
        return
    available, pck_name = CONVERTER_REQUIREMENTS.get(converter_name.lower(), (True, ""))
    if not available:
        console.print(f"Converter {converter_name!r} requires package {pck_name!r}.")
        console.print(f"Please run [green]pip install {pck_name}[/green] to install it.")
        return
    # BUG FIX: target_file_name was accepted but silently dropped; forward it
    # to the converter (all converters accept it as a keyword argument).
    converter = converter_class(recording_file_name, target_file_name=target_file_name)
    converter.run()