pyxcp 0.22.1__cp39-cp39-macosx_13_0_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pyxcp might be problematic.
- CMakeLists.txt +117 -0
- pyxcp/__init__.py +20 -0
- pyxcp/aml/EtasCANMonitoring.a2l +82 -0
- pyxcp/aml/EtasCANMonitoring.aml +67 -0
- pyxcp/aml/XCP_Common.aml +408 -0
- pyxcp/aml/XCPonCAN.aml +78 -0
- pyxcp/aml/XCPonEth.aml +33 -0
- pyxcp/aml/XCPonFlx.aml +113 -0
- pyxcp/aml/XCPonSxI.aml +66 -0
- pyxcp/aml/XCPonUSB.aml +106 -0
- pyxcp/aml/ifdata_CAN.a2l +20 -0
- pyxcp/aml/ifdata_Eth.a2l +11 -0
- pyxcp/aml/ifdata_Flx.a2l +94 -0
- pyxcp/aml/ifdata_SxI.a2l +13 -0
- pyxcp/aml/ifdata_USB.a2l +81 -0
- pyxcp/asam/__init__.py +0 -0
- pyxcp/asam/types.py +131 -0
- pyxcp/asamkeydll.c +116 -0
- pyxcp/asamkeydll.sh +2 -0
- pyxcp/checksum.py +722 -0
- pyxcp/cmdline.py +52 -0
- pyxcp/config/__init__.py +1089 -0
- pyxcp/config/legacy.py +120 -0
- pyxcp/constants.py +47 -0
- pyxcp/cpp_ext/__init__.py +8 -0
- pyxcp/cpp_ext/bin.hpp +104 -0
- pyxcp/cpp_ext/blockmem.hpp +58 -0
- pyxcp/cpp_ext/cpp_ext.cpython-312-darwin.so +0 -0
- pyxcp/cpp_ext/daqlist.hpp +197 -0
- pyxcp/cpp_ext/event.hpp +67 -0
- pyxcp/cpp_ext/extension_wrapper.cpp +94 -0
- pyxcp/cpp_ext/helper.hpp +264 -0
- pyxcp/cpp_ext/mcobject.hpp +241 -0
- pyxcp/cpp_ext/tsqueue.hpp +46 -0
- pyxcp/daq_stim/__init__.py +226 -0
- pyxcp/daq_stim/optimize/__init__.py +67 -0
- pyxcp/daq_stim/optimize/binpacking.py +41 -0
- pyxcp/daq_stim/scheduler.cpp +28 -0
- pyxcp/daq_stim/scheduler.hpp +75 -0
- pyxcp/daq_stim/stim.cpp +13 -0
- pyxcp/daq_stim/stim.cpython-312-darwin.so +0 -0
- pyxcp/daq_stim/stim.hpp +604 -0
- pyxcp/daq_stim/stim_wrapper.cpp +48 -0
- pyxcp/dllif.py +95 -0
- pyxcp/errormatrix.py +878 -0
- pyxcp/examples/conf_can.toml +19 -0
- pyxcp/examples/conf_can_user.toml +16 -0
- pyxcp/examples/conf_can_vector.json +11 -0
- pyxcp/examples/conf_can_vector.toml +11 -0
- pyxcp/examples/conf_eth.toml +9 -0
- pyxcp/examples/conf_nixnet.json +20 -0
- pyxcp/examples/conf_socket_can.toml +12 -0
- pyxcp/examples/conf_sxi.json +9 -0
- pyxcp/examples/conf_sxi.toml +7 -0
- pyxcp/examples/ex_arrow.py +109 -0
- pyxcp/examples/ex_mdf.py +124 -0
- pyxcp/examples/ex_sqlite.py +128 -0
- pyxcp/examples/run_daq.py +146 -0
- pyxcp/examples/xcp_policy.py +60 -0
- pyxcp/examples/xcp_read_benchmark.py +38 -0
- pyxcp/examples/xcp_skel.py +49 -0
- pyxcp/examples/xcp_unlock.py +38 -0
- pyxcp/examples/xcp_user_supplied_driver.py +54 -0
- pyxcp/examples/xcphello.py +79 -0
- pyxcp/examples/xcphello_recorder.py +107 -0
- pyxcp/master/__init__.py +9 -0
- pyxcp/master/errorhandler.py +436 -0
- pyxcp/master/master.py +2029 -0
- pyxcp/py.typed +0 -0
- pyxcp/recorder/__init__.py +102 -0
- pyxcp/recorder/build_clang.cmd +1 -0
- pyxcp/recorder/build_clang.sh +2 -0
- pyxcp/recorder/build_gcc.cmd +1 -0
- pyxcp/recorder/build_gcc.sh +2 -0
- pyxcp/recorder/build_gcc_arm.sh +2 -0
- pyxcp/recorder/converter/__init__.py +37 -0
- pyxcp/recorder/lz4.c +2829 -0
- pyxcp/recorder/lz4.h +879 -0
- pyxcp/recorder/lz4hc.c +2041 -0
- pyxcp/recorder/lz4hc.h +413 -0
- pyxcp/recorder/mio.hpp +1714 -0
- pyxcp/recorder/reader.hpp +139 -0
- pyxcp/recorder/reco.py +277 -0
- pyxcp/recorder/recorder.rst +0 -0
- pyxcp/recorder/rekorder.cpp +59 -0
- pyxcp/recorder/rekorder.cpython-312-darwin.so +0 -0
- pyxcp/recorder/rekorder.hpp +274 -0
- pyxcp/recorder/setup.py +41 -0
- pyxcp/recorder/test_reko.py +34 -0
- pyxcp/recorder/unfolder.hpp +1249 -0
- pyxcp/recorder/wrap.cpp +189 -0
- pyxcp/recorder/writer.hpp +302 -0
- pyxcp/scripts/__init__.py +0 -0
- pyxcp/scripts/pyxcp_probe_can_drivers.py +20 -0
- pyxcp/scripts/xcp_fetch_a2l.py +40 -0
- pyxcp/scripts/xcp_id_scanner.py +19 -0
- pyxcp/scripts/xcp_info.py +109 -0
- pyxcp/scripts/xcp_profile.py +27 -0
- pyxcp/stim/__init__.py +0 -0
- pyxcp/tests/test_asam_types.py +24 -0
- pyxcp/tests/test_binpacking.py +184 -0
- pyxcp/tests/test_can.py +1324 -0
- pyxcp/tests/test_checksum.py +95 -0
- pyxcp/tests/test_daq.py +188 -0
- pyxcp/tests/test_frame_padding.py +153 -0
- pyxcp/tests/test_master.py +2006 -0
- pyxcp/tests/test_transport.py +64 -0
- pyxcp/tests/test_utils.py +30 -0
- pyxcp/timing.py +60 -0
- pyxcp/transport/__init__.py +10 -0
- pyxcp/transport/base.py +436 -0
- pyxcp/transport/base_transport.hpp +0 -0
- pyxcp/transport/can.py +443 -0
- pyxcp/transport/eth.py +219 -0
- pyxcp/transport/sxi.py +133 -0
- pyxcp/transport/transport_wrapper.cpp +0 -0
- pyxcp/transport/usb_transport.py +213 -0
- pyxcp/types.py +993 -0
- pyxcp/utils.py +102 -0
- pyxcp/vector/__init__.py +0 -0
- pyxcp/vector/map.py +82 -0
- pyxcp-0.22.1.dist-info/LICENSE +165 -0
- pyxcp-0.22.1.dist-info/METADATA +107 -0
- pyxcp-0.22.1.dist-info/RECORD +126 -0
- pyxcp-0.22.1.dist-info/WHEEL +4 -0
- pyxcp-0.22.1.dist-info/entry_points.txt +7 -0
pyxcp/examples/conf_can.toml
ADDED
@@ -0,0 +1,19 @@
+TRANSPORT = "CAN"
+CAN_DRIVER = "KVaser"
+CAN_USE_DEFAULT_LISTENER = true
+CHANNEL = "0"
+ACCEPT_VIRTUAL = true
+CAN_ID_MASTER = 257
+CAN_ID_SLAVE = 258
+CAN_ID_BROADCAST = 256
+MAX_DLC_REQUIRED = false
+BITRATE = 50000
+DATA_BITRATE = 50000
+FD=false
+BTL_CYCLES = 16
+SAMPLE_RATE = 1
+SAMPLE_POINT = 87.5
+SJW = 2
+TSEG1 = 5
+TSEG2 = 2
+CREATE_DAQ_TIMESTAMPS = false
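Note: profiles like conf_can.toml are not parsed by the example scripts themselves; they are handed to pyxcp's command-line layer. A minimal sketch of that flow, assuming the profile is saved as conf_can.toml and passed via the usual -c/--config option of pyxcp.cmdline.ArgumentParser:

    # Sketch: bring up an XCP session using a profile such as conf_can.toml.
    # Assumed invocation:  python check_slave.py -c conf_can.toml
    from pyxcp.cmdline import ArgumentParser

    ap = ArgumentParser(description="Connect using a TOML/JSON profile.")
    with ap.run() as x:           # transport/driver settings come from the profile
        x.connect()
        print(x.slaveProperties)  # parameters negotiated during CONNECT
        x.disconnect()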
pyxcp/examples/conf_can_user.toml
ADDED
@@ -0,0 +1,16 @@
+TRANSPORT = "CAN"
+CAN_DRIVER = "MyCI"
+CAN_USE_DEFAULT_LISTENER = true
+CHANNEL = "0"
+CAN_ID_MASTER = 257
+CAN_ID_SLAVE = 258
+CAN_ID_BROADCAST = 256
+MAX_DLC_REQUIRED = false
+BITRATE = 250000
+BTL_CYCLES = 16
+SAMPLE_RATE = 1
+SAMPLE_POINT = 87.5
+SJW = 2
+TSEG1 = 5
+TSEG2 = 2
+CREATE_DAQ_TIMESTAMPS = false
pyxcp/examples/conf_can_vector.toml
ADDED
@@ -0,0 +1,11 @@
+TRANSPORT = "CAN"
+CAN_DRIVER = "Vector"
+CAN_USE_DEFAULT_LISTENER = true
+CHANNEL = "0"
+CAN_ID_MASTER = 2
+CAN_ID_SLAVE = 1
+CAN_ID_BROADCAST = 256
+# MAX_DLC_REQUIRED = true
+CREATE_DAQ_TIMESTAMPS = false
+BITRATE=1000000
+SEED_N_KEY_DLL = "SeedNKeyXcp.dll"
pyxcp/examples/conf_nixnet.json
ADDED
@@ -0,0 +1,20 @@
+{
+    "TRANSPORT": "CAN",
+    "CAN_DRIVER": "NiXnet",
+    "CAN_USE_DEFAULT_LISTENER": true,
+    "CHANNEL": "CAN4",
+    "ACCEPT_VIRTUAL": true,
+    "BAUDRATE_PRESET": true,
+    "CAN_ID_MASTER": 1911,
+    "CAN_ID_SLAVE": 819,
+    "CAN_ID_BROADCAST": 256,
+    "MAX_DLC_REQUIRED": false,
+    "BITRATE": 500000,
+    "BTL_CYCLES": 16,
+    "SAMPLE_RATE": 1,
+    "SAMPLE_POINT": 87.5,
+    "SJW": 2,
+    "TSEG1": 5,
+    "TSEG2": 2,
+    "CREATE_DAQ_TIMESTAMPS": false
+}
pyxcp/examples/conf_socket_can.toml
ADDED
@@ -0,0 +1,12 @@
+TRANSPORT = "CAN"
+CAN_DRIVER = "SocketCAN"
+CAN_USE_DEFAULT_LISTENER = true
+CHANNEL = "can0"
+CAN_ID_MASTER = 257
+CAN_ID_SLAVE = 258
+CAN_ID_BROADCAST = 256
+CREATE_DAQ_TIMESTAMPS = false
+FD = false
+MAX_DLC_REQUIRED = true
+MAX_CAN_FD_DLC = 32
+PADDING_VALUE = 0
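The SocketCAN profile above assumes a Linux host with a configured can0 interface; pyxcp's CAN transport is typically backed by python-can. A quick way to verify the interface is usable before starting an XCP session (a sketch; python-can availability and the interface name are assumptions):

    # Sanity check for the can0 interface named in conf_socket_can.toml.
    # Assumes python-can is installed and can0 was brought up, e.g.
    #   sudo ip link set can0 up type can bitrate 500000
    import can

    bus = can.Bus(interface="socketcan", channel="can0")
    msg = bus.recv(timeout=1.0)   # None if nothing is seen on the bus within 1 s
    print(msg)
    bus.shutdown()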
pyxcp/examples/ex_arrow.py
ADDED
@@ -0,0 +1,109 @@
+import argparse
+import logging
+from array import array
+from dataclasses import dataclass, field
+from typing import Any, List
+
+import pyarrow as pa
+import pyarrow.parquet as pq
+
+from pyxcp.recorder import XcpLogFileDecoder
+
+
+MAP_TO_ARROW = {
+    "U8": pa.uint8(),
+    "I8": pa.int8(),
+    "U16": pa.uint16(),
+    "I16": pa.int16(),
+    "U32": pa.uint32(),
+    "I32": pa.int32(),
+    "U64": pa.uint64(),
+    "I64": pa.int64(),
+    "F32": pa.float32(),
+    "F64": pa.float64(),
+    "F16": pa.float16(),
+    "BF16": pa.float16(),
+}
+
+MAP_TO_ARRAY = {
+    "U8": "B",
+    "I8": "b",
+    "U16": "H",
+    "I16": "h",
+    "U32": "L",
+    "I32": "l",
+    "U64": "Q",
+    "I64": "q",
+    "F32": "f",
+    "F64": "d",
+    "F16": "f",
+    "BF16": "f",
+}
+
+logger = logging.getLogger("PyXCP")
+
+parser = argparse.ArgumentParser(description="Use .xmraw files in an Apache Arrow application.")
+parser.add_argument("xmraw_file", help=".xmraw file")
+args = parser.parse_args()
+
+
+@dataclass
+class Storage:
+    name: str
+    arrow_type: Any
+    arr: array
+
+
+@dataclass
+class StorageContainer:
+    name: str
+    arr: List[Storage] = field(default_factory=[])
+    ts0: List[int] = field(default_factory=lambda: array("Q"))
+    ts1: List[int] = field(default_factory=lambda: array("Q"))
+
+
+class Decoder(XcpLogFileDecoder):
+
+    def initialize(self) -> None:
+        self.arrow_tables = []
+        for dl in self.daq_lists:
+            result = []
+            for name, type_str in dl.headers:
+                array_txpe = MAP_TO_ARRAY[type_str]
+                arrow_type = MAP_TO_ARROW[type_str]
+                sd = Storage(name, arrow_type, array(array_txpe))
+                print(f"\t{name!r} {array_txpe} {arrow_type}", sd)
+                result.append(sd)
+            sc = StorageContainer(dl.name, result)
+            self.arrow_tables.append(sc)
+
+    def finalize(self) -> Any:
+        result = []
+        for arr in self.arrow_tables:
+            timestamp0 = arr.ts0
+            timestamp1 = arr.ts1
+            names = ["timestamp0", "timestamp1"]
+            data = [timestamp0, timestamp1]
+            for sd in arr.arr:
+                adt = pa.array(sd.arr, type=sd.arrow_type)
+                names.append(sd.name)
+                data.append(adt)
+            table = pa.Table.from_arrays(data, names=names)
+            fname = f"{arr.name}.parquet"
+            print("Writing table", fname)
+            pq.write_table(table, fname)
+            print("done.", table.shape)
+            result.append(table)
+        return result
+
+    def on_daq_list(self, daq_list_num: int, timestamp0: int, timestamp1: int, measurements: list) -> None:
+        sc = self.arrow_tables[daq_list_num]
+        sc.ts0.append(timestamp0)
+        sc.ts1.append(timestamp1)
+        for idx, elem in enumerate(measurements):
+            sto = sc.arr[idx]
+            sto.arr.append(elem)
+
+
+decoder = Decoder(args.xmraw_file)
+res = decoder.run()
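ex_arrow.py writes one Parquet file per DAQ list, named after the list (e.g. part_1.parquet for the run_daq.py setup further below). Reading the output back only needs stock pyarrow; a short sketch, with the file name being an assumption:

    # Read back one table produced by ex_arrow.py (file name assumed).
    import pyarrow.parquet as pq

    table = pq.read_table("part_1.parquet")
    print(table.schema)               # timestamp0, timestamp1, plus one column per measurement
    print(table.num_rows, "rows")
    print(table.to_pandas().head())   # optional: continue in pandas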
pyxcp/examples/ex_mdf.py
ADDED
@@ -0,0 +1,124 @@
+import argparse
+import logging
+from array import array
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import Any, List
+
+import numpy as np
+from asammdf import MDF, Signal
+from asammdf.blocks.v4_blocks import HeaderBlock  # ChannelGroup
+from asammdf.blocks.v4_constants import FLAG_HD_TIME_OFFSET_VALID  # FLAG_HD_LOCAL_TIME,
+
+from pyxcp.recorder import XcpLogFileDecoder
+
+
+MAP_TO_NP = {
+    "U8": np.uint8,
+    "I8": np.int8,
+    "U16": np.uint16,
+    "I16": np.int16,
+    "U32": np.uint32,
+    "I32": np.int32,
+    "U64": np.uint64,
+    "I64": np.int64,
+    "F32": np.float32,
+    "F64": np.float64,
+    "F16": np.float16,
+    "BF16": np.float16,
+}
+
+MAP_TO_ARRAY = {
+    "U8": "B",
+    "I8": "b",
+    "U16": "H",
+    "I16": "h",
+    "U32": "L",
+    "I32": "l",
+    "U64": "Q",
+    "I64": "q",
+    "F32": "f",
+    "F64": "d",
+    "F16": "f",
+    # "BF16"
+}
+
+logger = logging.getLogger("PyXCP")
+
+parser = argparse.ArgumentParser(description="Use .xmraw files in an Apache Arrow application.")
+parser.add_argument("xmraw_file", help=".xmraw file")
+args = parser.parse_args()
+
+
+@dataclass
+class Storage:
+    name: str
+    arrow_type: Any
+    arr: array
+
+
+@dataclass
+class StorageContainer:
+    name: str
+    arr: list[Storage] = field(default_factory=[])
+    ts0: List[int] = field(default_factory=lambda: array("Q"))
+    ts1: List[int] = field(default_factory=lambda: array("Q"))
+
+
+class Decoder(XcpLogFileDecoder):
+
+    def __init__(self, recording_file_name: str):
+        super().__init__(recording_file_name)
+        self.mdf_file_name = Path(recording_file_name).with_suffix(".mf4")
+
+    def initialize(self) -> None:
+        self.tables = []
+        for dl in self.daq_lists:
+            result = []
+            for name, type_str in dl.headers:
+                array_txpe = MAP_TO_ARRAY[type_str]
+                arrow_type = MAP_TO_NP[type_str]
+                sd = Storage(name, arrow_type, array(array_txpe))
+                result.append(sd)
+            sc = StorageContainer(dl.name, result)
+            self.tables.append(sc)
+        print("Extracting DAQ lists...")
+
+    def finalize(self) -> None:
+        print("Creating MDF result...")
+        timestamp_info = self.parameters.timestamp_info
+        hdr = HeaderBlock(
+            abs_time=timestamp_info.timestamp_ns,
+            tz_offset=timestamp_info.utc_offset,
+            daylight_save_time=timestamp_info.dst_offset,
+            time_flags=FLAG_HD_TIME_OFFSET_VALID,
+        )
+        hdr.comment = f"""<HDcomment><TX>Timezone: {timestamp_info.timezone}</TX></HDcomment>"""  # Test-Comment.
+        mdf4 = MDF(version="4.10")
+        mdf4.header = hdr
+        # result = []
+        for idx, arr in enumerate(self.tables):
+            signals = []
+            timestamps = arr.ts0
+            for sd in arr.arr:
+
+                signal = Signal(samples=sd.arr, name=sd.name, timestamps=timestamps)
+                signals.append(signal)
+            print(f"Appending data-group {arr.name!r}")
+            mdf4.append(signals, acq_name=arr.name, comment="Created by pyXCP recorder")
+        print(f"Writing '{self.mdf_file_name!s}'")
+        mdf4.save(self.mdf_file_name, compression=2, overwrite=True)
+        print("Done.")
+        return mdf4
+
+    def on_daq_list(self, daq_list_num: int, timestamp0: int, timestamp1: int, measurements: list) -> None:
+        sc = self.tables[daq_list_num]
+        sc.ts0.append(timestamp0)
+        sc.ts1.append(timestamp1)
+        for idx, elem in enumerate(measurements):
+            sto = sc.arr[idx]
+            sto.arr.append(elem)
+
+
+decoder = Decoder(args.xmraw_file)
+res = decoder.run()
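ex_mdf.py stores the converted data next to the recording, with the suffix changed to .mf4. A quick inspection sketch using asammdf; the file and channel names (run_daq.mf4, channel1) are assumptions based on the other examples in this release:

    # Inspect the MDF file written by ex_mdf.py (names assumed).
    from asammdf import MDF

    mdf = MDF("run_daq.mf4")
    print(list(mdf.channels_db))      # all recorded channel names
    sig = mdf.get("channel1")         # one Signal with samples and timestamps
    print(sig.samples[:10])
    print(sig.timestamps[:10])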
pyxcp/examples/ex_sqlite.py
ADDED
@@ -0,0 +1,128 @@
+import argparse
+import logging
+import os
+import sqlite3
+from array import array
+from dataclasses import dataclass, field
+from mmap import PAGESIZE
+from pathlib import Path
+from typing import Any, List
+
+from pyxcp.recorder import XcpLogFileDecoder
+from pyxcp.recorder.converter import MAP_TO_ARRAY
+
+
+MAP_TO_SQL = {
+    "U8": "INTEGER",
+    "I8": "INTEGER",
+    "U16": "INTEGER",
+    "I16": "INTEGER",
+    "U32": "INTEGER",
+    "I32": "INTEGER",
+    "U64": "INTEGER",
+    "I64": "INTEGER",
+    "F32": "FLOAT",
+    "F64": "FLOAT",
+    "F16": "FLOAT",
+    "BF16": "FLOAT",
+}
+
+logger = logging.getLogger("PyXCP")
+
+parser = argparse.ArgumentParser(description="Use .xmraw files in an Apache Arrow application.")
+parser.add_argument("xmraw_file", help=".xmraw file")
+args = parser.parse_args()
+
+
+@dataclass
+class Storage:
+    name: str
+    arrow_type: Any
+    arr: array
+
+
+@dataclass
+class StorageContainer:
+    name: str
+    arr: List[Storage] = field(default_factory=[])
+    ts0: List[int] = field(default_factory=lambda: array("Q"))
+    ts1: List[int] = field(default_factory=lambda: array("Q"))
+
+
+class Decoder(XcpLogFileDecoder):
+
+    def __init__(self, recording_file_name: str):
+        super().__init__(recording_file_name)
+        self.sq3_file_name = Path(recording_file_name).with_suffix(".sq3")
+        try:
+            os.unlink(self.sq3_file_name)
+        except Exception as e:
+            print(e)
+
+    def initialize(self) -> None:
+        self.create_database(self.sq3_file_name)
+        self.arrow_tables = []
+        self.insert_stmt = {}
+        for dl in self.daq_lists:
+            result = []
+            for name, type_str in dl.headers:
+                array_txpe = MAP_TO_ARRAY[type_str]
+                sql_type = MAP_TO_SQL[type_str]
+                sd = Storage(name, sql_type, array(array_txpe))
+                result.append(sd)
+            sc = StorageContainer(dl.name, result)
+            print(f"Creating table {sc.name!r}.")
+            self.create_table(sc)
+            self.insert_stmt[sc.name] = (
+                f"""INSERT INTO {sc.name}({', '.join(['ts0', 'ts1'] + [r.name for r in sc.arr])}) VALUES({', '.join(["?" for _ in range(len(sc.arr) + 2)])})"""
+            )
+            self.arrow_tables.append(sc)
+        print("\nInserting data...")
+
+    def create_database(self, db_name: str) -> None:
+        self.conn = sqlite3.Connection(db_name)
+        self.cursor = self.conn.cursor()
+        self.execute("PRAGMA FOREIGN_KEYS=ON")
+        self.execute(f"PRAGMA PAGE_SIZE={PAGESIZE}")
+        self.execute("PRAGMA SYNCHRONOUS=OFF")
+        self.execute("PRAGMA LOCKING_MODE=EXCLUSIVE")
+        self.execute("PRAGMA TEMP_STORE=MEMORY")
+
+        timestamp_info = self.parameters.timestamp_info
+        self.execute(
+            "CREATE TABLE timestamp_info(timestamp_ns INTEGER, utc_offset INTEGER, dst_offset INTEGER, timezone VARCHAR(255))"
+        )
+        self.execute("CREATE TABLE table_names(name VARCHAR(255))")
+        self.execute(
+            "INSERT INTO timestamp_info VALUES(?, ?, ?, ?)",
+            [timestamp_info.timestamp_ns, timestamp_info.utc_offset, timestamp_info.dst_offset, timestamp_info.timezone],
+        )
+
+    def create_table(self, sc: StorageContainer) -> None:
+        columns = ["ts0 INTEGER", "ts1 INTEGER"]
+        for elem in sc.arr:
+            columns.append(f"{elem.name} {elem.arrow_type}")
+        ddl = f"CREATE TABLE {sc.name}({', '.join(columns)})"
+        self.execute(ddl)
+        self.execute("INSERT INTO table_names VALUES(?)", [sc.name])
+
+    def execute(self, *args: List[str]) -> None:
+        try:
+            self.cursor.execute(*args)
+        except Exception as e:
+            print(e)
+
+    def finalize(self) -> None:
+        self.conn.commit()
+        self.conn.close()
+        print("Done.")
+
+    def on_daq_list(self, daq_list_num: int, timestamp0: int, timestamp1: int, measurements: list) -> None:
+        sc = self.arrow_tables[daq_list_num]
+        insert_stmt = self.insert_stmt[sc.name]
+        data = [timestamp0, timestamp1, *measurements]
+        self.execute(insert_stmt, data)
+
+
+decoder = Decoder(args.xmraw_file)
+decoder.run()
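ex_sqlite.py creates one table per DAQ list plus the helper tables table_names and timestamp_info, so the result can be explored with the standard library alone. A small sketch, assuming the recording was named run_daq.xmraw (giving run_daq.sq3):

    # Row counts per DAQ-list table in the database written by ex_sqlite.py.
    import sqlite3

    conn = sqlite3.connect("run_daq.sq3")
    cur = conn.cursor()
    tables = [name for (name,) in cur.execute("SELECT name FROM table_names").fetchall()]
    for name in tables:
        (count,) = cur.execute(f"SELECT COUNT(*) FROM {name}").fetchone()
        print(f"{name}: {count} rows")
    conn.close()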
pyxcp/examples/run_daq.py
ADDED
@@ -0,0 +1,146 @@
+#!/usr/bin/env python
+
+import time
+
+from pyxcp.cmdline import ArgumentParser
+from pyxcp.daq_stim import DaqList, DaqRecorder, DaqToCsv  # noqa: F401
+
+
+ap = ArgumentParser(description="DAQ test")
+
+XCP_LITE = True
+
+#
+# NOTE: UPDATE TO CORRECT ADDRESSES BEFORE RUNNING!!!
+#
+if XCP_LITE:
+    # Vectorgrp XCPlite.
+    DAQ_LISTS = [
+        DaqList(
+            "part_1",
+            0,
+            False,
+            False,
+            [
+                ("byteCounter", 0x203EA, 0, "U8"),
+                ("wordCounter", 0x203EC, 0, "U16"),
+                ("dwordCounter", 0x20410, 0, "U32"),
+                ("sbyteCounter", 0x203EB, 0, "I8"),
+            ],
+        ),
+        DaqList(
+            "part_2",
+            0,
+            False,
+            False,
+            [
+                ("swordCounter", 0x20414, 0, "I16"),
+                ("sdwordCounter", 0x20418, 0, "I32"),
+                ("channel1", 0x203F8, 0, "F64"),
+                ("channel2", 0x20400, 0, "F64"),
+                ("channel3", 0x20408, 0, "F64"),
+            ],
+        ),
+    ]
+else:
+    # XCPsim from CANape.
+    DAQ_LISTS = [
+        DaqList(
+            "pwm_stuff",
+            2,
+            False,
+            True,
+            [
+                ("channel1", 0x1BD004, 0, "F32"),
+                ("period", 0x001C0028, 0, "F32"),
+                ("channel2", 0x1BD008, 0, "F32"),
+                ("PWMFiltered", 0x1BDDE2, 0, "U8"),
+                ("PWM", 0x1BDDDF, 0, "U8"),
+                ("Triangle", 0x1BDDDE, 0, "I8"),
+            ],
+        ),
+        DaqList(
+            "bytes",
+            1,
+            False,
+            True,
+            [
+                ("TestByte_000", 0x1BE11C, 0, "U8"),
+                ("TestByte_015", 0x1BE158, 0, "U8"),
+                ("TestByte_016", 0x1BE15C, 0, "U8"),
+                ("TestByte_023", 0x1BE178, 0, "U8"),
+                ("TestByte_024", 0x1BE17C, 0, "U8"),
+                ("TestByte_034", 0x1BE1A4, 0, "U8"),
+                ("TestByte_059", 0x1BE208, 0, "U8"),
+                ("TestByte_061", 0x1BE210, 0, "U8"),
+                ("TestByte_063", 0x1BE218, 0, "U8"),
+                ("TestByte_064", 0x1BE21C, 0, "U8"),
+                ("TestByte_097", 0x1BE2A0, 0, "U8"),
+                ("TestByte_107", 0x1BE2C8, 0, "U8"),
+                ("TestByte_131", 0x1BE328, 0, "U8"),
+                ("TestByte_156", 0x1BE38C, 0, "U8"),
+                ("TestByte_159", 0x1BE398, 0, "U8"),
+                ("TestByte_182", 0x1BE3F4, 0, "U8"),
+                ("TestByte_183", 0x1BE3F8, 0, "U8"),
+                ("TestByte_189", 0x1BE410, 0, "U8"),
+                ("TestByte_195", 0x1BE428, 0, "U8"),
+                ("TestByte_216", 0x1BE47C, 0, "U8"),
+                ("TestByte_218", 0x1BE484, 0, "U8"),
+                ("TestByte_221", 0x1BE490, 0, "U8"),
+                ("TestByte_251", 0x1BE508, 0, "U8"),
+                ("TestByte_263", 0x1BE538, 0, "U8"),
+                ("TestByte_276", 0x1BE56C, 0, "U8"),
+                ("TestByte_277", 0x1BE570, 0, "U8"),
+                ("TestByte_297", 0x1BE5C0, 0, "U8"),
+                ("TestByte_302", 0x1BE5D4, 0, "U8"),
+                ("TestByte_324", 0x1BE62C, 0, "U8"),
+                ("TestByte_344", 0x1BE67C, 0, "U8"),
+                ("TestByte_346", 0x1BE684, 0, "U8"),
+            ],
+        ),
+        DaqList(
+            "words",
+            3,
+            False,
+            True,
+            [
+                ("TestWord_001", 0x1BE120, 0, "U16"),
+                ("TestWord_003", 0x1BE128, 0, "U16"),
+                ("TestWord_004", 0x1BE12C, 0, "U16"),
+                ("TestWord_005", 0x1BE134, 0, "U16"),
+                ("TestWord_006", 0x1BE134, 0, "U16"),
+                ("TestWord_007", 0x1BE138, 0, "U16"),
+                ("TestWord_008", 0x1BE13C, 0, "U16"),
+                ("TestWord_009", 0x1BE140, 0, "U16"),
+                ("TestWord_011", 0x1BE148, 0, "U16"),
+            ],
+        ),
+    ]
+
+# daq_parser = DaqToCsv(DAQ_LISTS)  # Record to CSV file(s).
+daq_parser = DaqRecorder(DAQ_LISTS, "run_daq", 2)  # Record to ".xmraw" file.
+
+with ap.run(policy=daq_parser) as x:
+    x.connect()
+    if x.slaveProperties.optionalCommMode:
+        x.getCommModeInfo()
+
+    x.cond_unlock("DAQ")  # DAQ resource is locked in many cases.
+
+    print("setup DAQ lists.")
+    daq_parser.setup()  # Execute setup procedures.
+    print("start DAQ lists.")
+    daq_parser.start()  # Start DAQ lists.
+
+    time.sleep(15.0 * 60.0)  # Run for 15 minutes.
+
+    print("Stop DAQ....")
+    daq_parser.stop()  # Stop DAQ lists.
+    print("finalize DAQ lists.\n")
+    x.disconnect()
+
+if hasattr(daq_parser, "files"):  # `files` attribute is specific to `DaqToCsv`.
+    print("Data written to:")
+    print("================")
+    for fl in daq_parser.files.values():
+        print(fl.name)
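run_daq.py leaves behind a DaqRecorder recording (run_daq plus the .xmraw suffix). Post-processing follows the XcpLogFileDecoder pattern of ex_arrow.py and ex_mdf.py above; a stripped-down sketch that only counts samples per DAQ list (the recording file name is an assumption):

    # Minimal offline decoder for the recording produced by run_daq.py.
    from pyxcp.recorder import XcpLogFileDecoder

    class CountingDecoder(XcpLogFileDecoder):
        def initialize(self) -> None:
            self.counts = [0] * len(self.daq_lists)   # one counter per DAQ list

        def on_daq_list(self, daq_list_num, timestamp0, timestamp1, measurements) -> None:
            self.counts[daq_list_num] += 1

        def finalize(self) -> None:
            for dl, count in zip(self.daq_lists, self.counts):
                print(f"{dl.name}: {count} samples")

    CountingDecoder("run_daq.xmraw").run()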
pyxcp/examples/xcp_policy.py
ADDED
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+"""Demostrates how to use frame recording policies including recorder extension.
+"""
+from pprint import pprint
+
+from pyxcp.cmdline import ArgumentParser
+from pyxcp.transport.base import FrameRecorderPolicy, StdoutPolicy  # noqa: F401
+
+
+ap = ArgumentParser(description="pyXCP frame recording policy example.")
+
+LOG_FILE = "pyxcp"
+
+policy = FrameRecorderPolicy(LOG_FILE)
+use_recorder = True
+
+# policy = StdoutPolicy()  # You may also try this one.
+# use_recorder = False
+
+with ap.run(policy=policy) as x:
+    x.connect()
+    if x.slaveProperties.optionalCommMode:
+        x.getCommModeInfo()
+    identifier = x.identifier(0x01)
+    print("\nSlave Properties:")
+    print("=================")
+    print(f"ID: {identifier!r}")
+    pprint(x.slaveProperties)
+    x.disconnect()
+
+if use_recorder:
+    from pyxcp.recorder import XcpLogFileReader
+    from pyxcp.utils import hexDump
+
+    try:
+        import pandas  # noqa: F401
+    except ImportError:
+        has_pandas = False
+    else:
+        has_pandas = True
+
+    reader = XcpLogFileReader(LOG_FILE)
+    hdr = reader.get_header()  # Get file information.
+    print("\nRecording file header")
+    print("=====================\n")
+    print(hdr)
+    print("\nRecorded frames")
+    print("===============\n")
+    print("CAT CTR TS PAYLOAD")
+    print("-" * 80)
+    for category, counter, timestamp, payload in reader:
+        print(f"{category.name:8} {counter:6} {timestamp:7.7f} {hexDump(payload)}")
+    print("-" * 80)
+    reader.reset_iter()  # reader acts as an Python iterator -- can be reseted with this non-standard method.
+    if has_pandas:
+        print("\nRecordings as Pandas stuff")
+        print("==========================\n")
+        df = reader.as_dataframe()  # Return recordings as Pandas DataFrame.
+        print(df.info())
+        print(df.head(60))