plexus-python-common 1.0.31__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- plexus/common/__init__.py +6 -0
- plexus/common/carto/OSMFile.py +259 -0
- plexus/common/carto/OSMNode.py +25 -0
- plexus/common/carto/OSMTags.py +101 -0
- plexus/common/carto/OSMWay.py +24 -0
- plexus/common/carto/__init__.py +11 -0
- plexus/common/pose.py +107 -0
- plexus/common/proj.py +305 -0
- plexus/common/utils/__init__.py +0 -0
- plexus/common/utils/apiutils.py +31 -0
- plexus/common/utils/bagutils.py +215 -0
- plexus/common/utils/config.py +61 -0
- plexus/common/utils/datautils.py +200 -0
- plexus/common/utils/jsonutils.py +92 -0
- plexus/common/utils/ormutils.py +1428 -0
- plexus/common/utils/s3utils.py +799 -0
- plexus/common/utils/shutils.py +234 -0
- plexus/common/utils/sqlutils.py +9 -0
- plexus/common/utils/strutils.py +382 -0
- plexus/common/utils/testutils.py +49 -0
- plexus_python_common-1.0.31.dist-info/METADATA +38 -0
- plexus_python_common-1.0.31.dist-info/RECORD +24 -0
- plexus_python_common-1.0.31.dist-info/WHEEL +5 -0
- plexus_python_common-1.0.31.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,200 @@
|
|
|
1
|
+
import datetime
|
|
2
|
+
from collections.abc import Callable, Generator
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
import pyparsing as pp
|
|
6
|
+
import ujson as json
|
|
7
|
+
from iker.common.utils.funcutils import singleton
|
|
8
|
+
from iker.common.utils.jsonutils import JsonType
|
|
9
|
+
from iker.common.utils.randutils import randomizer
|
|
10
|
+
|
|
11
|
+
from plexus.common.utils.strutils import BagName, UserName, VehicleName
|
|
12
|
+
from plexus.common.utils.strutils import colon_tag_parser, slash_tag_parser
|
|
13
|
+
from plexus.common.utils.strutils import dot_case_parser, kebab_case_parser, snake_case_parser
|
|
14
|
+
from plexus.common.utils.strutils import hex_string_parser
|
|
15
|
+
from plexus.common.utils.strutils import parse_bag_name, parse_user_name, parse_vehicle_name
|
|
16
|
+
from plexus.common.utils.strutils import semver_parser, uuid_parser
|
|
17
|
+
from plexus.common.utils.strutils import strict_abspath_parser, strict_relpath_parser
|
|
18
|
+
from plexus.common.utils.strutils import topic_parser, vin_code_chars, vin_code_parser
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def make_compute_vin_code_check_digit() -> Callable[[str], str]:
    """Build the ISO 3779 VIN check-digit function.

    The returned callable maps a 17-character VIN to its check digit:
    characters are transliterated to numbers, multiplied by the per-position
    weights, summed, and reduced modulo 11 ("X" stands for a remainder of 10).
    The transliteration table is built once, at closure-creation time.
    """
    transliteration = dict(zip(vin_code_chars,
                               [0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
                                1, 2, 3, 4, 5, 6, 7, 8,
                                1, 2, 3, 4, 5, 7, 9,
                                2, 3, 4, 5, 6, 7, 8, 9]))
    # Position 9 (index 8) is the check digit itself and carries weight 0.
    position_weights = (8, 7, 6, 5, 4, 3, 2, 10, 0, 9, 8, 7, 6, 5, 4, 3, 2)

    def compute(vin_code: str) -> str:
        total = 0
        for ch, weight in zip(vin_code, position_weights):
            total += transliteration[ch] * weight
        remainder = total % 11
        return str(remainder) if remainder < 10 else "X"

    return compute
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
# Module-level instance: build the check-digit function once at import time so
# the transliteration table is constructed only once.
compute_vin_code_check_digit = make_compute_vin_code_check_digit()
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def make_validate_string(element: pp.ParserElement) -> Callable[[str], None]:
    """Build a validator from a pyparsing grammar.

    The returned callable raises ValueError when *s* does not fully match
    *element*; any underlying parse exception is chained into that ValueError
    so callers only ever need to handle ValueError.
    """

    def validator(s: str) -> None:
        try:
            # parse_all=True forces the grammar to consume the whole string.
            # An empty (falsy) ParseResults also counts as a failure; the inner
            # ValueError is intentionally caught below and re-wrapped so every
            # failure surfaces with the same message shape.
            if not element.parse_string(s, parse_all=True):
                raise ValueError(f"failed to parse '{s}'")
        except Exception as e:
            raise ValueError(f"encountered error while parsing '{s}'") from e

    return validator
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
# Ready-made validators built from the shared pyparsing grammars declared in
# plexus.common.utils.strutils; each raises ValueError on input that does not
# fully match its grammar (see make_validate_string).

# Hexadecimal strings.
validate_hex_string = make_validate_string(hex_string_parser)

# Identifier casing conventions.
validate_snake_case = make_validate_string(snake_case_parser)
validate_kebab_case = make_validate_string(kebab_case_parser)
validate_dot_case = make_validate_string(dot_case_parser)

# UUIDs.
validate_uuid = make_validate_string(uuid_parser)

# Filesystem paths (strictly relative / strictly absolute).
validate_strict_relpath = make_validate_string(strict_relpath_parser)
validate_strict_abspath = make_validate_string(strict_abspath_parser)

# Semantic version strings.
validate_semver = make_validate_string(semver_parser)

# Colon-delimited and slash-delimited tags.
validate_colon_tag = make_validate_string(colon_tag_parser)
validate_slash_tag = make_validate_string(slash_tag_parser)

# Topic names (e.g. "/sensor/camera/front_center"; see known_topics below).
validate_topic = make_validate_string(topic_parser)
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def validate_vin_code(vin_code: str):
    """Validate *vin_code* syntactically, then verify its ISO 3779 check digit.

    Raises ValueError when the string does not match the VIN grammar or when
    the character at position 9 (index 8) differs from the computed check digit.
    """
    make_validate_string(vin_code_parser)(vin_code)
    expected = compute_vin_code_check_digit(vin_code)
    if vin_code[8] != expected:
        raise ValueError(f"get wrong VIN code check digit from '{vin_code}', expected '{expected}'")
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def make_validate_parse_string(parse: Callable[[str], Any]) -> Callable[[str], None]:
    """Build a validator from an arbitrary parsing callable.

    The returned callable raises ValueError when *parse* returns a falsy result
    or raises; the original exception is chained onto the ValueError.
    """

    def validator(s: str) -> None:
        try:
            # A falsy parse result counts as a failure; the inner ValueError is
            # deliberately re-wrapped below so all failures look alike.
            if not parse(s):
                raise ValueError(f"failed to parse '{s}'")
        except Exception as e:
            raise ValueError(f"encountered error while parsing '{s}'") from e

    return validator
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
# Validators for the project's structured name types; each parse_* callable
# returns a parsed name object, and a falsy result or a parse error is
# reported as ValueError (see make_validate_parse_string).
validate_user_name = make_validate_parse_string(parse_user_name)
validate_vehicle_name = make_validate_parse_string(parse_vehicle_name)
validate_bag_name = make_validate_parse_string(parse_bag_name)
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def validate_dt_timezone(dt: datetime.datetime):
    """Raise ValueError unless *dt* carries exactly ``datetime.timezone.utc``.

    Note this is an equality check on tzinfo: naive datetimes fail, and so may
    other UTC-equivalent tzinfo implementations that do not compare equal.
    """
    is_utc = dt.tzinfo == datetime.timezone.utc
    if not is_utc:
        raise ValueError(f"dt '{dt}' is not in UTC")
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def validate_json_type_dump_size(json_type: JsonType, dump_size_limit: int = 10000):
    """Raise ValueError when the JSON serialization of *json_type* exceeds
    *dump_size_limit* characters (length is measured in characters, not bytes,
    since non-ASCII is kept unescaped via ``ensure_ascii=False``)."""
    dumped = json.dumps(json_type, ensure_ascii=False)
    if len(dumped) > dump_size_limit:
        raise ValueError(f"dump size exceeds the maximum length '{dump_size_limit}'")
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
def random_vin_code() -> str:
    """Generate a random 17-character VIN whose check digit is valid.

    Position 9 (index 8) carries weight zero in the checksum, so substituting
    the computed check digit there leaves the rest of the code unchanged while
    making the whole VIN pass validate_vin_code.
    """
    raw = randomizer().random_string(vin_code_chars, 17)
    return raw[:8] + compute_vin_code_check_digit(raw) + raw[9:]
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
@singleton
def known_topics() -> list[str]:
    """Return the known sensor topic names (one list instance via @singleton).

    Topics cover seven camera mount positions and five lidar mount positions,
    all under the "/sensor/" prefix.
    """
    camera_positions = [
        "front_center",
        "front_left",
        "front_right",
        "side_left",
        "side_right",
        "rear_left",
        "rear_right",
    ]
    lidar_positions = [
        "front_center",
        "front_left_corner",
        "front_right_corner",
        "side_left",
        "side_right",
    ]
    return ([f"/sensor/camera/{position}" for position in camera_positions] +
            [f"/sensor/lidar/{position}" for position in lidar_positions])
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
@singleton
def known_user_names() -> list[UserName]:
    """Return the known (test/demo) user names, one per alphabet letter A-Z
    (one list instance via @singleton)."""
    name_pairs = [
        ("adam", "anderson"),
        ("ben", "bennett"),
        ("charlie", "clark"),
        ("david", "dixon"),
        ("evan", "edwards"),
        ("frank", "fisher"),
        ("george", "graham"),
        ("henry", "harrison"),
        ("isaac", "irving"),
        ("jack", "jacobs"),
        ("kevin", "kennedy"),
        ("luke", "lawson"),
        ("michael", "mitchell"),
        ("nathan", "newton"),
        ("oscar", "owens"),
        ("paul", "peterson"),
        ("quincy", "quinn"),
        ("ryan", "robinson"),
        ("sam", "stevens"),
        ("tom", "thomas"),
        ("umar", "underwood"),
        ("victor", "vaughan"),
        ("william", "walker"),
        ("xander", "xavier"),
        ("yale", "young"),
        ("zane", "zimmerman"),
    ]
    return [UserName(first, last) for first, last in name_pairs]
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
@singleton
def known_vehicle_names() -> list[VehicleName]:
    """Return the known (test/demo) vehicle names (one list instance via
    @singleton).

    All vehicles share the "cascadia" model; each row is
    (animal call-sign, fleet number, VIN).
    """
    rows = [
        ("antelope", "00000", "3AKJGLD5XLS000000"),
        ("bear", "00001", "3AKJGLD51LS000001"),
        ("cheetah", "00002", "3AKJGLD53LS000002"),
        ("dolphin", "00003", "3AKJGLD55LS000003"),
        ("eagle", "00004", "3AKJGLD57LS000004"),
        ("falcon", "00005", "3AKJGLD59LS000005"),
        ("gorilla", "00006", "3AKJGLD50LS000006"),
        ("hawk", "00007", "3AKJGLD52LS000007"),
        ("iguana", "00008", "3AKJGLD54LS000008"),
        ("jaguar", "00009", "3AKJGLD56LS000009"),
        ("koala", "00010", "3AKJGLD52LS000010"),
        ("leopard", "00011", "3AKJGLD54LS000011"),
        ("mongoose", "00012", "3AKJGLD56LS000012"),
        ("narwhal", "00013", "3AKJGLD58LS000013"),
        ("otter", "00014", "3AKJGLD5XLS000014"),
        ("panther", "00015", "3AKJGLD51LS000015"),
        ("quail", "00016", "3AKJGLD53LS000016"),
        ("rhino", "00017", "3AKJGLD55LS000017"),
        ("snake", "00018", "3AKJGLD57LS000018"),
        ("tiger", "00019", "3AKJGLD59LS000019"),
        ("urial", "00020", "3AKJGLD55LS000020"),
        ("vulture", "00021", "3AKJGLD57LS000021"),
        ("wolf", "00022", "3AKJGLD59LS000022"),
        ("xerus", "00023", "3AKJGLD50LS000023"),
        ("yak", "00024", "3AKJGLD52LS000024"),
        ("zebra", "00025", "3AKJGLD54LS000025"),
    ]
    return [VehicleName("cascadia", animal, number, vin) for animal, number, vin in rows]
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
def random_bag_names_sequence(
    min_record_dt: datetime.datetime,
    max_record_dt: datetime.datetime,
    min_sequence_length: int,
    max_sequence_length: int,
) -> Generator[BagName, None, None]:
    """Yield a random-length run of BagNames sharing one vehicle and one record
    datetime.

    A vehicle is chosen from known_vehicle_names(), a record datetime is drawn
    uniformly within [min_record_dt, max_record_dt], and the sequence length is
    drawn within [min_sequence_length, max_sequence_length]; record_sn counts
    up from 0.
    """
    vehicle_name = randomizer().choose(known_vehicle_names())
    record_dt = randomizer().random_datetime(begin=min_record_dt, end=max_record_dt)
    sequence_length = randomizer().next_int(min_sequence_length, max_sequence_length)

    for record_sn in range(sequence_length):
        yield BagName(vehicle_name=vehicle_name, record_dt=record_dt, record_sn=record_sn)
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
import datetime
|
|
2
|
+
import os
|
|
3
|
+
from collections.abc import Generator, Iterable
|
|
4
|
+
from typing import Any
|
|
5
|
+
|
|
6
|
+
import ujson as json
|
|
7
|
+
from iker.common.utils.dtutils import dt_format, dt_parse, extended_format
|
|
8
|
+
from iker.common.utils.jsonutils import JsonType, JsonValueCompatible
|
|
9
|
+
from iker.common.utils.jsonutils import json_reformat
|
|
10
|
+
from iker.common.utils.sequtils import batched
|
|
11
|
+
|
|
12
|
+
from plexus.common.utils.shutils import collect_volumed_filenames, populate_volumed_filenames
|
|
13
|
+
|
|
14
|
+
# Public API of this module; the json_deserializer/json_serializer helpers are
# not listed here and thus are not exported via `from ... import *`.
__all__ = [
    "json_datetime_decoder",
    "json_datetime_encoder",
    "json_loads",
    "json_dumps",
    "read_chunked_jsonl",
    "write_chunked_jsonl",
]
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def json_datetime_decoder(v: Any) -> datetime.datetime:
    """Decode *v* into a UTC datetime.

    Strings are parsed with the extended ISO format (microseconds + timezone)
    and fed back through this decoder; datetimes get their tzinfo replaced with
    UTC.  NOTE(review): ``replace`` relabels the wall-clock time as UTC rather
    than converting — assumes inputs already represent UTC; confirm with callers.

    Raises ValueError for any other input type.
    """
    if isinstance(v, datetime.datetime):
        return v.replace(tzinfo=datetime.timezone.utc)
    if isinstance(v, str):
        parsed = dt_parse(v, extended_format(with_us=True, with_tz=True))
        return json_datetime_decoder(parsed)
    raise ValueError("unexpected type of value for datetime decoder")
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def json_datetime_encoder(v: Any) -> str:
    """Encode *v* as an extended-ISO datetime string (microseconds + timezone).

    Strings are first round-tripped through dt_parse (normalizing the format);
    datetimes get their tzinfo replaced with UTC before formatting.
    NOTE(review): ``replace`` relabels the wall-clock time as UTC rather than
    converting — assumes inputs already represent UTC; confirm with callers.

    Raises ValueError for any other input type.
    """
    if isinstance(v, datetime.datetime):
        utc_dt = v.replace(tzinfo=datetime.timezone.utc)
        return dt_format(utc_dt, extended_format(with_us=True, with_tz=True))
    if isinstance(v, str):
        return json_datetime_encoder(dt_parse(v, extended_format(with_us=True, with_tz=True)))
    raise ValueError("unexpected type of value for datetime encoder")
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def json_deserializer(obj):
    """Walk *obj* with json_reformat, converting every string leaf that parses
    as an extended-ISO datetime (microseconds + timezone) into a datetime; all
    other values pass through unchanged."""

    def value_formatter(value: JsonValueCompatible) -> JsonType:
        if isinstance(value, str):
            try:
                return dt_parse(value, extended_format(with_us=True, with_tz=True))
            except Exception:
                pass  # not a datetime-shaped string; fall through unchanged
        return value

    return json_reformat(obj, value_formatter=value_formatter)
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def json_serializer(obj):
    """Walk *obj* with json_reformat, rendering datetime leaves as extended-ISO
    strings; any other value json_reformat does not recognize becomes None
    rather than raising."""

    def unregistered_formatter(unregistered: Any) -> JsonType:
        if not isinstance(unregistered, datetime.datetime):
            # Unrecognized non-datetime values serialize to None (best-effort).
            return None
        return dt_format(unregistered, extended_format(with_us=True, with_tz=True))

    return json_reformat(obj, raise_if_unregistered=False, unregistered_formatter=unregistered_formatter)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def json_loads(s: str) -> JsonType:
    """Parse JSON text, then revive datetime-shaped string leaves via
    json_deserializer."""
    parsed = json.loads(s)
    return json_deserializer(parsed)
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def json_dumps(obj: JsonType) -> str:
    """Serialize *obj* to JSON text, rendering datetimes via json_serializer;
    non-ASCII characters and forward slashes are emitted unescaped."""
    normalized = json_serializer(obj)
    return json.dumps(normalized, ensure_ascii=False, escape_forward_slashes=False)
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def read_chunked_jsonl(template: str) -> Generator[tuple[JsonType, str], None, None]:
    """Stream records from every volume file matching *template*.

    Files are discovered via collect_volumed_filenames; each line is decoded
    with json_loads and yielded as a (record, source path) pair.
    """
    for path, _ in collect_volumed_filenames(template):
        with open(path, mode="r", encoding="utf-8") as stream:
            for raw_line in stream:
                yield json_loads(raw_line), path
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def write_chunked_jsonl(records: Iterable[JsonType], template: str, chunk_size: int) -> list[tuple[str, int]]:
    """Write *records* as JSON lines across volume files of at most *chunk_size*
    records each.

    Output paths are produced by populate_volumed_filenames(template).  When
    everything fits into a single chunk, the lone volume file is renamed to
    *template* itself.

    Returns a list of (path, line count) pairs, one per file written (empty
    when *records* is empty).
    """
    generator = populate_volumed_filenames(template)
    entry = []
    for batch in batched(records, chunk_size):
        path, _ = next(generator)
        lines = 0
        # Write UTF-8 explicitly: json_dumps emits non-ASCII unescaped
        # (ensure_ascii=False) and read_chunked_jsonl always decodes UTF-8, so
        # relying on the platform default encoding could corrupt or reject the
        # round-trip.
        with open(path, mode="w", encoding="utf-8") as fh:
            for record in batch:
                fh.write(json_dumps(record))
                fh.write("\n")
                lines += 1
        entry.append((path, lines))
    if len(entry) == 1:
        path, lines = entry[0]
        os.rename(path, template)
        return [(template, lines)]
    return entry
|