plexus-python-common 1.0.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of plexus-python-common might be problematic. Click here for more details.
- plexus/common/__init__.py +6 -0
- plexus/common/carto/OSMFile.py +259 -0
- plexus/common/carto/OSMNode.py +25 -0
- plexus/common/carto/OSMTags.py +101 -0
- plexus/common/carto/OSMWay.py +24 -0
- plexus/common/carto/__init__.py +11 -0
- plexus/common/config.py +84 -0
- plexus/common/pose.py +107 -0
- plexus/common/proj.py +305 -0
- plexus/common/utils/__init__.py +0 -0
- plexus/common/utils/bagutils.py +218 -0
- plexus/common/utils/datautils.py +195 -0
- plexus/common/utils/jsonutils.py +92 -0
- plexus/common/utils/ormutils.py +335 -0
- plexus/common/utils/s3utils.py +118 -0
- plexus/common/utils/shutils.py +234 -0
- plexus/common/utils/strutils.py +285 -0
- plexus_python_common-1.0.7.dist-info/METADATA +29 -0
- plexus_python_common-1.0.7.dist-info/RECORD +21 -0
- plexus_python_common-1.0.7.dist-info/WHEEL +5 -0
- plexus_python_common-1.0.7.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
import contextlib
|
|
2
|
+
import functools
|
|
3
|
+
import os.path
|
|
4
|
+
from collections.abc import Callable
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Literal
|
|
7
|
+
|
|
8
|
+
from cloudpathlib import CloudPath, S3Client, S3Path
|
|
9
|
+
from rich.progress import BarColumn, DownloadColumn, Progress, TaskID, TextColumn, TransferSpeedColumn
|
|
10
|
+
|
|
11
|
+
# Public API of this module.
__all__ = [
    "TransferCallbackS3Client",
    "make_progress_callback",
    "make_progressed_s3_client",
]

# Direction of an S3 transfer, as reported to transfer callbacks.
TransferDirection = Literal["download", "upload"]
# Lifecycle phase reported to transfer callbacks: "start" once before the
# transfer, "update" repeatedly with a byte count, "stop" once afterwards.
TransferState = Literal["start", "update", "stop"]
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
@contextlib.contextmanager
def make_transfer_callback(
    callback: Callable[[CloudPath, TransferDirection, TransferState, int], None],
    path: Path | CloudPath,
    direction: TransferDirection,
):
    """Context manager bracketing one transfer with start/stop notifications.

    When ``callback`` is ``None`` the managed value is ``None`` and nothing is
    notified.  Otherwise ``callback`` is invoked with state ``"start"`` on
    entry and with state ``"stop"`` on exit (even if the body raises), and the
    yielded callable forwards per-chunk byte counts as ``"update"`` events.
    """
    if callback is None:
        yield None
    else:
        callback(path, direction, "start", 0)
        try:
            yield functools.partial(callback, path, direction, "update")
        finally:
            # Always emit "stop" so observers can tear down their state.
            callback(path, direction, "stop", 0)
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class TransferCallbackS3Client(S3Client):
    """An ``S3Client`` that reports transfer progress through a callback.

    Overrides cloudpathlib's private ``_download_file``/``_upload_file`` hooks
    to route boto3's per-chunk ``Callback`` notifications into the
    ``transfer_callback`` supplied at construction time.
    """

    def __init__(
        self,
        *args,
        transfer_callback: Callable[[CloudPath, TransferDirection, TransferState, int], None],
        **kwargs,
    ):
        # All positional/keyword arguments are forwarded to S3Client unchanged;
        # only the keyword-only transfer_callback is consumed here.
        super().__init__(*args, **kwargs)
        self.transfer_callback = transfer_callback

    def _download_file(self, cloud_path: S3Path, local_path: str | os.PathLike) -> Path:
        # Download one object to a local file, emitting start/update/stop
        # events keyed by the *cloud* path.
        local_path = Path(local_path)

        obj = self.s3.Object(cloud_path.bucket, cloud_path.key)

        with make_transfer_callback(self.transfer_callback, cloud_path, "download") as callback:
            obj.download_file(
                str(local_path),
                Config=self.boto3_transfer_config,
                ExtraArgs=self.boto3_dl_extra_args,
                Callback=callback,
            )
        return local_path

    def _upload_file(self, local_path: str | os.PathLike, cloud_path: S3Path) -> S3Path:
        # Upload one local file to an object, emitting start/update/stop
        # events keyed by the *local* path.
        local_path = Path(local_path)

        obj = self.s3.Object(cloud_path.bucket, cloud_path.key)

        # Copy so per-call ContentType/ContentEncoding additions do not leak
        # into the client-wide extra-args dict.
        extra_args = self.boto3_ul_extra_args.copy()

        if self.content_type_method is not None:
            content_type, content_encoding = self.content_type_method(str(local_path))
            if content_type is not None:
                extra_args["ContentType"] = content_type
            if content_encoding is not None:
                extra_args["ContentEncoding"] = content_encoding

        with make_transfer_callback(self.transfer_callback, local_path, "upload") as callback:
            obj.upload_file(
                str(local_path),
                Config=self.boto3_transfer_config,
                ExtraArgs=extra_args,
                Callback=callback,
            )
        return cloud_path
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
def make_progress_callback(progress: Progress) -> Callable[[CloudPath, TransferDirection, TransferState, int], None]:
    """Build a transfer callback that renders one rich progress bar per file.

    A task is created on ``"start"`` (sized via ``path.stat().st_size`` —
    note this stats the remote object for downloads), advanced on each
    ``"update"`` by the bytes transferred, and removed on ``"stop"``.

    :param progress: the rich ``Progress`` renderer to attach tasks to.
    :return: a callback suitable for :func:`make_transfer_callback` /
             :class:`TransferCallbackS3Client`.
    """
    # One rich task per in-flight path; created on "start", dropped on "stop".
    task_ids: dict[Path | CloudPath, TaskID] = {}

    def progress_callback(path: Path | CloudPath, direction: TransferDirection, state: TransferState, bytes_sent: int):
        if state == "start":
            size = path.stat().st_size
            task_ids[path] = progress.add_task(direction, total=size, filename=path.name)
        elif state == "stop":
            if path in task_ids:
                progress.remove_task(task_ids[path])
                del task_ids[path]
        else:
            # Guard like the "stop" branch does: a stray "update" for a path
            # that never saw "start" (or arrived after "stop") must not raise
            # KeyError and kill the transfer.
            task_id = task_ids.get(path)
            if task_id is not None:
                progress.update(task_id, advance=bytes_sent)

    return progress_callback
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
@contextlib.contextmanager
def make_progressed_s3_client(
    aws_access_key_id: str | None = None,
    aws_secret_access_key: str | None = None,
    endpoint_url: str | None = None,
):
    """Yield a :class:`TransferCallbackS3Client` wired to a live progress bar.

    Each argument falls back to its conventional AWS environment variable
    (``AWS_ACCESS_KEY_ID``, ``AWS_SECRET_ACCESS_KEY``, ``AWS_ENDPOINT_URL``)
    when ``None`` or empty — the defaults make that fallback usable without
    passing explicit ``None`` placeholders, while remaining backward
    compatible with positional callers.
    """
    aws_access_key_id = aws_access_key_id or os.environ.get("AWS_ACCESS_KEY_ID")
    aws_secret_access_key = aws_secret_access_key or os.environ.get("AWS_SECRET_ACCESS_KEY")
    endpoint_url = endpoint_url or os.environ.get("AWS_ENDPOINT_URL")

    # The Progress context owns the terminal rendering; the client produced
    # here feeds it via make_progress_callback.
    with Progress(
        TextColumn("[blue]{task.fields[filename]}"),
        BarColumn(),
        DownloadColumn(),
        TransferSpeedColumn(),
    ) as progress:
        yield TransferCallbackS3Client(aws_access_key_id=aws_access_key_id,
                                       aws_secret_access_key=aws_secret_access_key,
                                       endpoint_url=endpoint_url,
                                       transfer_callback=make_progress_callback(progress))
|
|
@@ -0,0 +1,234 @@
|
|
|
1
|
+
import os
|
|
2
|
+
from collections.abc import Callable, Generator
|
|
3
|
+
|
|
4
|
+
import pyparsing as pp
|
|
5
|
+
from iker.common.utils.funcutils import singleton
|
|
6
|
+
from iker.common.utils.strutils import is_blank
|
|
7
|
+
|
|
8
|
+
__all__ = [
|
|
9
|
+
"volume_template_parser",
|
|
10
|
+
"make_volume_parser",
|
|
11
|
+
"make_volume_generator",
|
|
12
|
+
"collect_volumed_filenames",
|
|
13
|
+
"populate_volumed_filenames",
|
|
14
|
+
]
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
@singleton
def volume_template_parser() -> pp.ParserElement:
    """Return the (singleton) grammar matching one volume placeholder.

    A placeholder has the shape ``{name:prefix{}suffix}`` where ``name:`` is
    optional and ``prefix``/``suffix`` are any brace-free (possibly empty)
    character runs; the inner ``{}`` marks where the numeric volume goes.
    """
    lbrace_token = pp.Char("{")
    rbrace_token = pp.Char("}")
    colon_token = pp.Char(":")
    chars_token = pp.Regex(r"[^{}]*")  # brace-free run, may be empty
    name_token = pp.Regex(r"[a-z][a-z0-9_]*")

    # {  [name:]  prefix  {}  suffix  }
    parser_expr = pp.Combine(lbrace_token +
                             (name_token("name") + colon_token)[0, 1] +
                             chars_token("prefix") +
                             lbrace_token +
                             rbrace_token +
                             chars_token("suffix") +
                             rbrace_token)
    return parser_expr
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def make_trivial_volume_func() -> Callable[[str], int]:
    """
    Creates a trivial volume function that generates unique integer volumes for each name.

    Each distinct name owns an independent counter starting at zero; every
    call with the same name returns the previous value plus one, and a new
    name starts its own counter.

    :return: a callable function that takes a name (str) and returns the next integer volume.
    """
    counters: dict[str, int] = {}

    def volume_func(name: str) -> int:
        current = counters.get(name, 0)
        counters[name] = current + 1
        return current

    return volume_func
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def make_volume_parser(template: str) -> Callable[[str], dict[str, int]]:
    """
    Creates a parser function for extracting volume information from filenames based on a given template.

    The template defines the structure of filenames, including placeholders for volume values.
    The returned parser function takes a filename as input and extracts the volume values
    as a dictionary where the keys are the placeholder names and the values are the corresponding integers.

    :param template: a string template defining the filename structure with placeholders for volumes.
                     Placeholders are enclosed in double curly braces `{{}}` and can optionally include
                     a name, a prefix, and a suffix (e.g., `{name:prefix{}suffix}`).

    :return: a callable function that takes a filename (str) and returns a dictionary mapping
             placeholder names to their extracted integer values.

    :raises ValueError: if the template is invalid or cannot be parsed.
    """
    try:
        # Locate every placeholder occurrence with its span in the template.
        scan_result = list(volume_template_parser().scan_string(template, overlap=False))
    except pp.ParseException as e:
        raise ValueError(f"bad template '{template}'") from e

    # The numeric volume in a filename is one or more decimal digits.
    volume_token = pp.Regex(r"\d+")

    # Stitch a whole-filename grammar: literal text between placeholders,
    # named digit groups where placeholders sit.
    parser_expr = pp.Literal("")
    prev_end_pos = 0
    index = 0  # positional fallback name for anonymous placeholders
    for parse_results, begin_pos, end_pos in scan_result:
        if prev_end_pos < begin_pos:
            parser_expr = parser_expr + pp.Literal(template[prev_end_pos:begin_pos])
        prefix = parse_results.get("prefix")
        suffix = parse_results.get("suffix")
        name = parse_results.get("name")
        if is_blank(name):
            name = str(index)
            index += 1
        if not is_blank(prefix):
            parser_expr = parser_expr + pp.Literal(prefix)
        parser_expr = parser_expr + volume_token(name)
        if not is_blank(suffix):
            parser_expr = parser_expr + pp.Literal(suffix)
        prev_end_pos = end_pos
    if prev_end_pos < len(template):
        parser_expr = parser_expr + pp.Literal(template[prev_end_pos:])

    # Anchor so only complete filenames match.
    parser_expr = pp.Combine(pp.StringStart() + parser_expr + pp.StringEnd())

    def parser(s: str) -> dict[str, int]:
        # Raises pp.ParseException when `s` does not fit the template.
        parser_results = parser_expr.parse_string(s, parse_all=True)

        volumes = {}
        for name in parser_results.keys():
            volume = parser_results.get(name)
            volumes[name] = int(volume)

        return volumes

    return parser
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
def make_volume_generator(template: str) -> Callable[[Callable[[str], int]], tuple[str, dict[str, int]]]:
    """
    Creates a generator function for producing filenames and their associated volume values
    based on a given template.

    The template defines the structure of filenames, including placeholders for volume values.
    The returned function takes a volume function as input, which determines the
    volume values for each placeholder, and produces a filename with the corresponding volumes.
    (Despite the name, the returned callable produces one filename per call; it is not a
    Python generator.)

    :param template: a string template defining the filename structure with placeholders for volumes.
                     Placeholders are enclosed in double curly braces `{{}}` and can optionally include
                     a name, a prefix, and a suffix (e.g., `{name:prefix{}suffix}`).

    :return: a callable function that takes a volume function (Callable[[str], int]) as input
             and returns a tuple containing the generated filename (str) and a dictionary mapping
             placeholder names to their corresponding volume values (dict[str, int]).

    :raises ValueError: if the template is invalid or cannot be parsed.
    """
    try:
        # Locate every placeholder occurrence with its span in the template.
        scan_result = list(volume_template_parser().scan_string(template, overlap=False))
    except pp.ParseException as e:
        raise ValueError(f"bad template '{template}'") from e

    def generator(volume_func: Callable[[str], int]) -> tuple[str, dict[str, int]]:
        volumes = {}

        # Rebuild the filename: literal text is copied through, each
        # placeholder is replaced by prefix + volume + suffix.
        volume_expr = ""
        prev_end_pos = 0
        index = 0  # positional fallback name for anonymous placeholders
        for parse_results, begin_pos, end_pos in scan_result:
            if prev_end_pos < begin_pos:
                volume_expr = volume_expr + template[prev_end_pos:begin_pos]
            prefix = parse_results.get("prefix")
            suffix = parse_results.get("suffix")
            name = parse_results.get("name")
            if is_blank(name):
                name = str(index)
                index += 1
            if not is_blank(prefix):
                volume_expr = volume_expr + prefix
            volumes[name] = int(volume_func(name))
            volume_expr = volume_expr + str(volumes[name])
            if not is_blank(suffix):
                volume_expr = volume_expr + suffix
            prev_end_pos = end_pos
        if prev_end_pos < len(template):
            volume_expr = volume_expr + template[prev_end_pos:]

        return volume_expr, volumes

    return generator
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
def collect_volumed_filenames(template: str) -> Generator[tuple[str, dict[str, int]], None, None]:
    """
    Collects filenames in a folder that match a given template and extracts their volume information.

    The template defines the structure of filenames, including placeholders for volume values.
    This function scans the folder containing the template and attempts to parse filenames
    to extract volume values based on the template.

    :param template: a string template defining the filename structure with placeholders for volumes.
                     Placeholders are enclosed in double curly braces `{{}}` and can optionally include
                     a name, a prefix, and a suffix (e.g., `{name:prefix{}suffix}`).

    :return: a generator yielding tuples where the first element is the full path of the filename
             and the second element is a dictionary mapping placeholder names to their extracted
             integer volume values.
    """
    folder = os.path.dirname(template)
    basename = os.path.basename(template)

    parser = make_volume_parser(basename)

    # A bare-filename template has an empty dirname and os.listdir("") would
    # raise; list the current directory in that case, but keep joining with
    # the original (possibly empty) folder so yielded paths keep the
    # template's shape.
    for name in os.listdir(folder or "."):
        try:
            volumes = parser(name)
            yield os.path.join(folder, name), volumes
        except pp.ParseException:
            # Filenames that do not fit the template are skipped deliberately.
            pass
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
def populate_volumed_filenames(
    template: str,
    *,
    volume_func: Callable[[str], int] | None = None,
) -> Generator[tuple[str, dict[str, int]], None, None]:
    """
    Generates filenames and their associated volume values based on a given template.

    The template defines the structure of filenames, including placeholders for volume values.
    This function uses a volume function to generate unique volume values for each placeholder
    and produces filenames with the corresponding volumes.

    :param template: a string template defining the filename structure with placeholders for volumes.
                     Placeholders are enclosed in double curly braces `{{}}` and can optionally include
                     a name, a prefix, and a suffix (e.g., `{name:prefix{}suffix}`).
    :param volume_func: a callable function that takes a placeholder name (str) and returns the next
                        integer volume value. If not provided, a trivial volume function is used.

    :return: an infinite generator yielding tuples where the first element is the generated
             filename (str) and the second element is a dictionary mapping placeholder names
             to their corresponding integer volume values.
    """
    folder = os.path.dirname(template)
    basename = os.path.basename(template)

    generator = make_volume_generator(basename)
    volume_func = volume_func or make_trivial_volume_func()

    # Infinite stream: the caller decides when to stop consuming.
    while True:
        name, volumes = generator(volume_func)
        yield os.path.join(folder, name), volumes
|
|
@@ -0,0 +1,285 @@
|
|
|
1
|
+
import dataclasses
|
|
2
|
+
import datetime
|
|
3
|
+
import re
|
|
4
|
+
from collections.abc import Callable
|
|
5
|
+
|
|
6
|
+
import pyparsing as pp
|
|
7
|
+
from iker.common.utils.dtutils import basic_format, dt_format, dt_parse
|
|
8
|
+
from iker.common.utils.funcutils import singleton
|
|
9
|
+
|
|
10
|
+
# Public API: regex patterns (`*_pattern`), pyparsing grammars (`*_parser`),
# the name dataclasses and their parse helpers.
__all__ = [
    "hex_string_pattern",
    "hex_string_parser",
    "snake_case_pattern",
    "snake_case_parser",
    "kebab_case_pattern",
    "kebab_case_parser",
    "dot_case_pattern",
    "dot_case_parser",
    "strict_relpath_pattern",
    "strict_relpath_parser",
    "strict_abspath_pattern",
    "strict_abspath_parser",
    "uuid_pattern",
    "uuid_parser",
    "tag_pattern",
    "tag_parser",
    "topic_pattern",
    "topic_parser",
    "vin_code_chars",
    "vin_code_pattern",
    "vin_code_parser",
    "UserName",
    "VehicleName",
    "BagName",
    "parse_user_name",
    "parse_vehicle_name",
    "parse_bag_name",
]
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def token_check(cond: Callable[[str], bool]) -> Callable[[pp.ParseResults], bool]:
    """Adapt a plain string predicate into a pyparsing condition function.

    The returned callable extracts the first matched token from the
    ``ParseResults`` and applies ``cond`` to it.
    """

    def cond_func(results: pp.ParseResults) -> bool:
        first = results[0]
        return cond(first)

    return cond_func
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def token_reparse(elem: pp.ParserElement, negate: bool = False) -> Callable[[pp.ParseResults], bool]:
    """Build a pyparsing condition that re-parses the first matched token.

    The returned callable succeeds when ``elem`` fully matches the token
    (or, with ``negate=True``, when it does not).
    """

    def cond_func(results: pp.ParseResults) -> bool:
        first = results[0]
        try:
            elem.parse_string(first, parse_all=True)
        except pp.ParseException:
            return negate
        return not negate

    return cond_func
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def make_string_pattern(pattern: re.Pattern[str]) -> re.Pattern[str]:
    """Anchor *pattern* with ``^``/``$`` so it must span an entire string."""
    anchored = "^" + pattern.pattern + "$"
    return re.compile(anchored)
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def make_string_parser(element: pp.ParserElement) -> pp.ParserElement:
    """Anchor *element* between string start and end (whole-string match)."""
    return pp.Combine(pp.StringStart() + element + pp.StringEnd())
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
# --- Elementary character-class patterns, each with a parallel pyparsing
# --- element built from the same regex so the two stay in sync.
lowers_regexp: re.Pattern[str] = re.compile(r"[a-z]+")
uppers_regexp: re.Pattern[str] = re.compile(r"[A-Z]+")
digits_regexp: re.Pattern[str] = re.compile(r"[0-9]+")
lower_digits_regexp: re.Pattern[str] = re.compile(r"[a-z0-9]+")
upper_digits_regexp: re.Pattern[str] = re.compile(r"[A-Z0-9]+")
alpha_digits_regexp: re.Pattern[str] = re.compile(r"[a-zA-Z0-9]+")
hex_digits_regexp: re.Pattern[str] = re.compile(r"[a-f0-9]+")
lower_identifier_regexp: re.Pattern[str] = re.compile(r"[a-z][a-z0-9]*")
upper_identifier_regexp: re.Pattern[str] = re.compile(r"[A-Z][A-Z0-9]*")
strict_chars_regexp: re.Pattern[str] = re.compile(r"[a-zA-Z0-9._-]+")

lowers_element: pp.ParserElement = pp.Regex(lowers_regexp.pattern)
uppers_element: pp.ParserElement = pp.Regex(uppers_regexp.pattern)
digits_element: pp.ParserElement = pp.Regex(digits_regexp.pattern)
lower_digits_element: pp.ParserElement = pp.Regex(lower_digits_regexp.pattern)
upper_digits_element: pp.ParserElement = pp.Regex(upper_digits_regexp.pattern)
alpha_digits_element: pp.ParserElement = pp.Regex(alpha_digits_regexp.pattern)
hex_digits_element: pp.ParserElement = pp.Regex(hex_digits_regexp.pattern)
lower_identifier_element: pp.ParserElement = pp.Regex(lower_identifier_regexp.pattern)
upper_identifier_element: pp.ParserElement = pp.Regex(upper_identifier_regexp.pattern)
strict_chars_element: pp.ParserElement = pp.Regex(strict_chars_regexp.pattern)

# Single-character separator tokens used by the composed grammars below.
underscore_token: pp.ParserElement = pp.Char("_")
hyphen_token: pp.ParserElement = pp.Char("-")
period_token: pp.ParserElement = pp.Char(".")
colon_token: pp.ParserElement = pp.Char(":")
slash_token: pp.ParserElement = pp.Char("/")
plus_token: pp.ParserElement = pp.Char("+")

# ISO-8601 timestamps: basic (YYYYMMDDTHHMMSS) and extended forms.
basic_datetime_regexp: re.Pattern[str] = re.compile(r"\d{8}T\d{6}")
extended_datetime_regexp: re.Pattern[str] = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}")

basic_datetime_element: pp.ParserElement = pp.Regex(basic_datetime_regexp.pattern)
extended_datetime_element: pp.ParserElement = pp.Regex(extended_datetime_regexp.pattern)

# Non-negative integers without leading zeros; "positive" excludes zero.
number_regexp: re.Pattern[str] = re.compile(r"0|([1-9][0-9]*)")
positive_number_regexp: re.Pattern[str] = re.compile(r"[1-9][0-9]*")

number_element: pp.ParserElement = pp.Regex(number_regexp.pattern)
positive_number_element: pp.ParserElement = pp.Regex(positive_number_regexp.pattern)

# Lower-case words joined by "_", "-" or "." respectively.
snake_case_regexp: re.Pattern[str] = re.compile(
    rf"{lower_digits_regexp.pattern}(?:_{lower_digits_regexp.pattern})*")
kebab_case_regexp: re.Pattern[str] = re.compile(
    rf"{lower_digits_regexp.pattern}(?:-{lower_digits_regexp.pattern})*")
dot_case_regexp: re.Pattern[str] = re.compile(
    rf"{lower_digits_regexp.pattern}(?:\.{lower_digits_regexp.pattern})*")

snake_case_element: pp.ParserElement = pp.Combine(
    lower_digits_element + (underscore_token + lower_digits_element)[...])
kebab_case_element: pp.ParserElement = pp.Combine(
    lower_digits_element + (hyphen_token + lower_digits_element)[...])
dot_case_element: pp.ParserElement = pp.Combine(
    lower_digits_element + (period_token + lower_digits_element)[...])

# Canonical lower-case hyphenated UUID (8-4-4-4-12 hex digits).
uuid_regexp: re.Pattern[str] = re.compile(r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}")
uuid_element: pp.ParserElement = pp.Regex(uuid_regexp.pattern)

# Slash-separated paths over the strict character set; the lookahead rejects
# any path segment that is purely dots ("." / ".." traversal components).
strict_relpath_regexp: re.Pattern[str] = re.compile(
    rf"(?!.*(^|/)\.+($|/))(?:{strict_chars_regexp.pattern}/)*(?:{strict_chars_regexp.pattern})?")
strict_abspath_regexp: re.Pattern[str] = re.compile(
    rf"(?!.*(^|/)\.+($|/))/(?:{strict_chars_regexp.pattern}/)*(?:{strict_chars_regexp.pattern})?")

# Same dots-only rejection for the pyparsing side, via a re-parse condition.
strict_path_chars_element = strict_chars_element.copy()
strict_path_chars_element.add_condition(token_reparse(period_token[1, ...], negate=True),
                                        message="cannot be pure dots")

strict_relpath_element: pp.ParserElement = pp.Combine(
    (strict_path_chars_element + slash_token)[...] + strict_path_chars_element[0, 1])
strict_abspath_element: pp.ParserElement = pp.Combine(
    slash_token + (strict_path_chars_element + slash_token)[...] + strict_path_chars_element[0, 1])

# Tags are snake-case segments joined by "::" namespacing.
tag_regexp: re.Pattern[str] = re.compile(rf"{snake_case_regexp.pattern}(?:::{snake_case_regexp.pattern})*")
tag_element: pp.ParserElement = pp.Combine(snake_case_element + (colon_token + colon_token + snake_case_element)[...])

# Topics are one or more "/"-prefixed snake-case segments (ROS-style).
topic_regexp: re.Pattern[str] = re.compile(rf"(?:/{snake_case_regexp.pattern})+")
topic_element: pp.ParserElement = pp.Combine((slash_token + snake_case_element)[1, ...])

# VIN alphabet: digits plus upper-case letters excluding I, O and Q.
vin_code_chars: str = "0123456789ABCDEFGHJKLMNPRSTUVWXYZ"

vin_code_regexp: re.Pattern[str] = re.compile(rf"[{vin_code_chars}]{{17}}")
vin_code_element: pp.ParserElement = pp.Regex(vin_code_regexp.pattern)

# --- Anchored, whole-string variants exported in __all__.
hex_string_pattern = make_string_pattern(hex_digits_regexp)
hex_string_parser = make_string_parser(hex_digits_element)

snake_case_pattern = make_string_pattern(snake_case_regexp)
snake_case_parser = make_string_parser(snake_case_element)
kebab_case_pattern = make_string_pattern(kebab_case_regexp)
kebab_case_parser = make_string_parser(kebab_case_element)
dot_case_pattern = make_string_pattern(dot_case_regexp)
dot_case_parser = make_string_parser(dot_case_element)

uuid_pattern = make_string_pattern(uuid_regexp)
uuid_parser = make_string_parser(uuid_element)

strict_relpath_pattern = make_string_pattern(strict_relpath_regexp)
strict_relpath_parser = make_string_parser(strict_relpath_element)
strict_abspath_pattern = make_string_pattern(strict_abspath_regexp)
strict_abspath_parser = make_string_parser(strict_abspath_element)

tag_pattern = make_string_pattern(tag_regexp)
tag_parser = make_string_parser(tag_element)
topic_pattern = make_string_pattern(topic_regexp)
topic_parser = make_string_parser(topic_element)
vin_code_pattern = make_string_pattern(vin_code_regexp)
vin_code_parser = make_string_parser(vin_code_element)
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
@dataclasses.dataclass(frozen=True, eq=True)
class UserName(object):
    """Immutable user name: first name, last name and an optional serial number."""

    first_name: str
    last_name: str
    sn: int = 0

    def __str__(self) -> str:
        # A zero serial number is omitted from the rendered form.
        sn_part = str(self.sn) if self.sn != 0 else ""
        return f"{self.first_name}{sn_part}.{self.last_name}"
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
@dataclasses.dataclass(frozen=True, eq=True)
|
|
191
|
+
class VehicleName(object):
|
|
192
|
+
brand: str
|
|
193
|
+
alias: str
|
|
194
|
+
code: str | None = None
|
|
195
|
+
vin: str | None = None
|
|
196
|
+
|
|
197
|
+
def __str__(self) -> str:
|
|
198
|
+
if self.code and self.vin:
|
|
199
|
+
return f"{self.brand}_{self.alias}_{self.code}_V{self.vin}"
|
|
200
|
+
if self.code:
|
|
201
|
+
return f"{self.brand}_{self.alias}_{self.code}"
|
|
202
|
+
if self.vin:
|
|
203
|
+
return f"{self.brand}_{self.alias}_V{self.vin}"
|
|
204
|
+
return f"{self.brand}_{self.alias}"
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
@dataclasses.dataclass(frozen=True, eq=True)
class BagName(object):
    """Immutable bag-file identity: vehicle, recording timestamp and serial."""

    vehicle_name: VehicleName
    record_dt: datetime.datetime
    record_sn: int

    def __str__(self) -> str:
        # "<basic-format timestamp>-<vehicle name>-<serial>.bag"
        timestamp = dt_format(self.record_dt, basic_format())
        return f"{timestamp}-{self.vehicle_name}-{self.record_sn}.bag"
|
|
215
|
+
|
|
216
|
+
|
|
217
|
+
@singleton
def get_user_name_parser() -> pp.ParserElement:
    """Return the (singleton) whole-string grammar for ``first[sn].last``.

    The serial number is optional; when present it is a positive integer
    glued to the first name (e.g. ``john2.doe``).
    """
    element = pp.Combine(lowers_element("first_name") +
                         positive_number_element("sn")[0, 1] +
                         period_token +
                         lowers_element("last_name"))
    return make_string_parser(element)
|
|
224
|
+
|
|
225
|
+
|
|
226
|
+
@singleton
def get_vehicle_name_parser() -> pp.ParserElement:
    """Return the (singleton) whole-string grammar for vehicle names.

    Shape: ``brand_alias[_code][_Vvin]`` where the alias may itself contain
    underscores, the code is all digits, and the VIN is 17 characters
    prefixed with a literal ``V``.
    """
    element = pp.Combine(lower_identifier_element("brand") +
                         (underscore_token +
                          pp.Combine(lower_identifier_element +
                                     (underscore_token + lower_identifier_element)[...])("alias")) +
                         (underscore_token + digits_element("code"))[0, 1] +
                         (underscore_token + pp.Char("V") + vin_code_element("vin"))[0, 1])
    return make_string_parser(element)
|
|
235
|
+
|
|
236
|
+
|
|
237
|
+
@singleton
def get_bag_name_parser() -> pp.ParserElement:
    """Return the (singleton) whole-string grammar for bag filenames.

    Shape: ``<basic datetime>-<vehicle name>-<serial>[.bag]``.

    NOTE(review): the vehicle-name sub-grammar is duplicated from
    :func:`get_vehicle_name_parser` (wrapped in a Group here); keep the two in
    sync when the naming scheme changes.
    """
    element = pp.Combine(basic_datetime_element("record_dt") +
                         (hyphen_token +
                          pp.Group(lower_identifier_element("brand") +
                                   (underscore_token +
                                    pp.Combine(lower_identifier_element +
                                               (underscore_token + lower_identifier_element)[...])("alias")) +
                                   (underscore_token + digits_element("code"))[0, 1] +
                                   (underscore_token + pp.Char("V") + vin_code_element("vin"))[0, 1])("vehicle_name")) +
                         (hyphen_token + number_element("record_sn")) +
                         (period_token + pp.Literal("bag"))[0, 1])
    return make_string_parser(element)
|
|
250
|
+
|
|
251
|
+
|
|
252
|
+
def parse_user_name(s: str) -> UserName | None:
    """Parse ``first[sn].last`` into a :class:`UserName`.

    :param s: the string to parse.
    :return: the parsed name, or ``None`` when *s* does not match — the
             declared ``UserName | None`` return type; previously a
             ``pp.ParseException`` leaked to the caller instead.
    """
    try:
        user_name_match = get_user_name_parser().parse_string(s, parse_all=True)
    except pp.ParseException:
        return None

    return UserName(
        user_name_match.get("first_name"),
        user_name_match.get("last_name"),
        int(user_name_match.get("sn", 0)),
    )
|
|
260
|
+
|
|
261
|
+
|
|
262
|
+
def parse_vehicle_name(s: str) -> VehicleName | None:
    """Parse ``brand_alias[_code][_Vvin]`` into a :class:`VehicleName`.

    :param s: the string to parse.
    :return: the parsed name, or ``None`` when *s* does not match — the
             declared ``VehicleName | None`` return type; previously a
             ``pp.ParseException`` leaked to the caller instead.
    """
    try:
        vehicle_name_match = get_vehicle_name_parser().parse_string(s, parse_all=True)
    except pp.ParseException:
        return None

    return VehicleName(
        vehicle_name_match.get("brand"),
        vehicle_name_match.get("alias"),
        vehicle_name_match.get("code"),
        vehicle_name_match.get("vin"),
    )
|
|
271
|
+
|
|
272
|
+
|
|
273
|
+
def parse_bag_name(s: str) -> BagName | None:
    """Parse ``<datetime>-<vehicle>-<sn>[.bag]`` into a :class:`BagName`.

    :param s: the string to parse.
    :return: the parsed name, or ``None`` when *s* does not match — the
             declared ``BagName | None`` return type; previously a
             ``pp.ParseException`` leaked to the caller instead.
    """
    try:
        bag_name_match = get_bag_name_parser().parse_string(s, parse_all=True)
    except pp.ParseException:
        return None

    # The vehicle portion is a grouped sub-result; fetch it once.
    vehicle_name_match = bag_name_match.get("vehicle_name")

    return BagName(
        VehicleName(
            vehicle_name_match.get("brand"),
            vehicle_name_match.get("alias"),
            vehicle_name_match.get("code"),
            vehicle_name_match.get("vin"),
        ),
        dt_parse(bag_name_match.get("record_dt"), basic_format()),
        int(bag_name_match.get("record_sn")),
    )
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: plexus-python-common
|
|
3
|
+
Version: 1.0.7
|
|
4
|
+
Classifier: Programming Language :: Python :: 3
|
|
5
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
6
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
7
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
8
|
+
Requires-Python: <3.14,>=3.11
|
|
9
|
+
Requires-Dist: cloudpathlib>=0.21
|
|
10
|
+
Requires-Dist: lxml>=4.9
|
|
11
|
+
Requires-Dist: numpy>=1.26
|
|
12
|
+
Requires-Dist: pydantic>=2.8
|
|
13
|
+
Requires-Dist: pygeodesy>=24.10
|
|
14
|
+
Requires-Dist: pyparsing>=3.2
|
|
15
|
+
Requires-Dist: pyproj>=3.6
|
|
16
|
+
Requires-Dist: pyquaternion>=0.9
|
|
17
|
+
Requires-Dist: sqlalchemy>=2.0
|
|
18
|
+
Requires-Dist: sqlmodel
|
|
19
|
+
Requires-Dist: rich>=13.9
|
|
20
|
+
Requires-Dist: ujson>=5.9
|
|
21
|
+
Requires-Dist: iker-python-common[all]>=1.0
|
|
22
|
+
Provides-Extra: all
|
|
23
|
+
Requires-Dist: plexus-python-common; extra == "all"
|
|
24
|
+
Provides-Extra: test
|
|
25
|
+
Requires-Dist: ddt>=1.7; extra == "test"
|
|
26
|
+
Requires-Dist: pytest-cov>=5.0; extra == "test"
|
|
27
|
+
Requires-Dist: pytest-order>=1.3; extra == "test"
|
|
28
|
+
Requires-Dist: pytest-postgresql>=6.1; extra == "test"
|
|
29
|
+
Requires-Dist: pytest>=8.3; extra == "test"
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
plexus/common/__init__.py,sha256=uWSQ4w3n7-zO3xJA1MTde4zbyZ73W8eVQ2EcoDzHwqc,177
|
|
2
|
+
plexus/common/config.py,sha256=kbJAkhAF1E8kGjp-0hrDlWav5XjpsDTDPjZS0L0R5MQ,2960
|
|
3
|
+
plexus/common/pose.py,sha256=abF-kOZIApFoTaqBGWDAQne75y3dv7RpFmHiNO-drIM,2850
|
|
4
|
+
plexus/common/proj.py,sha256=qs3UFLmZaDyxentQgK7suSqZ8_KXYSvvLQhuKzDBJlQ,8742
|
|
5
|
+
plexus/common/carto/OSMFile.py,sha256=yKG0ImkUNOrOs6oiEANDKZ6KaQCTEaPSolYMVLACVB0,8136
|
|
6
|
+
plexus/common/carto/OSMNode.py,sha256=XJkp-tD945tFz3MWO5v3IdGfsjZy3YZE_yaBNc4dTdM,593
|
|
7
|
+
plexus/common/carto/OSMTags.py,sha256=wGtdfl1gzuXMk75xU2cyeF6hwFdKADxq77fah7vrFwg,3450
|
|
8
|
+
plexus/common/carto/OSMWay.py,sha256=zqSUzn_evoYwG51ZbSOCBbzHKQ2fFIDcaToYE9SVKxE,562
|
|
9
|
+
plexus/common/carto/__init__.py,sha256=rmUEoSB4Rq3xmFGD16Y7zkjWpzbkxJgZEXt2Vnqxhws,264
|
|
10
|
+
plexus/common/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
11
|
+
plexus/common/utils/bagutils.py,sha256=SzlSaAVUJeIXvQH8PFPV7CJHxspBfWafPQn8_TXRr9o,7326
|
|
12
|
+
plexus/common/utils/datautils.py,sha256=Hb7wYvMS1XUBh3RVxuGypuZ-JYm37_cWfRtD1Whv4fQ,7972
|
|
13
|
+
plexus/common/utils/jsonutils.py,sha256=bhaB6mT-otPhBptoxAU_IG7fBYvGVSTYLhC_r_H5JNI,3243
|
|
14
|
+
plexus/common/utils/ormutils.py,sha256=lhZvzZsr5Ann0_Vc1XNrDKo_C8JaesN8L5eaww_zFeo,12779
|
|
15
|
+
plexus/common/utils/s3utils.py,sha256=DCMv_GbMg8Z_xhi32zLAlr5ja08m4JJcKnXY9-ufZTA,4271
|
|
16
|
+
plexus/common/utils/shutils.py,sha256=hGJqSLj08tuOeZ7WeC5d4BtjnPI732BuntVQBQsqOaI,9581
|
|
17
|
+
plexus/common/utils/strutils.py,sha256=unZgCiiE3WOBrZ6O4G_YEuSCk9peJLsfd6vrGRiDRIo,11445
|
|
18
|
+
plexus_python_common-1.0.7.dist-info/METADATA,sha256=WBIINeZpFqrnBZ6MNrk5j340U7QUXKvQ7znWTL6rpvY,1014
|
|
19
|
+
plexus_python_common-1.0.7.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
20
|
+
plexus_python_common-1.0.7.dist-info/top_level.txt,sha256=ug_g7CVwaMQuas5UzAXbHUrQvKGCn8ezc6ZNvvRlJOE,7
|
|
21
|
+
plexus_python_common-1.0.7.dist-info/RECORD,,
|