plexus-python-common 1.0.31__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,234 @@
1
+ import os
2
+ from collections.abc import Callable, Generator
3
+
4
+ import pyparsing as pp
5
+ from iker.common.utils.funcutils import singleton
6
+ from iker.common.utils.strutils import is_blank
7
+
8
+ __all__ = [
9
+ "volume_template_parser",
10
+ "make_volume_parser",
11
+ "make_volume_generator",
12
+ "collect_volumed_filenames",
13
+ "populate_volumed_filenames",
14
+ ]
15
+
16
+
17
@singleton
def volume_template_parser() -> pp.ParserElement:
    """
    Builds (once, thanks to ``singleton``) the parser element that recognizes a
    volume placeholder of the form ``{name:prefix{}suffix}`` inside a template.

    The ``name:`` part is optional; ``prefix`` and ``suffix`` may be empty but
    must not contain braces. The inner ``{}`` marks where the numeric volume
    value goes.

    :return: the pyparsing element with ``name``, ``prefix`` and ``suffix``
        result names set.
    """
    lbrace_token = pp.Char("{")
    rbrace_token = pp.Char("}")
    colon_token = pp.Char(":")
    # Any run of characters without braces; may be empty.
    chars_token = pp.Regex(r"[^{}]*")
    # Placeholder names are lower snake-case identifiers starting with a letter.
    name_token = pp.Regex(r"[a-z][a-z0-9_]*")

    # `[0, 1]` makes the `name:` group optional.
    parser_expr = pp.Combine(lbrace_token +
                             (name_token("name") + colon_token)[0, 1] +
                             chars_token("prefix") +
                             lbrace_token +
                             rbrace_token +
                             chars_token("suffix") +
                             rbrace_token)
    return parser_expr
33
+
34
+
35
def make_trivial_volume_func() -> Callable[[str], int]:
    """
    Creates a trivial volume function that generates unique integer volumes for each name.

    The function keeps an independent counter per name, starting at 0, and
    returns then increments that counter each time it is called with the same
    name. A new name starts its own counter at 0.

    :return: a callable function that takes a name (str) and returns the next integer volume.
    """
    # Plain per-name integer counters; simpler than (but observably identical
    # to) keeping one infinite generator object per name.
    counters: dict[str, int] = {}

    def volume_func(name: str) -> int:
        volume = counters.get(name, 0)
        counters[name] = volume + 1
        return volume

    return volume_func
60
+
61
+
62
def make_volume_parser(template: str) -> Callable[[str], dict[str, int]]:
    """
    Creates a parser function for extracting volume information from filenames based on a given template.

    The template defines the structure of filenames, including placeholders for volume values.
    The returned parser function takes a filename as input and extracts the volume values
    as a dictionary where the keys are the placeholder names and the values are the corresponding integers.

    :param template: a string template defining the filename structure with placeholders for volumes.
        Placeholders are enclosed in double curly braces `{{}}` and can optionally include
        a name, a prefix, and a suffix (e.g., `{name:prefix{}suffix}`).

    :return: a callable function that takes a filename (str) and returns a dictionary mapping
        placeholder names to their extracted integer values. The returned function raises
        ``pp.ParseException`` when the filename does not match the template.

    :raises ValueError: if the template is invalid or cannot be parsed.
    """
    try:
        # Locate every placeholder occurrence together with its span in the template.
        scan_result = list(volume_template_parser().scan_string(template, overlap=False))
    except pp.ParseException as e:
        raise ValueError(f"bad template '{template}'") from e

    # The numeric value standing in for each placeholder.
    volume_token = pp.Regex(r"\d+")

    # Assemble a parser mirroring the template: literal text between
    # placeholders, and a named integer token for each placeholder.
    parser_expr = pp.Literal("")
    prev_end_pos = 0
    index = 0  # fallback sequential name for anonymous placeholders
    for parse_results, begin_pos, end_pos in scan_result:
        if prev_end_pos < begin_pos:
            # Literal text preceding this placeholder.
            parser_expr = parser_expr + pp.Literal(template[prev_end_pos:begin_pos])
        prefix = parse_results.get("prefix")
        suffix = parse_results.get("suffix")
        name = parse_results.get("name")
        if is_blank(name):
            # Anonymous placeholders are named "0", "1", ... in encounter order.
            name = str(index)
            index += 1
        if not is_blank(prefix):
            parser_expr = parser_expr + pp.Literal(prefix)
        parser_expr = parser_expr + volume_token(name)
        if not is_blank(suffix):
            parser_expr = parser_expr + pp.Literal(suffix)
        prev_end_pos = end_pos
    if prev_end_pos < len(template):
        # Trailing literal text after the last placeholder.
        parser_expr = parser_expr + pp.Literal(template[prev_end_pos:])

    # Anchor so the whole filename must match, not just a prefix.
    parser_expr = pp.Combine(pp.StringStart() + parser_expr + pp.StringEnd())

    def parser(s: str) -> dict[str, int]:
        parser_results = parser_expr.parse_string(s, parse_all=True)

        volumes = {}
        for name in parser_results.keys():
            volume = parser_results.get(name)
            volumes[name] = int(volume)

        return volumes

    return parser
120
+
121
+
122
def make_volume_generator(template: str) -> Callable[[Callable[[str], int]], tuple[str, dict[str, int]]]:
    """
    Creates a generator function for producing filenames and their associated volume values
    based on a given template.

    The template defines the structure of filenames, including placeholders for volume values.
    The returned generator function takes a volume function as input, which determines the
    volume values for each placeholder, and produces filenames with the corresponding volumes.

    :param template: a string template defining the filename structure with placeholders for volumes.
        Placeholders are enclosed in double curly braces `{{}}` and can optionally include
        a name, a prefix, and a suffix (e.g., `{name:prefix{}suffix}`).

    :return: a callable generator function that takes a volume function (Callable[[str], int]) as input
        and returns a tuple containing the generated filename (str) and a dictionary mapping
        placeholder names to their corresponding volume values (dict[str, int]).

    :raises ValueError: if the template is invalid or cannot be parsed.
    """
    try:
        # Locate every placeholder occurrence together with its span in the template.
        scan_result = list(volume_template_parser().scan_string(template, overlap=False))
    except pp.ParseException as e:
        raise ValueError(f"bad template '{template}'") from e

    def generator(volume_func: Callable[[str], int]) -> tuple[str, dict[str, int]]:
        volumes = {}

        # Rebuild the template text, substituting each placeholder with the
        # volume value supplied by `volume_func`.
        volume_expr = ""
        prev_end_pos = 0
        index = 0  # fallback sequential name for anonymous placeholders
        for parse_results, begin_pos, end_pos in scan_result:
            if prev_end_pos < begin_pos:
                # Literal text preceding this placeholder.
                volume_expr = volume_expr + template[prev_end_pos:begin_pos]
            prefix = parse_results.get("prefix")
            suffix = parse_results.get("suffix")
            name = parse_results.get("name")
            if is_blank(name):
                # Anonymous placeholders are named "0", "1", ... in encounter order;
                # must match the naming used by `make_volume_parser`.
                name = str(index)
                index += 1
            if not is_blank(prefix):
                volume_expr = volume_expr + prefix
            volumes[name] = int(volume_func(name))
            volume_expr = volume_expr + str(volumes[name])
            if not is_blank(suffix):
                volume_expr = volume_expr + suffix
            prev_end_pos = end_pos
        if prev_end_pos < len(template):
            # Trailing literal text after the last placeholder.
            volume_expr = volume_expr + template[prev_end_pos:]

        return volume_expr, volumes

    return generator
174
+
175
+
176
def collect_volumed_filenames(template: str) -> Generator[tuple[str, dict[str, int]], None, None]:
    """
    Collects filenames in a folder that match a given template and extracts their volume information.

    The template defines the structure of filenames, including placeholders for volume values.
    This function scans the folder containing the template and attempts to parse filenames
    to extract volume values based on the template.

    :param template: a string template defining the filename structure with placeholders for volumes.
        Placeholders are enclosed in double curly braces `{{}}` and can optionally include
        a name, a prefix, and a suffix (e.g., `{name:prefix{}suffix}`).

    :return: a generator yielding tuples where the first element is the full path of the filename
        and the second element is a dictionary mapping placeholder names to their extracted
        integer volume values.

    :raises ValueError: if the template is invalid or cannot be parsed.
    """
    folder = os.path.dirname(template)
    basename = os.path.basename(template)

    parser = make_volume_parser(basename)

    # A template without a directory component yields folder == "", and
    # os.listdir("") raises FileNotFoundError; fall back to the current
    # directory in that case. Joined paths still use the original `folder`
    # so returned paths keep the template's form.
    for name in os.listdir(folder or os.curdir):
        try:
            volumes = parser(name)
            yield os.path.join(folder, name), volumes
        except pp.ParseException:
            # Files that do not match the template are simply skipped.
            pass
203
+
204
+
205
def populate_volumed_filenames(
    template: str,
    *,
    volume_func: Callable[[str], int] | None = None,
) -> Generator[tuple[str, dict[str, int]], None, None]:
    """
    Generates filenames and their associated volume values based on a given template.

    The template defines the structure of filenames, including placeholders for volume values.
    This function uses a volume function to generate unique volume values for each placeholder
    and produces filenames with the corresponding volumes. The generator is infinite; the
    caller decides when to stop consuming it.

    :param template: a string template defining the filename structure with placeholders for volumes.
        Placeholders are enclosed in double curly braces `{{}}` and can optionally include
        a name, a prefix, and a suffix (e.g., `{name:prefix{}suffix}`).
    :param volume_func: a callable function that takes a placeholder name (str) and returns the next
        integer volume value. If not provided, a trivial volume function is used.

    :return: a generator yielding tuples where the first element is the generated filename (str)
        and the second element is a dictionary mapping placeholder names to their corresponding
        integer volume values.

    :raises ValueError: if the template is invalid or cannot be parsed.
    """
    folder = os.path.dirname(template)
    basename = os.path.basename(template)

    generator = make_volume_generator(basename)
    # Default to independent 0-based counters per placeholder name.
    volume_func = volume_func or make_trivial_volume_func()

    while True:
        name, volumes = generator(volume_func)
        yield os.path.join(folder, name), volumes
@@ -0,0 +1,9 @@
1
+ __all__ = [
2
+ "escape_sql_like",
3
+ ]
4
+
5
+
6
+ def escape_sql_like(s: str | None) -> str:
7
+ if s is None:
8
+ raise ValueError("input string cannot be None")
9
+ return s.replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_")
@@ -0,0 +1,382 @@
1
+ import dataclasses
2
+ import datetime
3
+ import re
4
+ from collections.abc import Callable
5
+
6
+ import pyparsing as pp
7
+ from iker.common.utils.dtutils import basic_format, dt_format, dt_parse
8
+ from iker.common.utils.funcutils import singleton
9
+
10
+ __all__ = [
11
+ "hex_string_pattern",
12
+ "hex_string_parser",
13
+ "snake_case_pattern",
14
+ "snake_case_parser",
15
+ "kebab_case_pattern",
16
+ "kebab_case_parser",
17
+ "dot_case_pattern",
18
+ "dot_case_parser",
19
+ "uuid_pattern",
20
+ "uuid_parser",
21
+ "strict_relpath_pattern",
22
+ "strict_relpath_parser",
23
+ "strict_abspath_pattern",
24
+ "strict_abspath_parser",
25
+ "email_address_pattern",
26
+ "email_address_parser",
27
+ "semver_pattern",
28
+ "semver_parser",
29
+ "colon_tag_pattern",
30
+ "colon_tag_parser",
31
+ "slash_tag_pattern",
32
+ "slash_tag_parser",
33
+ "topic_pattern",
34
+ "topic_parser",
35
+ "vin_code_chars",
36
+ "vin_code_pattern",
37
+ "vin_code_parser",
38
+ "to_colon_tag",
39
+ "to_slash_tag",
40
+ "UserName",
41
+ "UserEmail",
42
+ "VehicleName",
43
+ "BagName",
44
+ "parse_user_name",
45
+ "parse_user_email",
46
+ "parse_vehicle_name",
47
+ "parse_bag_name",
48
+ ]
49
+
50
+
51
def token_check(cond: Callable[[str], bool]) -> Callable[[pp.ParseResults], bool]:
    """
    Adapts a plain string predicate into a pyparsing parse-results condition.

    :param cond: predicate evaluated against the first parsed token.
    :return: a condition function suitable for ``ParserElement.add_condition``.
    """

    def cond_func(results: pp.ParseResults) -> bool:
        head, *_ = results
        return cond(head)

    return cond_func
57
+
58
+
59
def token_reparse(elem: pp.ParserElement, negate: bool = False) -> Callable[[pp.ParseResults], bool]:
    """
    Builds a parse-results condition that accepts the first token when it can
    be fully parsed by ``elem`` (or when it cannot, if ``negate`` is True).

    :param elem: the element the token must (or must not) fully match.
    :param negate: invert the acceptance criterion.
    :return: a condition function suitable for ``ParserElement.add_condition``.
    """

    def cond_func(results: pp.ParseResults) -> bool:
        head, *_ = results
        try:
            elem.parse_string(head, parse_all=True)
        except pp.ParseException:
            return negate
        return not negate

    return cond_func
69
+
70
+
71
def make_string_pattern(pattern: re.Pattern[str]) -> re.Pattern[str]:
    """
    Anchors the given pattern with ``^``/``$`` so it must span the whole string.

    :param pattern: the unanchored compiled pattern.
    :return: a new compiled pattern matching entire strings only.
    """
    anchored = "".join(("^", pattern.pattern, "$"))
    return re.compile(anchored)
73
+
74
+
75
def make_string_parser(element: pp.ParserElement) -> pp.ParserElement:
    """
    Anchors the given parser element so it must span the whole string.

    :param element: the unanchored parser element.
    :return: the anchored, combined parser element.
    """
    anchored = pp.StringStart() + element + pp.StringEnd()
    return pp.Combine(anchored)
77
+
78
+
79
# --- Single-character punctuation tokens shared by the grammars below. ---
underscore_token: pp.ParserElement = pp.Char("_")
hyphen_token: pp.ParserElement = pp.Char("-")
period_token: pp.ParserElement = pp.Char(".")
colon_token: pp.ParserElement = pp.Char(":")
slash_token: pp.ParserElement = pp.Char("/")
plus_token: pp.ParserElement = pp.Char("+")

# --- Single-character classes (regex form). ---
lower_regexp: re.Pattern[str] = re.compile(r"[a-z]")
upper_regexp: re.Pattern[str] = re.compile(r"[A-Z]")
digit_regexp: re.Pattern[str] = re.compile(r"[0-9]")
lower_digit_regexp: re.Pattern[str] = re.compile(r"[a-z0-9]")
upper_digit_regexp: re.Pattern[str] = re.compile(r"[A-Z0-9]")
alpha_digit_regexp: re.Pattern[str] = re.compile(r"[a-f0-9]" if False else r"[a-zA-Z0-9]")
hex_digit_regexp: re.Pattern[str] = re.compile(r"[a-f0-9]")

# --- One-or-more runs of the classes above, plus identifier-like shapes. ---
lowers_regexp: re.Pattern[str] = re.compile(rf"{lower_regexp.pattern}+")
uppers_regexp: re.Pattern[str] = re.compile(rf"{upper_regexp.pattern}+")
digits_regexp: re.Pattern[str] = re.compile(rf"{digit_regexp.pattern}+")
lower_digits_regexp: re.Pattern[str] = re.compile(rf"{lower_digit_regexp.pattern}+")
upper_digits_regexp: re.Pattern[str] = re.compile(rf"{upper_digit_regexp.pattern}+")
alpha_digits_regexp: re.Pattern[str] = re.compile(rf"{alpha_digit_regexp.pattern}+")
hex_digits_regexp: re.Pattern[str] = re.compile(rf"{hex_digit_regexp.pattern}+")
# Identifiers start with a letter, then letters/digits of the same case.
lower_identifier_regexp: re.Pattern[str] = re.compile(rf"{lower_regexp.pattern}{lower_digit_regexp.pattern}*")
upper_identifier_regexp: re.Pattern[str] = re.compile(rf"{upper_regexp.pattern}{upper_digit_regexp.pattern}*")
# Characters allowed in strict path segments: alphanumerics plus ".", "_", "-".
strict_chars_regexp: re.Pattern[str] = re.compile(rf"({alpha_digit_regexp.pattern}|[._-])+")

# --- pyparsing mirrors of the single-character classes. ---
lower_element: pp.ParserElement = pp.Regex(lower_regexp.pattern)
upper_element: pp.ParserElement = pp.Regex(upper_regexp.pattern)
digit_element: pp.ParserElement = pp.Regex(digit_regexp.pattern)
lower_digit_element: pp.ParserElement = pp.Regex(lower_digit_regexp.pattern)
upper_digit_element: pp.ParserElement = pp.Regex(upper_digit_regexp.pattern)
alpha_digit_element: pp.ParserElement = pp.Regex(alpha_digit_regexp.pattern)
hex_digit_element: pp.ParserElement = pp.Regex(hex_digit_regexp.pattern)

# --- pyparsing mirrors of the repeated classes and identifiers. ---
lowers_element: pp.ParserElement = pp.Regex(lowers_regexp.pattern)
uppers_element: pp.ParserElement = pp.Regex(uppers_regexp.pattern)
digits_element: pp.ParserElement = pp.Regex(digits_regexp.pattern)
lower_digits_element: pp.ParserElement = pp.Regex(lower_digits_regexp.pattern)
upper_digits_element: pp.ParserElement = pp.Regex(upper_digits_regexp.pattern)
alpha_digits_element: pp.ParserElement = pp.Regex(alpha_digits_regexp.pattern)
hex_digits_element: pp.ParserElement = pp.Regex(hex_digits_regexp.pattern)
lower_identifier_element: pp.ParserElement = pp.Regex(lower_identifier_regexp.pattern)
upper_identifier_element: pp.ParserElement = pp.Regex(upper_identifier_regexp.pattern)
strict_chars_element: pp.ParserElement = pp.Regex(strict_chars_regexp.pattern)
123
+
124
# ISO 8601 "basic" (e.g. 20240101T120000) and "extended" (2024-01-01T12:00:00)
# date-time shapes, digits only — no validation of field ranges.
basic_datetime_regexp: re.Pattern[str] = re.compile(r"\d{8}T\d{6}")
extended_datetime_regexp: re.Pattern[str] = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}")

basic_datetime_element: pp.ParserElement = pp.Regex(basic_datetime_regexp.pattern)
extended_datetime_element: pp.ParserElement = pp.Regex(extended_datetime_regexp.pattern)

# Non-negative decimal integers without leading zeros ("0" or [1-9][0-9]*).
positive_number_regexp: re.Pattern[str] = re.compile(rf"[1-9]{digit_regexp.pattern}*")
number_regexp: re.Pattern[str] = re.compile(rf"0|({positive_number_regexp.pattern})")

positive_number_element: pp.ParserElement = pp.Regex(positive_number_regexp.pattern)
number_element: pp.ParserElement = pp.Regex(number_regexp.pattern)
135
+
136
# --- Delimited lowercase-alphanumeric words: snake_case, kebab-case, dot.case. ---
snake_case_regexp: re.Pattern[str] = re.compile(
    rf"{lower_digits_regexp.pattern}(?:_{lower_digits_regexp.pattern})*")
kebab_case_regexp: re.Pattern[str] = re.compile(
    rf"{lower_digits_regexp.pattern}(?:-{lower_digits_regexp.pattern})*")
dot_case_regexp: re.Pattern[str] = re.compile(
    rf"{lower_digits_regexp.pattern}(?:\.{lower_digits_regexp.pattern})*")

snake_case_element: pp.ParserElement = pp.Combine(
    lower_digits_element + (underscore_token + lower_digits_element)[...])
kebab_case_element: pp.ParserElement = pp.Combine(
    lower_digits_element + (hyphen_token + lower_digits_element)[...])
dot_case_element: pp.ParserElement = pp.Combine(
    lower_digits_element + (period_token + lower_digits_element)[...])

# Lowercase hyphenated UUID: 8-4-4-4-12 hex digit groups.
uuid_regexp: re.Pattern[str] = re.compile(
    rf"{hex_digit_regexp.pattern}{{8}}(-{hex_digit_regexp.pattern}{{4}}){{3}}-{hex_digit_regexp.pattern}{{12}}")
uuid_element: pp.ParserElement = pp.Regex(uuid_regexp.pattern)

# Slash-separated paths of strict segments; the lookahead rejects segments
# made entirely of dots (".", "..", ...). A trailing slash is permitted since
# the final segment is optional.
strict_relpath_regexp: re.Pattern[str] = re.compile(
    rf"(?!.*(^|/)\.+($|/))(?:{strict_chars_regexp.pattern}/)*(?:{strict_chars_regexp.pattern})?")
strict_abspath_regexp: re.Pattern[str] = re.compile(
    rf"(?!.*(^|/)\.+($|/))/(?:{strict_chars_regexp.pattern}/)*(?:{strict_chars_regexp.pattern})?")

# pyparsing counterpart of the dots-only exclusion above, expressed as a
# condition on each path segment.
strict_path_chars_element = strict_chars_element.copy()
strict_path_chars_element.add_condition(token_reparse(period_token[1, ...], negate=True),
                                        message="cannot be pure dots")

strict_relpath_element: pp.ParserElement = pp.Combine(
    (strict_path_chars_element + slash_token)[...] + strict_path_chars_element[0, 1])
strict_abspath_element: pp.ParserElement = pp.Combine(
    slash_token + (strict_path_chars_element + slash_token)[...] + strict_path_chars_element[0, 1])

# Simplified email shape: dot-separated local part of [a-z0-9_-] runs, then a
# kebab-case domain with a 2-63 character lowercase/digit TLD.
email_address_regexp: re.Pattern[str] = re.compile(
    rf"(({lower_digit_regexp.pattern}|[_-])+)(?:\.({lower_digit_regexp.pattern}|[_-])+)*@(?:{kebab_case_regexp.pattern}\.)+({lower_digit_regexp.pattern}{{2,63}})")
email_address_element: pp.ParserElement = pp.Regex(email_address_regexp.pattern)

# Semantic version: major.minor.patch with optional pre-release ("-...") and
# build metadata ("+...") dot-separated alphanumeric identifiers.
semver_regexp: re.Pattern[str] = re.compile(
    rf"({number_regexp.pattern})\.({number_regexp.pattern})\.({number_regexp.pattern})"
    rf"(?:-{alpha_digits_regexp.pattern}(?:\.{alpha_digits_regexp.pattern})*)?"
    rf"(?:\+{alpha_digits_regexp.pattern}(?:\.{alpha_digits_regexp.pattern})*)?")
semver_element: pp.ParserElement = pp.Regex(semver_regexp.pattern)

# Tags: snake-case components joined by ":" or "/" respectively.
colon_tag_regexp: re.Pattern[str] = re.compile(rf"{snake_case_regexp.pattern}(?::{snake_case_regexp.pattern})*")
colon_tag_element: pp.ParserElement = pp.Combine(snake_case_element + (colon_token + snake_case_element)[...])

slash_tag_regexp: re.Pattern[str] = re.compile(rf"{snake_case_regexp.pattern}(?:/{snake_case_regexp.pattern})*")
slash_tag_element: pp.ParserElement = pp.Combine(snake_case_element + (slash_token + snake_case_element)[...])

# Topics are absolute slash tags: one or more "/<snake_case>" groups.
topic_regexp: re.Pattern[str] = re.compile(rf"(?:/{snake_case_regexp.pattern})+")
topic_element: pp.ParserElement = pp.Combine((slash_token + snake_case_element)[1, ...])

# VIN alphabet: digits and uppercase letters except I, O and Q.
vin_code_chars: str = "0123456789ABCDEFGHJKLMNPRSTUVWXYZ"

# A VIN is exactly 17 characters drawn from the VIN alphabet.
vin_code_regexp: re.Pattern[str] = re.compile(rf"[{vin_code_chars}]{{17}}")
vin_code_element: pp.ParserElement = pp.Regex(vin_code_regexp.pattern)
191
+
192
# --- Public whole-string matchers: each `*_pattern` is the anchored regex and
# --- each `*_parser` the anchored pyparsing element for the same shape.
hex_string_pattern = make_string_pattern(hex_digits_regexp)
hex_string_parser = make_string_parser(hex_digits_element)

snake_case_pattern = make_string_pattern(snake_case_regexp)
snake_case_parser = make_string_parser(snake_case_element)
kebab_case_pattern = make_string_pattern(kebab_case_regexp)
kebab_case_parser = make_string_parser(kebab_case_element)
dot_case_pattern = make_string_pattern(dot_case_regexp)
dot_case_parser = make_string_parser(dot_case_element)

uuid_pattern = make_string_pattern(uuid_regexp)
uuid_parser = make_string_parser(uuid_element)

strict_relpath_pattern = make_string_pattern(strict_relpath_regexp)
strict_relpath_parser = make_string_parser(strict_relpath_element)
strict_abspath_pattern = make_string_pattern(strict_abspath_regexp)
strict_abspath_parser = make_string_parser(strict_abspath_element)

email_address_pattern = make_string_pattern(email_address_regexp)
email_address_parser = make_string_parser(email_address_element)

semver_pattern = make_string_pattern(semver_regexp)
semver_parser = make_string_parser(semver_element)

colon_tag_pattern = make_string_pattern(colon_tag_regexp)
colon_tag_parser = make_string_parser(colon_tag_element)
slash_tag_pattern = make_string_pattern(slash_tag_regexp)
slash_tag_parser = make_string_parser(slash_tag_element)
topic_pattern = make_string_pattern(topic_regexp)
topic_parser = make_string_parser(topic_element)
vin_code_pattern = make_string_pattern(vin_code_regexp)
vin_code_parser = make_string_parser(vin_code_element)
224
+
225
+
226
def to_colon_tag(tag: str) -> str:
    """Converts a slash-separated tag into its colon-separated form."""
    return ":".join(tag.split("/"))
228
+
229
+
230
def to_slash_tag(tag: str) -> str:
    """Converts a colon-separated tag into its slash-separated form."""
    return "/".join(tag.split(":"))
232
+
233
+
234
@dataclasses.dataclass(frozen=True, eq=True)
class UserName(object):
    """A user name rendered as ``first[sn].last``; a serial of 0 is omitted."""

    first_name: str
    last_name: str
    sn: int = 0

    def __str__(self) -> str:
        serial = "" if self.sn == 0 else str(self.sn)
        return f"{self.first_name}{serial}.{self.last_name}"
244
+
245
+
246
@dataclasses.dataclass(frozen=True, eq=True)
class UserEmail(object):
    """A user email composed of a ``UserName`` and a domain."""

    user_name: UserName
    domain: str

    def __str__(self) -> str:
        return "@".join((str(self.user_name), self.domain))
253
+
254
+
255
+ @dataclasses.dataclass(frozen=True, eq=True)
256
+ class VehicleName(object):
257
+ brand: str
258
+ alias: str
259
+ code: str | None = None
260
+ vin: str | None = None
261
+
262
+ def __str__(self) -> str:
263
+ if self.code and self.vin:
264
+ return f"{self.brand}_{self.alias}_{self.code}_V{self.vin}"
265
+ if self.code:
266
+ return f"{self.brand}_{self.alias}_{self.code}"
267
+ if self.vin:
268
+ return f"{self.brand}_{self.alias}_V{self.vin}"
269
+ return f"{self.brand}_{self.alias}"
270
+
271
+
272
@dataclasses.dataclass(frozen=True, eq=True)
class BagName(object):
    """A bag file name rendered as ``<basic-datetime>-<vehicle-name>-<sn>.bag``."""

    vehicle_name: VehicleName
    record_dt: datetime.datetime
    record_sn: int

    def __str__(self) -> str:
        timestamp = dt_format(self.record_dt, basic_format())
        return f"{timestamp}-{self.vehicle_name}-{self.record_sn}.bag"
280
+
281
+
282
@singleton
def get_user_name_element() -> pp.ParserElement:
    """
    Grammar for a user name: lowercase first name, optional positive serial
    number, a period, then lowercase last name (e.g. ``john2.doe``).
    """
    return pp.Combine(lowers_element("first_name") +
                      positive_number_element("sn")[0, 1] +
                      period_token +
                      lowers_element("last_name"))
288
+
289
+
290
@singleton
def get_user_name_parser() -> pp.ParserElement:
    """Whole-string parser for user names (see ``get_user_name_element``)."""
    element = get_user_name_element()
    return make_string_parser(element)
293
+
294
+
295
@singleton
def get_user_email_element() -> pp.ParserElement:
    """
    Grammar for a user email: a user name, ``@``, then a domain of one or more
    kebab-case labels each followed by a dot, ending in a lowercase TLD.
    """
    return pp.Combine(get_user_name_element()("user_name") +
                      pp.Char("@") +
                      pp.Combine((kebab_case_element + period_token)[1, ...] + lowers_element)("domain"))
300
+
301
+
302
@singleton
def get_user_email_parser() -> pp.ParserElement:
    """Whole-string parser for user emails (see ``get_user_email_element``)."""
    element = get_user_email_element()
    return make_string_parser(element)
305
+
306
+
307
@singleton
def get_vehicle_name_element() -> pp.ParserElement:
    """
    Grammar for a vehicle name: ``brand_alias[_code][_Vvin]`` where the alias
    itself may contain underscores, the numeric code is optional, and the VIN
    (prefixed with ``V``) is optional.
    """
    return pp.Combine(lower_identifier_element("brand") +
                      (underscore_token +
                       pp.Combine(lower_identifier_element +
                                  (underscore_token + lower_identifier_element)[...])("alias")) +
                      (underscore_token + digits_element("code"))[0, 1] +
                      (underscore_token + pp.Char("V") + vin_code_element("vin"))[0, 1])
315
+
316
+
317
@singleton
def get_vehicle_name_parser() -> pp.ParserElement:
    """Whole-string parser for vehicle names (see ``get_vehicle_name_element``)."""
    element = get_vehicle_name_element()
    return make_string_parser(element)
320
+
321
+
322
@singleton
def get_bag_name_element() -> pp.ParserElement:
    """
    Grammar for a bag name: ``<basic-datetime>-<vehicle-name>-<sn>`` with an
    optional ``.bag`` extension (the extension is optional when parsing even
    though ``BagName.__str__`` always emits it).
    """
    return pp.Combine(basic_datetime_element("record_dt") +
                      (hyphen_token +
                       get_vehicle_name_element()("vehicle_name")) +
                      (hyphen_token + number_element("record_sn")) +
                      (period_token + pp.Literal("bag"))[0, 1])
329
+
330
+
331
@singleton
def get_bag_name_parser() -> pp.ParserElement:
    """Whole-string parser for bag names (see ``get_bag_name_element``)."""
    element = get_bag_name_element()
    return make_string_parser(element)
334
+
335
+
336
def parse_user_name(s: str) -> UserName | None:
    """
    Parses ``s`` into a :class:`UserName`.

    :raises pp.ParseException: if ``s`` does not match the user name grammar.
    """
    match = get_user_name_parser().parse_string(s, parse_all=True)

    first_name = match.get("first_name")
    last_name = match.get("last_name")
    sn = int(match.get("sn", 0))
    return UserName(first_name, last_name, sn)
344
+
345
+
346
def parse_user_email(s: str) -> UserEmail | None:
    """
    Parses ``s`` into a :class:`UserEmail`.

    :raises pp.ParseException: if ``s`` does not match the user email grammar.
    """
    match = get_user_email_parser().parse_string(s, parse_all=True)

    name_match = match.get("user_name")
    user_name = UserName(
        name_match.get("first_name"),
        name_match.get("last_name"),
        int(name_match.get("sn", 0)),
    )
    return UserEmail(user_name, match.get("domain"))
357
+
358
+
359
def parse_vehicle_name(s: str) -> VehicleName | None:
    """
    Parses ``s`` into a :class:`VehicleName`.

    :raises pp.ParseException: if ``s`` does not match the vehicle name grammar.
    """
    match = get_vehicle_name_parser().parse_string(s, parse_all=True)

    # `code` and `vin` are optional in the grammar and come back as None when absent.
    return VehicleName(
        match.get("brand"),
        match.get("alias"),
        match.get("code"),
        match.get("vin"),
    )
368
+
369
+
370
def parse_bag_name(s: str) -> BagName | None:
    """
    Parses ``s`` into a :class:`BagName`.

    :raises pp.ParseException: if ``s`` does not match the bag name grammar.
    """
    match = get_bag_name_parser().parse_string(s, parse_all=True)

    vehicle_match = match.get("vehicle_name")
    vehicle_name = VehicleName(
        vehicle_match.get("brand"),
        vehicle_match.get("alias"),
        vehicle_match.get("code"),
        vehicle_match.get("vin"),
    )
    record_dt = dt_parse(match.get("record_dt"), basic_format())
    return BagName(vehicle_name, record_dt, int(match.get("record_sn")))
@@ -0,0 +1,49 @@
1
+ import uuid as py_uuid
2
+
3
+ import textcase
4
+ from iker.common.utils.jsonutils import JsonType
5
+ from iker.common.utils.jsonutils import json_difference, json_reformat
6
+
7
+ from plexus.common.utils.datautils import compute_vin_code_check_digit
8
+
9
+ __all__ = [
10
+ "generate_dummy_uuid_str",
11
+ "generate_dummy_uuid",
12
+ "generate_dummy_vin_code",
13
+ "case_insensitive_json_compare",
14
+ ]
15
+
16
+
17
def generate_dummy_uuid_str(*nums: int) -> str:
    """
    Builds a deterministic dummy UUID string from up to eight 16-bit integers;
    missing positions are zero-filled.

    :param nums: up to 8 integers, each in 0..65535.
    :return: the hyphenated lowercase UUID string.
    :raises ValueError: if more than 8 integers are given or any is out of range.
    """
    if len(nums) > 8:
        raise ValueError("a maximum of 8 integers can be provided")
    if not all(0 <= num <= 0xFFFF for num in nums):
        raise ValueError("all integers must be in the range 0 to 65535 (0xFFFF)")
    padded = tuple(nums) + (0,) * (8 - len(nums))
    digits = "".join(f"{num:04x}" for num in padded)
    # Standard 8-4-4-4-12 hyphenation over the 32 hex digits.
    return f"{digits[:8]}-{digits[8:12]}-{digits[12:16]}-{digits[16:20]}-{digits[20:]}"
24
+
25
+
26
def generate_dummy_uuid(*nums: int) -> py_uuid.UUID:
    """Builds a deterministic dummy :class:`uuid.UUID` from up to eight 16-bit integers."""
    return py_uuid.UUID(hex=generate_dummy_uuid_str(*nums))
28
+
29
+
30
def generate_dummy_vin_code(*nums: int) -> str:
    """
    Builds a deterministic dummy 17-character VIN code from up to four integers;
    missing positions are zero-filled and the 9th character (index 8) is
    replaced with the computed check digit.

    :param nums: up to 4 integers, each in 0..9999.
    :return: the 17-character VIN code string.
    :raises ValueError: if more than 4 integers are given or any is out of range.
    """
    if len(nums) > 4:
        raise ValueError("a maximum of 4 integers can be provided")
    if not all(0 <= num <= 9999 for num in nums):
        raise ValueError("all integers must be in the range 0 to 9999")
    padded = tuple(nums) + (0,) * (4 - len(nums))
    # Four 4-digit groups with a "0" placeholder where the check digit goes.
    raw = "{:04d}{:04d}0{:04d}{:04d}".format(*padded)
    check_digit = compute_vin_code_check_digit(raw)
    return raw[:8] + check_digit + raw[-8:]
39
+
40
+
41
def case_insensitive_json_compare(a: JsonType, b: JsonType, *, print_diff_messages: bool = True) -> bool:
    """
    Compares two JSON values after normalizing all keys to camel case via
    ``textcase.camel``, optionally printing each difference found.

    :param a: left-hand JSON value.
    :param b: right-hand JSON value.
    :param print_diff_messages: when True, print every node path and message.
    :return: True if the normalized values are identical.
    """
    lhs = json_reformat(a, key_formatter=textcase.camel)
    rhs = json_reformat(b, key_formatter=textcase.camel)
    diffs = list(json_difference(lhs, rhs, []))
    for node_path, diff_message in diffs:
        if print_diff_messages:
            print(node_path, diff_message)
    return not diffs