omlish-0.0.0.dev309-py3-none-any.whl → omlish-0.0.0.dev311-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omlish/__about__.py +2 -2
- omlish/dataclasses/api/classes/metadata.py +2 -1
- omlish/formats/json5/_antlr/Json5Lexer.py +1 -1
- omlish/formats/json5/_antlr/Json5Listener.py +1 -1
- omlish/formats/json5/_antlr/Json5Parser.py +1 -1
- omlish/formats/json5/_antlr/Json5Visitor.py +1 -1
- omlish/formats/json5/parsing.py +1 -1
- omlish/formats/toml/parser.py +101 -0
- omlish/inject/__init__.py +1 -0
- omlish/inject/utils.py +11 -0
- omlish/lang/__init__.py +1 -0
- omlish/lang/iterables.py +15 -2
- omlish/libc.py +8 -5
- omlish/specs/proto/_antlr/Protobuf3Lexer.py +1 -1
- omlish/specs/proto/_antlr/Protobuf3Listener.py +1 -1
- omlish/specs/proto/_antlr/Protobuf3Parser.py +1 -1
- omlish/specs/proto/_antlr/Protobuf3Visitor.py +1 -1
- omlish/specs/proto/parsing.py +2 -2
- omlish/sql/parsing/_antlr/MinisqlLexer.py +1 -1
- omlish/sql/parsing/_antlr/MinisqlListener.py +1 -1
- omlish/sql/parsing/_antlr/MinisqlParser.py +1 -1
- omlish/sql/parsing/_antlr/MinisqlVisitor.py +1 -1
- omlish/sql/parsing/parsing.py +3 -3
- omlish/term/coloring.py +108 -0
- omlish/{antlr → text/antlr}/delimit.py +4 -1
- omlish/{antlr → text/antlr}/dot.py +4 -1
- omlish/{antlr → text/antlr}/errors.py +3 -0
- omlish/{antlr → text/antlr}/parsing.py +4 -1
- {omlish-0.0.0.dev309.dist-info → omlish-0.0.0.dev311.dist-info}/METADATA +1 -1
- {omlish-0.0.0.dev309.dist-info → omlish-0.0.0.dev311.dist-info}/RECORD +98 -97
- /omlish/{antlr → text/antlr}/__init__.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/BufferedTokenStream.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/CommonTokenFactory.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/CommonTokenStream.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/FileStream.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/InputStream.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/IntervalSet.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/LICENSE.txt +0 -0
- /omlish/{antlr → text/antlr}/_runtime/LL1Analyzer.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/Lexer.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/ListTokenSource.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/Parser.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/ParserInterpreter.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/ParserRuleContext.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/PredictionContext.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/Recognizer.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/RuleContext.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/StdinStream.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/Token.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/TokenStreamRewriter.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/Utils.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/__init__.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/_all.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/_pygrun.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/atn/ATN.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/atn/ATNConfig.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/atn/ATNConfigSet.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/atn/ATNDeserializationOptions.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/atn/ATNDeserializer.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/atn/ATNSimulator.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/atn/ATNState.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/atn/ATNType.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/atn/LexerATNSimulator.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/atn/LexerAction.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/atn/LexerActionExecutor.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/atn/ParserATNSimulator.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/atn/PredictionMode.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/atn/SemanticContext.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/atn/Transition.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/atn/__init__.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/dfa/DFA.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/dfa/DFASerializer.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/dfa/DFAState.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/dfa/__init__.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/error/DiagnosticErrorListener.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/error/ErrorListener.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/error/ErrorStrategy.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/error/Errors.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/error/__init__.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/tree/Chunk.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/tree/ParseTreeMatch.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/tree/ParseTreePattern.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/tree/ParseTreePatternMatcher.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/tree/RuleTagToken.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/tree/TokenTagToken.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/tree/Tree.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/tree/Trees.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/tree/__init__.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/xpath/XPath.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/xpath/XPathLexer.py +0 -0
- /omlish/{antlr → text/antlr}/_runtime/xpath/__init__.py +0 -0
- /omlish/{antlr → text/antlr}/input.py +0 -0
- /omlish/{antlr → text/antlr}/runtime.py +0 -0
- /omlish/{antlr → text/antlr}/utils.py +0 -0
- {omlish-0.0.0.dev309.dist-info → omlish-0.0.0.dev311.dist-info}/WHEEL +0 -0
- {omlish-0.0.0.dev309.dist-info → omlish-0.0.0.dev311.dist-info}/entry_points.txt +0 -0
- {omlish-0.0.0.dev309.dist-info → omlish-0.0.0.dev311.dist-info}/licenses/LICENSE +0 -0
- {omlish-0.0.0.dev309.dist-info → omlish-0.0.0.dev311.dist-info}/top_level.txt +0 -0
omlish/__about__.py
CHANGED
omlish/dataclasses/api/classes/metadata.py
CHANGED

```diff
@@ -18,7 +18,8 @@ METADATA_ATTR = '__dataclass_metadata__'
 
 def _get_cls_metadata_dct(cls: type) -> dict:
     check.isinstance(cls, type)
-
+    if is_immediate_dataclass(cls):
+        raise TypeError(f'Cannot alter dataclass metadata on already processed class {cls!r}')
     try:
         return cls.__dict__[METADATA_ATTR]
     except KeyError:
```
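The added guard refuses metadata changes once the dataclass machinery has already processed the class. A minimal sketch of the detection idea using only the stdlib — the `already_processed` helper and the example classes are hypothetical and only approximate what omlish's `is_immediate_dataclass` checks:

```python
import dataclasses as dc


def already_processed(cls: type) -> bool:
    # Assumed check: the dataclass machinery stores __dataclass_fields__ on the
    # decorated class itself, so its presence in cls.__dict__ (not inherited)
    # marks a class that has already been processed.
    return '__dataclass_fields__' in cls.__dict__


@dc.dataclass
class Point:
    x: int
    y: int


class SubPoint(Point):  # not decorated itself; fields are only inherited
    pass


assert already_processed(Point)
assert not already_processed(SubPoint)
```

Under that reading, the new `TypeError` means metadata has to be attached before the decorator runs, not after.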
omlish/formats/json5/parsing.py
CHANGED
```diff
@@ -1,7 +1,7 @@
 # ruff: noqa: N802 N803
 import typing as ta
 
-from ... import antlr
+from ...text import antlr
 from ._antlr.Json5Lexer import Json5Lexer  # type: ignore
 from ._antlr.Json5Parser import Json5Parser  # type: ignore
 from ._antlr.Json5Visitor import Json5Visitor  # type: ignore
```
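Only the import path changes here: the ANTLR helpers and vendored runtime now live under `omlish.text.antlr` rather than `omlish.antlr` (see the renamed files in the list above). A sketch of the corresponding update in downstream code, assuming it imported these modules directly:

```python
# Before (0.0.0.dev309 and earlier) -- old locations, shown for contrast:
# from omlish import antlr
# from omlish.antlr import runtime as antlr_runtime

# After (0.0.0.dev311):
from omlish.text import antlr                            # delimit / dot / errors / parsing helpers
from omlish.text.antlr import runtime as antlr_runtime   # runtime module sitting next to the vendored _runtime/ tree
```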
omlish/formats/toml/parser.py
CHANGED
```diff
@@ -156,29 +156,37 @@ class TomlFlags:
     def set(self, key: TomlKey, flag: int, *, recursive: bool) -> None:  # noqa: A003
         cont = self._flags
         key_parent, key_stem = key[:-1], key[-1]
+
         for k in key_parent:
             if k not in cont:
                 cont[k] = {'flags': set(), 'recursive_flags': set(), 'nested': {}}
             cont = cont[k]['nested']
+
         if key_stem not in cont:
             cont[key_stem] = {'flags': set(), 'recursive_flags': set(), 'nested': {}}
+
         cont[key_stem]['recursive_flags' if recursive else 'flags'].add(flag)
 
     def is_(self, key: TomlKey, flag: int) -> bool:
         if not key:
             return False  # document root has no flags
+
         cont = self._flags
         for k in key[:-1]:
             if k not in cont:
                 return False
+
             inner_cont = cont[k]
             if flag in inner_cont['recursive_flags']:
                 return True
+
             cont = inner_cont['nested']
+
         key_stem = key[-1]
         if key_stem in cont:
             cont = cont[key_stem]
             return flag in cont['flags'] or flag in cont['recursive_flags']
+
         return False
 
 
@@ -196,24 +204,31 @@ class TomlNestedDict:
             access_lists: bool = True,
     ) -> dict:
         cont: ta.Any = self.dict
+
         for k in key:
             if k not in cont:
                 cont[k] = {}
+
             cont = cont[k]
+
             if access_lists and isinstance(cont, list):
                 cont = cont[-1]
+
         if not isinstance(cont, dict):
             raise KeyError('There is no nest behind this key')
+
         return cont
 
     def append_nest_to_list(self, key: TomlKey) -> None:
         cont = self.get_or_create_nest(key[:-1])
+
         last_key = key[-1]
         if last_key in cont:
             list_ = cont[last_key]
             if not isinstance(list_, list):
                 raise KeyError('An object other than list found behind this key')
             list_.append({})
+
         else:
             cont[last_key] = [{}]
 
@@ -283,23 +298,30 @@ class TomlParser:
                 char = self.src[self.pos]
             except IndexError:
                 break
+
             if char == '\n':
                 self.pos += 1
                 continue
+
             if char in self.KEY_INITIAL_CHARS:
                 self.key_value_rule(header)
                 self.skip_chars(self.WS)
+
             elif char == '[':
                 try:
                     second_char: ta.Optional[str] = self.src[self.pos + 1]
                 except IndexError:
                     second_char = None
+
                 self.flags.finalize_pending()
+
                 if second_char == '[':
                     header = self.create_list_rule()
                 else:
                     header = self.create_dict_rule()
+
                 self.skip_chars(self.WS)
+
             elif char != '#':
                 raise self.suffixed_err('Invalid statement')
 
@@ -311,8 +333,10 @@ class TomlParser:
                 char = self.src[self.pos]
             except IndexError:
                 break
+
             if char != '\n':
                 raise self.suffixed_err('Expected newline or end of document after a statement')
+
             self.pos += 1
 
         return self.data.dict
@@ -341,7 +365,9 @@ class TomlParser:
         if not error_on.isdisjoint(self.src[self.pos:new_pos]):
             while self.src[self.pos] not in error_on:
                 self.pos += 1
+
             raise self.suffixed_err(f'Found invalid character {self.src[self.pos]!r}')
+
         self.pos = new_pos
 
     def skip_comment(self) -> None:
@@ -349,6 +375,7 @@ class TomlParser:
             char: ta.Optional[str] = self.src[self.pos]
         except IndexError:
             char = None
+
         if char == '#':
             self.pos += 1
             self.skip_until(
@@ -372,7 +399,9 @@ class TomlParser:
 
         if self.flags.is_(key, TomlFlags.EXPLICIT_NEST) or self.flags.is_(key, TomlFlags.FROZEN):
             raise self.suffixed_err(f'Cannot declare {key} twice')
+
         self.flags.set(key, TomlFlags.EXPLICIT_NEST, recursive=False)
+
         try:
             self.data.get_or_create_nest(key)
         except KeyError:
@@ -380,20 +409,25 @@ class TomlParser:
 
         if not self.src.startswith(']', self.pos):
             raise self.suffixed_err("Expected ']' at the end of a table declaration")
+
         self.pos += 1
         return key
 
     def create_list_rule(self) -> TomlKey:
         self.pos += 2  # Skip "[["
         self.skip_chars(self.WS)
+
         key = self.parse_key()
 
         if self.flags.is_(key, TomlFlags.FROZEN):
             raise self.suffixed_err(f'Cannot mutate immutable namespace {key}')
+
         # Free the namespace now that it points to another empty list item...
         self.flags.unset_all(key)
+
         # ...but this key precisely is still prohibited from table declaration
         self.flags.set(key, TomlFlags.EXPLICIT_NEST, recursive=False)
+
         try:
             self.data.append_nest_to_list(key)
         except KeyError:
@@ -401,6 +435,7 @@ class TomlParser:
 
         if not self.src.startswith(']]', self.pos):
             raise self.suffixed_err("Expected ']]' at the end of an array declaration")
+
         self.pos += 2
         return key
 
@@ -414,6 +449,7 @@ class TomlParser:
             # Check that dotted key syntax does not redefine an existing table
             if self.flags.is_(cont_key, TomlFlags.EXPLICIT_NEST):
                 raise self.suffixed_err(f'Cannot redefine namespace {cont_key}')
+
             # Containers in the relative path can't be opened with the table syntax or dotted key/value syntax in
             # following table sections.
             self.flags.add_pending(cont_key, TomlFlags.EXPLICIT_NEST)
@@ -425,41 +461,54 @@ class TomlParser:
             nest = self.data.get_or_create_nest(abs_key_parent)
         except KeyError:
             raise self.suffixed_err('Cannot overwrite a value') from None
+
         if key_stem in nest:
             raise self.suffixed_err('Cannot overwrite a value')
+
         # Mark inline table and array namespaces recursively immutable
         if isinstance(value, (dict, list)):
             self.flags.set(header + key, TomlFlags.FROZEN, recursive=True)
+
         nest[key_stem] = value
 
     def parse_key_value_pair(self) -> ta.Tuple[TomlKey, ta.Any]:
         key = self.parse_key()
+
         try:
             char: ta.Optional[str] = self.src[self.pos]
         except IndexError:
             char = None
+
         if char != '=':
             raise self.suffixed_err("Expected '=' after a key in a key/value pair")
+
         self.pos += 1
         self.skip_chars(self.WS)
+
         value = self.parse_value()
         return key, value
 
     def parse_key(self) -> TomlKey:
         key_part = self.parse_key_part()
         key: TomlKey = (key_part,)
+
         self.skip_chars(self.WS)
+
         while True:
             try:
                 char: ta.Optional[str] = self.src[self.pos]
             except IndexError:
                 char = None
+
             if char != '.':
                 return key
+
             self.pos += 1
             self.skip_chars(self.WS)
+
             key_part = self.parse_key_part()
             key += (key_part,)
+
             self.skip_chars(self.WS)
 
     def parse_key_part(self) -> str:
@@ -467,14 +516,18 @@ class TomlParser:
             char: ta.Optional[str] = self.src[self.pos]
         except IndexError:
             char = None
+
         if char in self.BARE_KEY_CHARS:
             start_pos = self.pos
             self.skip_chars(self.BARE_KEY_CHARS)
             return self.src[start_pos:self.pos]
+
         if char == "'":
             return self.parse_literal_str()
+
         if char == '"':
             return self.parse_one_line_basic_str()
+
         raise self.suffixed_err('Invalid initial character for a key part')
 
     def parse_one_line_basic_str(self) -> str:
@@ -489,6 +542,7 @@ class TomlParser:
         if self.src.startswith(']', self.pos):
             self.pos += 1
             return array
+
         while True:
             val = self.parse_value()
             array.append(val)
@@ -498,11 +552,14 @@ class TomlParser:
             if c == ']':
                 self.pos += 1
                 return array
+
             if c != ',':
                 raise self.suffixed_err('Unclosed array')
+
             self.pos += 1
 
             self.skip_comments_and_array_ws()
+
             if self.src.startswith(']', self.pos):
                 self.pos += 1
                 return array
@@ -513,54 +570,72 @@ class TomlParser:
         flags = TomlFlags()
 
         self.skip_chars(self.WS)
+
         if self.src.startswith('}', self.pos):
             self.pos += 1
             return nested_dict.dict
+
         while True:
             key, value = self.parse_key_value_pair()
             key_parent, key_stem = key[:-1], key[-1]
+
             if flags.is_(key, TomlFlags.FROZEN):
                 raise self.suffixed_err(f'Cannot mutate immutable namespace {key}')
+
             try:
                 nest = nested_dict.get_or_create_nest(key_parent, access_lists=False)
             except KeyError:
                 raise self.suffixed_err('Cannot overwrite a value') from None
+
             if key_stem in nest:
                 raise self.suffixed_err(f'Duplicate inline table key {key_stem!r}')
+
             nest[key_stem] = value
             self.skip_chars(self.WS)
+
             c = self.src[self.pos:self.pos + 1]
             if c == '}':
                 self.pos += 1
                 return nested_dict.dict
+
             if c != ',':
                 raise self.suffixed_err('Unclosed inline table')
+
             if isinstance(value, (dict, list)):
                 flags.set(key, TomlFlags.FROZEN, recursive=True)
+
             self.pos += 1
             self.skip_chars(self.WS)
 
     def parse_basic_str_escape(self, multiline: bool = False) -> str:
         escape_id = self.src[self.pos:self.pos + 2]
         self.pos += 2
+
         if multiline and escape_id in {'\\ ', '\\\t', '\\\n'}:
             # Skip whitespace until next non-whitespace character or end of the doc. Error if non-whitespace is found
             # before newline.
             if escape_id != '\\\n':
                 self.skip_chars(self.WS)
+
                 try:
                     char = self.src[self.pos]
                 except IndexError:
                     return ''
+
                 if char != '\n':
                     raise self.suffixed_err("Unescaped '\\' in a string")
+
                 self.pos += 1
+
             self.skip_chars(self.WS_AND_NEWLINE)
             return ''
+
         if escape_id == '\\u':
             return self.parse_hex_char(4)
+
         if escape_id == '\\U':
             return self.parse_hex_char(8)
+
         try:
             return self.BASIC_STR_ESCAPE_REPLACEMENTS[escape_id]
         except KeyError:
@@ -575,12 +650,16 @@ class TomlParser:
 
     def parse_hex_char(self, hex_len: int) -> str:
         hex_str = self.src[self.pos:self.pos + hex_len]
+
         if len(hex_str) != hex_len or not self.HEXDIGIT_CHARS.issuperset(hex_str):
             raise self.suffixed_err('Invalid hex value')
+
         self.pos += hex_len
         hex_int = int(hex_str, 16)
+
         if not self.is_unicode_scalar_value(hex_int):
             raise self.suffixed_err('Escaped character is not a Unicode scalar value')
+
         return chr(hex_int)
 
     def parse_literal_str(self) -> str:
@@ -606,6 +685,7 @@ class TomlParser:
             )
             result = self.src[start_pos:self.pos]
             self.pos += 3
+
         else:
             delim = '"'
             result = self.parse_basic_str(multiline=True)
@@ -613,9 +693,11 @@ class TomlParser:
         # Add at maximum two extra apostrophes/quotes if the end sequence is 4 or 5 chars long instead of just 3.
         if not self.src.startswith(delim, self.pos):
             return result
+
         self.pos += 1
         if not self.src.startswith(delim, self.pos):
             return result + delim
+
         self.pos += 1
         return result + (delim * 2)
 
@@ -626,6 +708,7 @@ class TomlParser:
         else:
             error_on = self.ILLEGAL_BASIC_STR_CHARS
             parse_escapes = self.parse_basic_str_escape
+
         result = ''
         start_pos = self.pos
         while True:
@@ -633,25 +716,31 @@ class TomlParser:
                 char = self.src[self.pos]
             except IndexError:
                 raise self.suffixed_err('Unterminated string') from None
+
             if char == '"':
                 if not multiline:
                     end_pos = self.pos
                     self.pos += 1
                     return result + self.src[start_pos:end_pos]
+
                 if self.src.startswith('"""', self.pos):
                     end_pos = self.pos
                     self.pos += 3
                     return result + self.src[start_pos:end_pos]
+
                 self.pos += 1
                 continue
+
             if char == '\\':
                 result += self.src[start_pos:self.pos]
                 parsed_escape = parse_escapes()
                 result += parsed_escape
                 start_pos = self.pos
                 continue
+
             if char in error_on:
                 raise self.suffixed_err(f'Illegal character {char!r}')
+
             self.pos += 1
 
     def parse_value(self) -> ta.Any:  # noqa: C901
@@ -679,6 +768,7 @@ class TomlParser:
             if self.src.startswith('true', self.pos):
                 self.pos += 4
                 return True
+
         if char == 'f':
             if self.src.startswith('false', self.pos):
                 self.pos += 5
@@ -699,8 +789,10 @@ class TomlParser:
                 datetime_obj = self.match_to_datetime(datetime_match)
             except ValueError as e:
                 raise self.suffixed_err('Invalid date or datetime') from e
+
             self.pos = datetime_match.end()
             return datetime_obj
+
         localtime_match = self.RE_LOCALTIME.match(self.src, self.pos)
         if localtime_match:
             self.pos = localtime_match.end()
@@ -718,6 +810,7 @@ class TomlParser:
         if first_three in {'inf', 'nan'}:
             self.pos += 3
             return self.parse_float(first_three)
+
         first_four = self.src[self.pos:self.pos + 4]
         if first_four in {'-inf', '+inf', '-nan', '+nan'}:
             self.pos += 4
@@ -728,11 +821,13 @@ class TomlParser:
     def coord_repr(self, pos: TomlPos) -> str:
         if pos >= len(self.src):
             return 'end of document'
+
         line = self.src.count('\n', 0, pos) + 1
         if line == 1:
             column = pos + 1
         else:
             column = pos - self.src.rindex('\n', 0, pos)
+
         return f'line {line}, column {column}'
 
     def suffixed_err(self, msg: str, *, pos: ta.Optional[TomlPos] = None) -> TomlDecodeError:
@@ -799,11 +894,16 @@ class TomlParser:
             offset_hour_str,
             offset_minute_str,
         ) = match.groups()
+
         year, month, day = int(year_str), int(month_str), int(day_str)
+
         if hour_str is None:
             return datetime.date(year, month, day)
+
         hour, minute, sec = int(hour_str), int(minute_str), int(sec_str)
+
         micros = int(micros_str.ljust(6, '0')) if micros_str else 0
+
         if offset_sign_str:
             tz: ta.Optional[datetime.tzinfo] = toml_cached_tz(
                 offset_hour_str, offset_minute_str, offset_sign_str,
@@ -812,6 +912,7 @@ class TomlParser:
             tz = datetime.UTC
         else:  # local date-time
             tz = None
+
         return datetime.datetime(year, month, day, hour, minute, sec, micros, tzinfo=tz)
 
     @classmethod
```
omlish/inject/__init__.py
CHANGED
omlish/inject/utils.py
CHANGED
```diff
@@ -19,3 +19,14 @@ class ConstFn(HasOriginsImpl, lang.Final, ta.Generic[T]):
 
     def __call__(self) -> T:
         return self.v
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+@dc.extra_class_params(terse_repr=True)
+class Id:
+    """A utility dataclass intended to be used as a key tag for disambiguation."""
+
+    v: int
```
omlish/lang/__init__.py
CHANGED
omlish/lang/iterables.py
CHANGED
```diff
@@ -40,6 +40,17 @@ def interleave(vs: ta.Iterable[T], d: T) -> ta.Iterable[T]:
         yield v
 
 
+def renumerate(it: ta.Iterable[T]) -> ta.Iterable[tuple[T, int]]:
+    return ((e, i) for i, e in enumerate(it))
+
+
+def common_prefix_len(*its: ta.Iterable) -> int:
+    return ilen(itertools.takewhile(lambda t: all(e == t[0] for e in t[1:]), zip(*its)))
+
+
+##
+
+
 @ta.overload
 def readiter(f: ta.TextIO, sz: int) -> ta.Iterator[str]:
     ...
@@ -59,6 +70,9 @@ def readiter(f, sz):
     return iter(functools.partial(f.read, sz), None)
 
 
+##
+
+
 @dc.dataclass(frozen=True)
 class IterGen(ta.Generic[T]):
     fn: ta.Callable[[], ta.Iterable[T]]
@@ -70,8 +84,7 @@ class IterGen(ta.Generic[T]):
 itergen = IterGen
 
 
-def renumerate(it: ta.Iterable[T]) -> ta.Iterable[tuple[T, int]]:
-    return ((e, i) for i, e in enumerate(it))
+##
 
 
 flatten = itertools.chain.from_iterable
```
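Net effect: `renumerate` moves up next to the other small iterable helpers, and `common_prefix_len` is new. Both are one-liners: `renumerate` is `enumerate` with the tuple order flipped to `(element, index)`, and `common_prefix_len` zips its iterables and counts the leading positions where every element matches. A usage sketch based on the definitions above:

```python
from omlish.lang.iterables import common_prefix_len, renumerate

# renumerate: (element, index) pairs instead of enumerate's (index, element).
assert list(renumerate('abc')) == [('a', 0), ('b', 1), ('c', 2)]

# common_prefix_len: length of the longest prefix shared by all the iterables.
assert common_prefix_len('interleave', 'internal', 'interface') == 5
assert common_prefix_len([1, 2, 3], [1, 2, 4]) == 2
```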
omlish/libc.py
CHANGED
```diff
@@ -8,6 +8,9 @@ import sys
 import typing as ta
 
 
+##
+
+
 LINUX_PLATFORMS = ('linux',)
 DARWIN_PLATFORMS = ('darwin',)
 
@@ -38,14 +41,14 @@ def lasterr() -> tuple[int, str]:
 
 
 # int raise(int sig);
-libc.
-libc.
-libc.
-
+libc.raise_ = libc['raise']  # type: ignore  # noqa
+libc.raise_.restype = ct.c_int  # noqa
+libc.raise_.argtypes = [ct.c_int]  # noqa
+raise_ = libc.raise_  # noqa
 
 
 def sigtrap() -> None:
-    libc.
+    libc.raise_(signal.SIGTRAP)  # noqa
 
 
 ##
```
|