omlish 0.0.0.dev169__py3-none-any.whl → 0.0.0.dev170__py3-none-any.whl

omlish/__about__.py CHANGED
@@ -1,5 +1,5 @@
-__version__ = '0.0.0.dev169'
-__revision__ = 'ad8bd22d9c1cd7a5a22af3b9d27906ce64fd8451'
+__version__ = '0.0.0.dev170'
+__revision__ = 'be0f1bb54e3cbc28c30a174a14aa4790f265a33a'
 
 
 #
omlish/antlr/_runtime/__init__.py CHANGED
@@ -1,24 +1,2 @@
-# type: ignore
 # ruff: noqa
 # flake8: noqa
-from .Token import Token
-from .InputStream import InputStream
-from .FileStream import FileStream
-from .StdinStream import StdinStream
-from .BufferedTokenStream import TokenStream
-from .CommonTokenStream import CommonTokenStream
-from .Lexer import Lexer
-from .Parser import Parser
-from .dfa.DFA import DFA
-from .atn.ATN import ATN
-from .atn.ATNDeserializer import ATNDeserializer
-from .atn.LexerATNSimulator import LexerATNSimulator
-from .atn.ParserATNSimulator import ParserATNSimulator
-from .atn.PredictionMode import PredictionMode
-from .PredictionContext import PredictionContextCache
-from .ParserRuleContext import RuleContext, ParserRuleContext
-from .tree.Tree import ParseTreeListener, ParseTreeVisitor, ParseTreeWalker, TerminalNode, ErrorNode, RuleNode
-from .error.Errors import RecognitionException, IllegalStateException, NoViableAltException
-from .error.ErrorStrategy import BailErrorStrategy
-from .error.DiagnosticErrorListener import DiagnosticErrorListener
-from .Utils import str_list
omlish/antlr/_runtime/_all.py ADDED
@@ -0,0 +1,24 @@
+# ruff: noqa
+# flake8: noqa
+# type: ignore
+from .Token import Token
+from .InputStream import InputStream
+from .FileStream import FileStream
+from .StdinStream import StdinStream
+from .BufferedTokenStream import TokenStream
+from .CommonTokenStream import CommonTokenStream
+from .Lexer import Lexer
+from .Parser import Parser
+from .dfa.DFA import DFA
+from .atn.ATN import ATN
+from .atn.ATNDeserializer import ATNDeserializer
+from .atn.LexerATNSimulator import LexerATNSimulator
+from .atn.ParserATNSimulator import ParserATNSimulator
+from .atn.PredictionMode import PredictionMode
+from .PredictionContext import PredictionContextCache
+from .ParserRuleContext import RuleContext, ParserRuleContext
+from .tree.Tree import ParseTreeListener, ParseTreeVisitor, ParseTreeWalker, TerminalNode, ErrorNode, RuleNode
+from .error.Errors import RecognitionException, IllegalStateException, NoViableAltException
+from .error.ErrorStrategy import BailErrorStrategy
+from .error.DiagnosticErrorListener import DiagnosticErrorListener
+from .Utils import str_list
omlish/antlr/_runtime/atn/ParserATNSimulator.py CHANGED
@@ -235,7 +235,7 @@
 # the input.</p>
 #
 import sys
-from .. import DFA
+from ..dfa.DFA import DFA
 from ..BufferedTokenStream import TokenStream
 from ..Parser import Parser
 from ..ParserRuleContext import ParserRuleContext
omlish/antlr/_runtime/dfa/DFASerializer.py CHANGED
@@ -9,7 +9,7 @@
 
 # A DFA walker that knows how to dump them to serialized strings.#/
 from io import StringIO
-from .. import DFA
+from ..dfa.DFA import DFA
 from ..Utils import str_list
 from .DFAState import DFAState
 
omlish/antlr/_runtime/error/DiagnosticErrorListener.py CHANGED
@@ -28,7 +28,8 @@
 # </ul>
 
 from io import StringIO
-from .. import Parser, DFA
+from ..Parser import Parser
+from ..dfa.DFA import DFA
 from ..atn.ATNConfigSet import ATNConfigSet
 from .ErrorListener import ErrorListener
 
omlish/antlr/_runtime/xpath/XPath.py CHANGED
@@ -50,7 +50,13 @@
 # <p>
 # Whitespace is not allowed.</p>
 #
-from .. import CommonTokenStream, DFA, PredictionContextCache, Lexer, LexerATNSimulator, ParserRuleContext, TerminalNode
+from ..CommonTokenStream import CommonTokenStream
+from ..dfa.DFA import DFA
+from ..PredictionContext import PredictionContextCache
+from ..Lexer import Lexer
+from ..atn.LexerATNSimulator import LexerATNSimulator
+from ..ParserRuleContext import ParserRuleContext
+from ..tree.Tree import TerminalNode
 from ..InputStream import InputStream
 from ..Parser import Parser
 from ..RuleContext import RuleContext
omlish/antlr/_runtime/xpath/XPathLexer.py CHANGED
@@ -2,7 +2,7 @@
 # ruff: noqa
 # flake8: noqa
 # Generated from XPathLexer.g4 by ANTLR 4.11.2-SNAPSHOT
-from .. import *
+from .._all import *
 from io import StringIO
 import sys
 if sys.version_info[1] > 5:
omlish/antlr/delimit.py ADDED
@@ -0,0 +1,106 @@
+# ruff: noqa: N802 N815
+import io
+import typing as ta
+
+from .. import check
+from . import runtime as antlr4
+
+
+class DelimitingLexer(antlr4.Lexer):
+    def __init__(
+            self,
+            *args: ta.Any,
+            delimiter_token: ta.Any,
+            delimiters: ta.Iterable[str],
+            no_skip: bool = False,
+            **kwargs,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+
+        self._delimiter_token = delimiter_token
+        self._delimiters = set(check.not_isinstance(delimiters, str))
+        self._no_skip = no_skip
+
+    _hitEOF: bool
+
+    def nextToken(self) -> antlr4.Token:
+        if self._input is None:
+            raise antlr4.IllegalStateException('nextToken requires a non-null input stream.')
+
+        token_start_marker = self._input.mark()
+        try:
+            while True:
+                if self._hitEOF:
+                    self.emitEOF()
+                    return self._token
+
+                self._token: antlr4.Token | None = None
+                self._channel = antlr4.Token.DEFAULT_CHANNEL
+                self._tokenStartCharIndex = self._input.index
+                self._tokenStartColumn = self._interp.column
+                self._tokenStartLine = self._interp.line
+                self._text = None
+
+                continue_outer = False
+                while True:
+                    self._type = antlr4.Token.INVALID_TYPE
+                    ttype = self.SKIP
+
+                    for delimiter in self._delimiters:
+                        if self._match_delimiter(delimiter):
+                            ttype = self._delimiter_token
+                            break
+                    else:
+                        try:
+                            ttype = self._interp.match(self._input, self._mode)
+                        except antlr4.LexerNoViableAltException as e:
+                            self.notifyListeners(e)  # report error
+                            self.recover(e)
+
+                    if self._input.LA(1) == antlr4.Token.EOF:
+                        self._hitEOF = True
+
+                    if self._type == antlr4.Token.INVALID_TYPE:
+                        self._type = ttype
+
+                    if not self._no_skip and self._type == self.SKIP:
+                        continue_outer = True
+                        break
+
+                    if self._type != self.MORE:
+                        break
+
+                if continue_outer:
+                    continue
+
+                if self._token is None:
+                    self.emit()
+
+                return self._token
+
+        finally:
+            self._input.release(token_start_marker)
+
+    def _match_delimiter(self, delimiter: str) -> bool:
+        for i, c in enumerate(delimiter):
+            if chr(self._input.LA(i + 1)) != c:
+                return False
+        self._input.seek(self._input.index + len(delimiter))
+        return True
+
+    def split(self) -> tuple[list[tuple[str, str]], str]:
+        lst = []
+        sb = io.StringIO()
+        while True:
+            token = self.nextToken()
+            if token.type == antlr4.Token.EOF:
+                break
+            if token.type == self._delimiter_token:
+                statement = sb.getvalue().strip()
+                if statement:
+                    lst.append((statement, token.text))
+                sb = io.StringIO()
+            else:
+                sb.write(token.text)
+        partial = sb.getvalue()
+        return lst, partial
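
A minimal usage sketch for the new delimit.py helper, assuming a hypothetical ANTLR-generated lexer MyLexer and an arbitrary sentinel delimiter token type:

    from omlish.antlr import runtime as antlr4
    from omlish.antlr.delimit import DelimitingLexer

    # Mix DelimitingLexer in ahead of the generated lexer so its delimiter check
    # runs before normal tokenization.
    class MySplittingLexer(DelimitingLexer, MyLexer):  # MyLexer is hypothetical
        pass

    lexer = MySplittingLexer(
        antlr4.InputStream('select 1 ; select 2 ; select'),
        delimiter_token=-2,  # sentinel token type, assumed unused by MyLexer
        delimiters=[';'],
    )
    statements, partial = lexer.split()
    # statements: list of (statement_text, delimiter_text) pairs
    # partial: trailing text after the last delimiter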
omlish/antlr/dot.py ADDED
@@ -0,0 +1,31 @@
+from ..graphs import dot
+from . import runtime as antlr4
+from .utils import yield_contexts
+
+
+def dot_ctx(root: antlr4.ParserRuleContext) -> dot.Graph:
+    stmts: list[dot.Stmt] = [
+        dot.RawStmt('rankdir=LR;'),
+    ]
+
+    for c in yield_contexts(root):
+        if isinstance(c, antlr4.TerminalNode):
+            continue
+
+        lbl = [
+            [type(c).__name__],
+            [str(id(c))],
+            [f'{c.start} {c.stop}'],
+        ]
+
+        stmts.append(dot.Node(f'_{id(c)}', {'label': lbl, 'shape': 'box'}))
+
+        for n in (c.children or []):
+            if not isinstance(n, antlr4.TerminalNode):
+                stmts.append(dot.Edge(f'_{id(c)}', f'_{id(n)}'))
+
+    return dot.Graph(stmts)
+
+
+def open_dot_ctx(root: antlr4.ParserRuleContext) -> None:
+    dot.open_dot(dot.render(dot_ctx(root)))
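
A hedged sketch of the new dot helpers, assuming `tree` is a ParserRuleContext built elsewhere: dot_ctx builds a Graph of the non-terminal contexts, and dot.render (the same call open_dot_ctx uses above) turns it into Graphviz source without opening a viewer.

    from omlish.antlr.dot import dot_ctx
    from omlish.graphs import dot

    print(dot.render(dot_ctx(tree)))  # Graphviz source for the context graph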
omlish/antlr/errors.py ADDED
@@ -0,0 +1,11 @@
+# ruff: noqa: N802 N803
+from . import runtime as antlr4
+
+
+class ParseError(Exception):
+    pass
+
+
+class SilentRaisingErrorListener(antlr4.ErrorListener):
+    def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
+        raise ParseError(recognizer, offendingSymbol, line, column, msg, e)
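
A small sketch of wiring the new listener into a parser (assumes the usual ANTLR Recognizer listener API and a hypothetical start rule `root`): syntax errors then surface as ParseError instead of being printed to the console.

    from omlish.antlr.errors import ParseError, SilentRaisingErrorListener

    parser.removeErrorListeners()
    parser.addErrorListener(SilentRaisingErrorListener())
    try:
        tree = parser.root()  # hypothetical start rule
    except ParseError as e:
        ...  # recognizer, offending symbol, line, column, and message are in e.args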
omlish/antlr/input.py ADDED
@@ -0,0 +1,96 @@
+# ruff: noqa: N802
+import typing as ta
+
+
+##
+
+
+# class InputStream(ta.Protocol):
+#
+#     @property
+#     def index(self) -> int: ...
+#
+#     @property
+#     def size(self) -> int: ...
+#
+#     # Reset the stream so that it's in the same state it was when the object was created *except* the data array is not
+#     # touched.
+#     def reset(self) -> None: ...
+#
+#     def consume(self) -> None: ...
+#
+#     def LA(self, offset: int) -> int: ...
+#
+#     def LT(self, offset: int) -> int: ...
+#
+#     def mark(self) -> int: ...
+#
+#     def release(self, marker: int) -> None: ...
+#
+#     # consume() ahead until p==_index; can't just set p=_index as we must update line and column. If we seek backwards,
+#     # just set p
+#     def seek(self, _index: int) -> None: ...
+#
+#     def getText(self, start: int, stop: int) -> str: ...
+#
+#     def __str__(self) -> str: ...
+
+
+InputStream: ta.TypeAlias = ta.Any
+
+
+##
+
+
+# @lang.protocol_check(InputStream)
+class ProxyInputStream:
+    def __init__(self, target: InputStream) -> None:
+        super().__init__()
+
+        self._target = target
+
+    @property
+    def index(self) -> int:
+        return self._target.index
+
+    @property
+    def size(self) -> int:
+        return self._target.size
+
+    def reset(self) -> None:
+        self._target.reset()
+
+    def consume(self) -> None:
+        self._target.consume()
+
+    def LA(self, offset: int) -> int:
+        return self._target.LA(offset)
+
+    def LT(self, offset: int) -> int:
+        return self._target.LT(offset)
+
+    def mark(self) -> int:
+        return self._target.mark()
+
+    def release(self, marker: int) -> None:
+        return self._target.release(marker)
+
+    def seek(self, _index: int) -> None:
+        return self._target.seek(_index)
+
+    def getText(self, start: int, stop: int) -> str:
+        return self._target.getText(start, stop)
+
+    def __str__(self) -> str:
+        return str(self._target)
+
+
+##
+
+
+class CaseInsensitiveInputStream(ProxyInputStream):
+    def LA(self, offset: int) -> int:
+        ret = super().LA(offset)
+        if ret != -1:
+            ret = ord(chr(ret).upper())
+        return ret
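
A sketch of the intended use of CaseInsensitiveInputStream (MyLexer is again a hypothetical generated lexer): the lexer sees upper-cased code points via LA(), so keyword rules written in upper case match any casing, while token text still comes from the original input.

    from omlish.antlr import runtime as antlr4
    from omlish.antlr.input import CaseInsensitiveInputStream

    raw = antlr4.InputStream('Select * From t')
    lexer = MyLexer(CaseInsensitiveInputStream(raw))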
omlish/antlr/parsing.py ADDED
@@ -0,0 +1,19 @@
+# ruff: noqa: N802 N803
+import typing as ta
+
+from . import runtime as antlr4
+
+
+LexerT = ta.TypeVar('LexerT', bound=antlr4.Lexer)
+ParserT = ta.TypeVar('ParserT', bound=antlr4.Parser)
+
+
+def parse(
+        buf: str,
+        lexer_cls: type[LexerT],
+        parser_cls: type[ParserT],
+) -> ParserT:
+    lexer = lexer_cls(antlr4.InputStream(buf))
+    stream = antlr4.CommonTokenStream(lexer)
+    stream.fill()
+    return parser_cls(stream)
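
A usage sketch for the new parse() helper, with hypothetical generated classes MyLexer/MyParser and a hypothetical start rule expr():

    from omlish.antlr.parsing import parse

    parser = parse('1 + 2', MyLexer, MyParser)
    tree = parser.expr()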
omlish/antlr/runtime.py ADDED
@@ -0,0 +1,102 @@
+# ruff: noqa: I001
+# flake8: noqa: F401
+
+from ._runtime.BufferedTokenStream import (  # type: ignore
+    TokenStream,
+)
+
+from ._runtime.CommonTokenStream import (  # type: ignore
+    CommonTokenStream,
+)
+
+from ._runtime.FileStream import (  # type: ignore
+    FileStream,
+)
+
+from ._runtime.InputStream import (  # type: ignore
+    InputStream,
+)
+
+from ._runtime.Lexer import (  # type: ignore
+    Lexer,
+)
+
+from ._runtime.Parser import (  # type: ignore
+    Parser,
+)
+
+from ._runtime.ParserRuleContext import (  # type: ignore
+    ParserRuleContext,
+    RuleContext,
+)
+
+from ._runtime.PredictionContext import (  # type: ignore
+    PredictionContextCache,
+)
+
+from ._runtime.StdinStream import (  # type: ignore
+    StdinStream,
+)
+
+from ._runtime.Token import (  # type: ignore
+    Token,
+)
+
+from ._runtime.Utils import (  # type: ignore
+    str_list,
+)
+
+from ._runtime.atn.ATN import (  # type: ignore
+    ATN,
+)
+
+from ._runtime.atn.ATNDeserializer import (  # type: ignore
+    ATNDeserializer,
+)
+
+from ._runtime.atn.LexerATNSimulator import (  # type: ignore
+    LexerATNSimulator,
+)
+
+from ._runtime.atn.ParserATNSimulator import (  # type: ignore
+    ParserATNSimulator,
+)
+
+from ._runtime.atn.PredictionMode import (  # type: ignore
+    PredictionMode,
+)
+
+from ._runtime.dfa.DFA import (  # type: ignore
+    DFA,
+)
+
+from ._runtime.error.DiagnosticErrorListener import (  # type: ignore
+    DiagnosticErrorListener,
+)
+
+from ._runtime.error.ErrorListener import (  # type: ignore
+    ErrorListener,
+)
+
+from ._runtime.error.ErrorStrategy import (  # type: ignore
+    BailErrorStrategy,
+)
+
+from ._runtime.error.Errors import (  # type: ignore
+    LexerNoViableAltException,
+)
+
+from ._runtime.error.Errors import (  # type: ignore
+    IllegalStateException,
+    NoViableAltException,
+    RecognitionException,
+)
+
+from ._runtime.tree.Tree import (  # type: ignore
+    ErrorNode,
+    ParseTreeListener,
+    ParseTreeVisitor,
+    ParseTreeWalker,
+    RuleNode,
+    TerminalNode,
+)
omlish/antlr/utils.py ADDED
@@ -0,0 +1,38 @@
+# ruff: noqa: N802 N803
+import io
+import typing as ta
+
+from . import runtime as antlr4
+
+
+##
+
+
+def pformat(
+        node: ta.Any,
+        *,
+        buf: ta.IO | None = None,
+        indent: str = '',
+        child_indent: str = '  ',
+) -> ta.IO:
+    if buf is None:
+        buf = io.StringIO()
+    buf.write(indent)
+    buf.write(node.__class__.__name__)
+    if hasattr(node, 'start') and hasattr(node, 'stop'):
+        buf.write(f' ({node.start} -> {node.stop})')
+    buf.write('\n')
+    for child in getattr(node, 'children', []) or []:
+        pformat(child, buf=buf, indent=indent + child_indent, child_indent=child_indent)
+    return buf
+
+
+def yield_contexts(
+        root: antlr4.ParserRuleContext,
+) -> ta.Iterator[antlr4.ParserRuleContext]:
+    q = [root]
+    while q:
+        c = q.pop()
+        yield c
+        if not isinstance(c, antlr4.TerminalNode) and c.children:
+            q.extend(c.children)
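
A brief sketch of the new tree helpers, assuming `tree` is a parse tree built elsewhere: pformat renders an indented outline of the context classes, and yield_contexts walks every context in the tree.

    from omlish.antlr.utils import pformat, yield_contexts

    print(pformat(tree).getvalue())
    for ctx in yield_contexts(tree):
        ...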
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: omlish
-Version: 0.0.0.dev169
+Version: 0.0.0.dev170
 Summary: omlish
 Author: wrmsr
 License: BSD-3-Clause
@@ -1,5 +1,5 @@
 omlish/.manifests.json,sha256=0BnQGD2dcXEma0Jop2ZesvDNzSj3CAJBNq8aTGuBz9A,7276
-omlish/__about__.py,sha256=Vbn2IOi3EX8LX5PEfmtAHPOYwizDkndZ2-r_FU5omCw,3409
+omlish/__about__.py,sha256=PoGd-lg7WxO1UiKHGhsCnxmHLiTfBft9-eJMax5A1Vs,3409
 omlish/__init__.py,sha256=SsyiITTuK0v74XpKV8dqNaCmjOlan1JZKrHQv5rWKPA,253
 omlish/c3.py,sha256=ubu7lHwss5V4UznbejAI0qXhXahrU01MysuHOZI9C4U,8116
 omlish/cached.py,sha256=UI-XTFBwA6YXWJJJeBn-WkwBkfzDjLBBaZf4nIJA9y0,510
@@ -14,6 +14,13 @@ omlish/subprocesses.py,sha256=n6pk0nUaTFHzD_A6duyKNJ4ggncU7uNepfh_T90etHE,8671
 omlish/sync.py,sha256=QJ79kxmIqDP9SeHDoZAf--DpFIhDQe1jACy8H4N0yZI,2928
 omlish/term.py,sha256=EVHm3lEEIc9hT4f8BPmzbNUwlqZ8nrRpCwyQMN7LBm0,9313
 omlish/antlr/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+omlish/antlr/delimit.py,sha256=3Byvh9_Ip8ftM_SeSEmMbnNo1jrxk-xm8HnHDp_nDaI,3466
+omlish/antlr/dot.py,sha256=uH2X7-8xNLYDQNJ30uW8ssv1MLkZSm07GsalcRuunYI,817
+omlish/antlr/errors.py,sha256=foYz2109WReT1C7qZsIrb4zCAkZg4vM_UiDOAPC0AqQ,308
+omlish/antlr/input.py,sha256=baeO279AIxR50pymya0eabtnc2A0bSdA5u7jvIGebzA,2090
+omlish/antlr/parsing.py,sha256=PH4WlQAqr6ZfnOUNeQLaTMaJJjx3vK2TCgI9HQUNAKk,435
+omlish/antlr/runtime.py,sha256=wYUiJ0qoj4soHFL6fsq91MnUrDSKUEQVmJScKJibOAc,1975
+omlish/antlr/utils.py,sha256=hi_RFUl222r2gQsmmm5MYg5_vYa3q8u-KP4CC1ZHndA,912
 omlish/antlr/_runtime/BufferedTokenStream.py,sha256=1Rnhm62MZCWSuQeRs7lRUbdtdyo7Gyg8r4gAETjv-cE,10793
 omlish/antlr/_runtime/CommonTokenFactory.py,sha256=QrSSTH0gYhOpPeOHqrs6-2g1PGcgYvjhR6J6pynKLOc,2147
 omlish/antlr/_runtime/CommonTokenStream.py,sha256=L7giynpsS97oM6ZtPRIw8m6eIbGSaeEUJ7HN6b2etN4,2795
@@ -34,7 +41,8 @@ omlish/antlr/_runtime/StdinStream.py,sha256=8up7-oFlr-ydpFV7kFqI1mRDD83Y8wYcXs0r
 omlish/antlr/_runtime/Token.py,sha256=Ywq-AwJz6GXQ431TTFExNylshQbNr6T5u4_48dxlrtk,5249
 omlish/antlr/_runtime/TokenStreamRewriter.py,sha256=2Zd8vdNQ7_XF1Y4r3axxHTD-hdauxyEPaePOqArF1g0,10355
 omlish/antlr/_runtime/Utils.py,sha256=Edv360609RwDaEz2JxlXCj9-HuXNEmqr4RVVcbhdLco,974
-omlish/antlr/_runtime/__init__.py,sha256=i-TtI82yr2C7owc6_T7nYULFvULcwJ9wSVbq3p71DgY,1042
+omlish/antlr/_runtime/__init__.py,sha256=Jn5lqTVbeUQXD5a4IxDHKibOatAQWVTlaQ8M4mYu2Yw,28
+omlish/antlr/_runtime/_all.py,sha256=MbNYoQYRympa1nGlgtuZ1AkJTv1SacqvBKG7bTXG0So,1042
 omlish/antlr/_runtime/_pygrun.py,sha256=22tgQKyhLO2UbnwacAzsew7bkXjENctDs4XrmSHAqXc,6328
 omlish/antlr/_runtime/atn/ATN.py,sha256=0V07bHRY-_rX-Du85F08KbMDbvr055co3Crt0VZbwOo,5792
 omlish/antlr/_runtime/atn/ATNConfig.py,sha256=sY7zO2oSh9SLSvs05gY84vtFmV4ZcTKz7dXvDIxH_ew,6573
@@ -47,16 +55,16 @@ omlish/antlr/_runtime/atn/ATNType.py,sha256=_UdEWJXYM9gmOQ0u8XxruKLJTYU5ePuNGMAQ
 omlish/antlr/_runtime/atn/LexerATNSimulator.py,sha256=ZFUTqu7D39AiC8VZunslTrsl7sZKpukEzlkWFK8CRBA,25408
 omlish/antlr/_runtime/atn/LexerAction.py,sha256=PS8ArQadDRVXYnfCbOxJBuEVh7jMX2JtarhxZJ9c-jE,10057
 omlish/antlr/_runtime/atn/LexerActionExecutor.py,sha256=GoLnxIG-mzOAHLZvfgIBMGB9i2FyXrZrqtFK3B0Hoak,6448
-omlish/antlr/_runtime/atn/ParserATNSimulator.py,sha256=f88xNWRRsMSUz7Nf4oX-MQ9HTMdl0zBK7RcraKWZZlE,80739
+omlish/antlr/_runtime/atn/ParserATNSimulator.py,sha256=5N0htVWAy3x8rfWVs1YN2Jz16LLpXKjAwefFPXgBHcM,80746
 omlish/antlr/_runtime/atn/PredictionMode.py,sha256=JAnsYHfd6vD1t1wvLNpBuyi5avjxn8Flbkh_lcrgs1g,22479
 omlish/antlr/_runtime/atn/SemanticContext.py,sha256=jYz90ZjiL4Hcya5881fTQd3zvTzTYpjsntiqiRx7xLA,10674
 omlish/antlr/_runtime/atn/Transition.py,sha256=YrPBbwKK9E4SwFNmS5u6ATfZ-uC-fx5GSAtPyQsBn4E,8775
 omlish/antlr/_runtime/atn/__init__.py,sha256=lMd_BbXYdlDhZQN_q0TKN978XW5G0pq618F0NaLkpFE,71
 omlish/antlr/_runtime/dfa/DFA.py,sha256=1eLI8ckM7_Q4dx_l5m1eiiqJPpaTWJ1DDKUMUUbc-qA,5381
-omlish/antlr/_runtime/dfa/DFASerializer.py,sha256=_adTLo6rq6qJqDATZ3s2mTMqLXuaSQeNsHQfYAH8a-A,2542
+omlish/antlr/_runtime/dfa/DFASerializer.py,sha256=HmQpIrVl0_kit8GSCqYyT4AnFvWyBj8vuye0YeFNEKE,2549
 omlish/antlr/_runtime/dfa/DFAState.py,sha256=vZ5sBJc0hp5BaOVVT3sdQEd1jEi6yb-u-aO9DHpC2Tw,5616
 omlish/antlr/_runtime/dfa/__init__.py,sha256=lMd_BbXYdlDhZQN_q0TKN978XW5G0pq618F0NaLkpFE,71
-omlish/antlr/_runtime/error/DiagnosticErrorListener.py,sha256=Jnzwgij80-xjj54GR2YYsC6QubSEp9RuWnw2fH9a6tU,4452
+omlish/antlr/_runtime/error/DiagnosticErrorListener.py,sha256=SyUCrVs2Vzgvf8j2cSkzjODRBeXrVVrKytRH2LxjALs,4479
 omlish/antlr/_runtime/error/ErrorListener.py,sha256=pPVVFGIDbELkMhG-A5-b39y3SkNkJvsP6n8Y0TfotfA,2765
 omlish/antlr/_runtime/error/ErrorStrategy.py,sha256=0nZwpqR8L0264DphJHoEmYkQHlxZxCEdO54RAvHY6FI,30407
 omlish/antlr/_runtime/error/Errors.py,sha256=sCf-5-vb9MyO8XIDdHSqV_ozfEcRrm87yjg_sNtStXM,6808
@@ -70,8 +78,8 @@ omlish/antlr/_runtime/tree/TokenTagToken.py,sha256=V2ymw3sTiykgi7Lp64a7VsW_s8Lvx
 omlish/antlr/_runtime/tree/Tree.py,sha256=-5aYTkO_eUUGY2BPY03IJLyG72lNMeO_uuf2u1kZL_4,5610
 omlish/antlr/_runtime/tree/Trees.py,sha256=iRcq2bhjBziXEUr6OFFs1rJs82q6wXTSduSBfCMMG7k,3985
 omlish/antlr/_runtime/tree/__init__.py,sha256=Jn5lqTVbeUQXD5a4IxDHKibOatAQWVTlaQ8M4mYu2Yw,28
-omlish/antlr/_runtime/xpath/XPath.py,sha256=CbS0Fpnd2aRt_nQUBJlTpoHpxCyT9qbVW8ldj1aQJKY,9643
-omlish/antlr/_runtime/xpath/XPathLexer.py,sha256=xFtdr4ZXMZxb2dnB_ggWyhvlQiC7RXQlDS5ePhTyOGg,3505
+omlish/antlr/_runtime/xpath/XPath.py,sha256=KSL1SH3VAeRDZCe4dAD7xmUdfk-j434ypZKRreFG2vk,9820
+omlish/antlr/_runtime/xpath/XPathLexer.py,sha256=WvGKQjQnu7pX5C4CFKtsCzba2B2W6ie4ivtWLvlgymM,3509
 omlish/antlr/_runtime/xpath/__init__.py,sha256=lMd_BbXYdlDhZQN_q0TKN978XW5G0pq618F0NaLkpFE,71
 omlish/argparse/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 omlish/argparse/all.py,sha256=EfUSf27vFWqa4Q93AycU5YRsrHt-Nx3pU3uNVapb-EE,1054
@@ -559,9 +567,9 @@ omlish/text/glyphsplit.py,sha256=Ug-dPRO7x-OrNNr8g1y6DotSZ2KH0S-VcOmUobwa4B0,329
 omlish/text/indent.py,sha256=6Jj6TFY9unaPa4xPzrnZemJ-fHsV53IamP93XGjSUHs,1274
 omlish/text/parts.py,sha256=7vPF1aTZdvLVYJ4EwBZVzRSy8XB3YqPd7JwEnNGGAOo,6495
 omlish/text/random.py,sha256=jNWpqiaKjKyTdMXC-pWAsSC10AAP-cmRRPVhm59ZWLk,194
-omlish-0.0.0.dev169.dist-info/LICENSE,sha256=B_hVtavaA8zCYDW99DYdcpDLKz1n3BBRjZrcbv8uG8c,1451
-omlish-0.0.0.dev169.dist-info/METADATA,sha256=CsUpWiDs5ax6CJfLmh_ssyLkwhwgoju6FMPzic0v_58,4264
-omlish-0.0.0.dev169.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
-omlish-0.0.0.dev169.dist-info/entry_points.txt,sha256=Lt84WvRZJskWCAS7xnQGZIeVWksprtUHj0llrvVmod8,35
-omlish-0.0.0.dev169.dist-info/top_level.txt,sha256=pePsKdLu7DvtUiecdYXJ78iO80uDNmBlqe-8hOzOmfs,7
-omlish-0.0.0.dev169.dist-info/RECORD,,
+omlish-0.0.0.dev170.dist-info/LICENSE,sha256=B_hVtavaA8zCYDW99DYdcpDLKz1n3BBRjZrcbv8uG8c,1451
+omlish-0.0.0.dev170.dist-info/METADATA,sha256=1zURRBI9Xgzn95Gk7PvYo_f_QCqUBqd_uNIFZZVBuHM,4264
+omlish-0.0.0.dev170.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+omlish-0.0.0.dev170.dist-info/entry_points.txt,sha256=Lt84WvRZJskWCAS7xnQGZIeVWksprtUHj0llrvVmod8,35
+omlish-0.0.0.dev170.dist-info/top_level.txt,sha256=pePsKdLu7DvtUiecdYXJ78iO80uDNmBlqe-8hOzOmfs,7
+omlish-0.0.0.dev170.dist-info/RECORD,,