omlish 0.0.0.dev437__py3-none-any.whl → 0.0.0.dev438__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omlish/__about__.py +2 -2
- omlish/formats/json/stream/__init__.py +3 -1
- omlish/formats/json/stream/lexing.py +187 -42
- omlish/formats/json/stream/parsing.py +27 -5
- omlish/formats/json/stream/utils.py +106 -33
- omlish/formats/json5/literals.py +7 -4
- omlish/formats/json5/parsing.py +33 -79
- omlish/formats/json5/stream.py +45 -50
- omlish/http/all.py +59 -53
- {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev438.dist-info}/METADATA +1 -1
- {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev438.dist-info}/RECORD +15 -89
- omlish/formats/json5/Json5.g4 +0 -168
- omlish/formats/json5/_antlr/Json5Lexer.py +0 -354
- omlish/formats/json5/_antlr/Json5Listener.py +0 -79
- omlish/formats/json5/_antlr/Json5Parser.py +0 -617
- omlish/formats/json5/_antlr/Json5Visitor.py +0 -52
- omlish/formats/json5/_antlr/__init__.py +0 -0
- omlish/text/antlr/__init__.py +0 -3
- omlish/text/antlr/_runtime/BufferedTokenStream.py +0 -305
- omlish/text/antlr/_runtime/CommonTokenFactory.py +0 -64
- omlish/text/antlr/_runtime/CommonTokenStream.py +0 -90
- omlish/text/antlr/_runtime/FileStream.py +0 -30
- omlish/text/antlr/_runtime/InputStream.py +0 -90
- omlish/text/antlr/_runtime/IntervalSet.py +0 -183
- omlish/text/antlr/_runtime/LICENSE.txt +0 -28
- omlish/text/antlr/_runtime/LL1Analyzer.py +0 -176
- omlish/text/antlr/_runtime/Lexer.py +0 -332
- omlish/text/antlr/_runtime/ListTokenSource.py +0 -147
- omlish/text/antlr/_runtime/Parser.py +0 -583
- omlish/text/antlr/_runtime/ParserInterpreter.py +0 -173
- omlish/text/antlr/_runtime/ParserRuleContext.py +0 -189
- omlish/text/antlr/_runtime/PredictionContext.py +0 -632
- omlish/text/antlr/_runtime/Recognizer.py +0 -150
- omlish/text/antlr/_runtime/RuleContext.py +0 -230
- omlish/text/antlr/_runtime/StdinStream.py +0 -14
- omlish/text/antlr/_runtime/Token.py +0 -158
- omlish/text/antlr/_runtime/TokenStreamRewriter.py +0 -258
- omlish/text/antlr/_runtime/Utils.py +0 -36
- omlish/text/antlr/_runtime/__init__.py +0 -2
- omlish/text/antlr/_runtime/_all.py +0 -24
- omlish/text/antlr/_runtime/_pygrun.py +0 -174
- omlish/text/antlr/_runtime/atn/ATN.py +0 -135
- omlish/text/antlr/_runtime/atn/ATNConfig.py +0 -162
- omlish/text/antlr/_runtime/atn/ATNConfigSet.py +0 -215
- omlish/text/antlr/_runtime/atn/ATNDeserializationOptions.py +0 -27
- omlish/text/antlr/_runtime/atn/ATNDeserializer.py +0 -449
- omlish/text/antlr/_runtime/atn/ATNSimulator.py +0 -50
- omlish/text/antlr/_runtime/atn/ATNState.py +0 -267
- omlish/text/antlr/_runtime/atn/ATNType.py +0 -20
- omlish/text/antlr/_runtime/atn/LexerATNSimulator.py +0 -573
- omlish/text/antlr/_runtime/atn/LexerAction.py +0 -301
- omlish/text/antlr/_runtime/atn/LexerActionExecutor.py +0 -146
- omlish/text/antlr/_runtime/atn/ParserATNSimulator.py +0 -1664
- omlish/text/antlr/_runtime/atn/PredictionMode.py +0 -502
- omlish/text/antlr/_runtime/atn/SemanticContext.py +0 -333
- omlish/text/antlr/_runtime/atn/Transition.py +0 -271
- omlish/text/antlr/_runtime/atn/__init__.py +0 -4
- omlish/text/antlr/_runtime/dfa/DFA.py +0 -136
- omlish/text/antlr/_runtime/dfa/DFASerializer.py +0 -76
- omlish/text/antlr/_runtime/dfa/DFAState.py +0 -129
- omlish/text/antlr/_runtime/dfa/__init__.py +0 -4
- omlish/text/antlr/_runtime/error/DiagnosticErrorListener.py +0 -111
- omlish/text/antlr/_runtime/error/ErrorListener.py +0 -75
- omlish/text/antlr/_runtime/error/ErrorStrategy.py +0 -712
- omlish/text/antlr/_runtime/error/Errors.py +0 -176
- omlish/text/antlr/_runtime/error/__init__.py +0 -4
- omlish/text/antlr/_runtime/tree/Chunk.py +0 -33
- omlish/text/antlr/_runtime/tree/ParseTreeMatch.py +0 -121
- omlish/text/antlr/_runtime/tree/ParseTreePattern.py +0 -75
- omlish/text/antlr/_runtime/tree/ParseTreePatternMatcher.py +0 -377
- omlish/text/antlr/_runtime/tree/RuleTagToken.py +0 -53
- omlish/text/antlr/_runtime/tree/TokenTagToken.py +0 -50
- omlish/text/antlr/_runtime/tree/Tree.py +0 -194
- omlish/text/antlr/_runtime/tree/Trees.py +0 -114
- omlish/text/antlr/_runtime/tree/__init__.py +0 -2
- omlish/text/antlr/_runtime/xpath/XPath.py +0 -278
- omlish/text/antlr/_runtime/xpath/XPathLexer.py +0 -98
- omlish/text/antlr/_runtime/xpath/__init__.py +0 -4
- omlish/text/antlr/delimit.py +0 -109
- omlish/text/antlr/dot.py +0 -41
- omlish/text/antlr/errors.py +0 -14
- omlish/text/antlr/input.py +0 -96
- omlish/text/antlr/parsing.py +0 -54
- omlish/text/antlr/runtime.py +0 -102
- omlish/text/antlr/utils.py +0 -38
- {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev438.dist-info}/WHEEL +0 -0
- {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev438.dist-info}/entry_points.txt +0 -0
- {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev438.dist-info}/licenses/LICENSE +0 -0
- {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev438.dist-info}/top_level.txt +0 -0
omlish/text/antlr/input.py
DELETED
@@ -1,96 +0,0 @@
-# ruff: noqa: N802
-import typing as ta
-
-
-##
-
-
-# class InputStream(ta.Protocol):
-#
-#     @property
-#     def index(self) -> int: ...
-#
-#     @property
-#     def size(self) -> int: ...
-#
-#     # Reset the stream so that it's in the same state it was when the object was created *except* the data array is not
-#     # touched.
-#     def reset(self) -> None: ...
-#
-#     def consume(self) -> None: ...
-#
-#     def LA(self, offset: int) -> int: ...
-#
-#     def LT(self, offset: int) -> int: ...
-#
-#     def mark(self) -> int: ...
-#
-#     def release(self, marker: int) -> None: ...
-#
-#     # consume() ahead until p==_index; can't just set p=_index as we must update line and column. If we seek backwards,
-#     # just set p
-#     def seek(self, _index: int) -> None: ...
-#
-#     def getText(self, start: int, stop: int) -> str: ...
-#
-#     def __str__(self) -> str: ...
-
-
-InputStream: ta.TypeAlias = ta.Any
-
-
-##
-
-
-# @lang.protocol_check(InputStream)
-class ProxyInputStream:
-    def __init__(self, target: InputStream) -> None:
-        super().__init__()
-
-        self._target = target
-
-    @property
-    def index(self) -> int:
-        return self._target.index
-
-    @property
-    def size(self) -> int:
-        return self._target.size
-
-    def reset(self) -> None:
-        self._target.reset()
-
-    def consume(self) -> None:
-        self._target.consume()
-
-    def LA(self, offset: int) -> int:
-        return self._target.LA(offset)
-
-    def LT(self, offset: int) -> int:
-        return self._target.LT(offset)
-
-    def mark(self) -> int:
-        return self._target.mark()
-
-    def release(self, marker: int) -> None:
-        return self._target.release(marker)
-
-    def seek(self, _index: int) -> None:
-        return self._target.seek(_index)
-
-    def getText(self, start: int, stop: int) -> str:
-        return self._target.getText(start, stop)
-
-    def __str__(self) -> str:
-        return str(self._target)
-
-
-##
-
-
-class CaseInsensitiveInputStream(ProxyInputStream):
-    def LA(self, offset: int) -> int:
-        ret = super().LA(offset)
-        if ret != -1:
-            ret = ord(chr(ret).upper())
-        return ret
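For reference, a minimal standalone sketch of the case-folding delegation idea implemented by the deleted CaseInsensitiveInputStream. The stub stream below is hypothetical and not part of the package; the real class wraps an ANTLR InputStream.

# Hypothetical stub stream: LA() is 1-based lookahead returning a code point, or -1 at EOF.
class _StubStream:
    def __init__(self, text: str) -> None:
        self._text = text

    def LA(self, offset: int) -> int:
        i = offset - 1
        return ord(self._text[i]) if 0 <= i < len(self._text) else -1


# Wrapper that folds looked-ahead characters to upper case, delegating everything else.
class _CaseFoldingStream:
    def __init__(self, target) -> None:
        self._target = target

    def LA(self, offset: int) -> int:
        ret = self._target.LA(offset)
        if ret != -1:
            ret = ord(chr(ret).upper())  # fold the lookahead character, leave EOF alone
        return ret


assert _CaseFoldingStream(_StubStream('abc')).LA(1) == ord('A')
assert _CaseFoldingStream(_StubStream('')).LA(1) == -1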
omlish/text/antlr/parsing.py
DELETED
@@ -1,54 +0,0 @@
-# ruff: noqa: N802 N803
-import typing as ta
-
-from ... import check
-from . import runtime as antlr4
-from .errors import SilentRaisingErrorListener
-
-
-LexerT = ta.TypeVar('LexerT', bound=antlr4.Lexer)
-ParserT = ta.TypeVar('ParserT', bound=antlr4.Parser)
-
-
-##
-
-
-def is_eof_context(ctx: antlr4.ParserRuleContext) -> bool:
-    return ctx.getChildCount() == 1 and ctx.getChild(0).getSymbol().type == antlr4.Token.EOF
-
-
-class StandardParseTreeVisitor(antlr4.ParseTreeVisitor):
-    def visit(self, ctx: antlr4.ParserRuleContext):
-        check.isinstance(ctx, antlr4.ParserRuleContext)
-        return ctx.accept(self)
-
-    def aggregateResult(self, aggregate, nextResult):  # noqa
-        if aggregate is not None:
-            check.none(nextResult)
-            return aggregate
-        else:
-            check.none(aggregate)
-            return nextResult
-
-
-def make_parser(
-        buf: str,
-        lexer_cls: type[LexerT],
-        parser_cls: type[ParserT],
-        *,
-        silent_errors: bool = False,
-) -> ParserT:
-    lexer = lexer_cls(antlr4.InputStream(buf))
-    if silent_errors:
-        lexer.removeErrorListeners()
-        lexer.addErrorListener(SilentRaisingErrorListener())
-
-    stream = antlr4.CommonTokenStream(lexer)
-    stream.fill()
-
-    parser = parser_cls(stream)
-    if silent_errors:
-        parser.removeErrorListeners()
-        parser.addErrorListener(SilentRaisingErrorListener())
-
-    return parser
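The aggregateResult override above encodes the rule that at most one child of a visited rule may produce a value. A standalone restatement of that rule with quick checks (illustrative only, not taken from the package):

import typing as ta


def aggregate_result(aggregate: ta.Any, next_result: ta.Any) -> ta.Any:
    # At most one of the two values may be non-None; whichever is set wins.
    if aggregate is not None:
        if next_result is not None:
            raise ValueError('at most one child may produce a value')
        return aggregate
    return next_result


assert aggregate_result(None, 'x') == 'x'
assert aggregate_result('y', None) == 'y'
assert aggregate_result(None, None) is None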
omlish/text/antlr/runtime.py
DELETED
@@ -1,102 +0,0 @@
-# ruff: noqa: I001
-# flake8: noqa: F401
-
-from ._runtime.BufferedTokenStream import (  # type: ignore
-    TokenStream,
-)
-
-from ._runtime.CommonTokenStream import (  # type: ignore
-    CommonTokenStream,
-)
-
-from ._runtime.FileStream import (  # type: ignore
-    FileStream,
-)
-
-from ._runtime.InputStream import (  # type: ignore
-    InputStream,
-)
-
-from ._runtime.Lexer import (  # type: ignore
-    Lexer,
-)
-
-from ._runtime.Parser import (  # type: ignore
-    Parser,
-)
-
-from ._runtime.ParserRuleContext import (  # type: ignore
-    ParserRuleContext,
-    RuleContext,
-)
-
-from ._runtime.PredictionContext import (  # type: ignore
-    PredictionContextCache,
-)
-
-from ._runtime.StdinStream import (  # type: ignore
-    StdinStream,
-)
-
-from ._runtime.Token import (  # type: ignore
-    Token,
-)
-
-from ._runtime.Utils import (  # type: ignore
-    str_list,
-)
-
-from ._runtime.atn.ATN import (  # type: ignore
-    ATN,
-)
-
-from ._runtime.atn.ATNDeserializer import (  # type: ignore
-    ATNDeserializer,
-)
-
-from ._runtime.atn.LexerATNSimulator import (  # type: ignore
-    LexerATNSimulator,
-)
-
-from ._runtime.atn.ParserATNSimulator import (  # type: ignore
-    ParserATNSimulator,
-)
-
-from ._runtime.atn.PredictionMode import (  # type: ignore
-    PredictionMode,
-)
-
-from ._runtime.dfa.DFA import (  # type: ignore
-    DFA,
-)
-
-from ._runtime.error.DiagnosticErrorListener import (  # type: ignore
-    DiagnosticErrorListener,
-)
-
-from ._runtime.error.ErrorListener import (  # type: ignore
-    ErrorListener,
-)
-
-from ._runtime.error.ErrorStrategy import (  # type: ignore
-    BailErrorStrategy,
-)
-
-from ._runtime.error.Errors import (  # type: ignore
-    LexerNoViableAltException,
-)
-
-from ._runtime.error.Errors import (  # type: ignore
-    IllegalStateException,
-    NoViableAltException,
-    RecognitionException,
-)
-
-from ._runtime.tree.Tree import (  # type: ignore
-    ErrorNode,
-    ParseTreeListener,
-    ParseTreeVisitor,
-    ParseTreeWalker,
-    RuleNode,
-    TerminalNode,
-)
omlish/text/antlr/utils.py
DELETED
@@ -1,38 +0,0 @@
-# ruff: noqa: N802 N803
-import io
-import typing as ta
-
-from . import runtime as antlr4
-
-
-##
-
-
-def pformat(
-        node: ta.Any,
-        *,
-        buf: ta.IO | None = None,
-        indent: str = '',
-        child_indent: str = ' ',
-) -> ta.IO:
-    if buf is None:
-        buf = io.StringIO()
-    buf.write(indent)
-    buf.write(node.__class__.__name__)
-    if hasattr(node, 'start') and hasattr(node, 'stop'):
-        buf.write(f' ({node.start} -> {node.stop})')
-    buf.write('\n')
-    for child in getattr(node, 'children', []) or []:
-        pformat(child, buf=buf, indent=indent + child_indent, child_indent=child_indent)
-    return buf
-
-
-def yield_contexts(
-        root: antlr4.ParserRuleContext,
-) -> ta.Iterator[antlr4.ParserRuleContext]:
-    q = [root]
-    while q:
-        c = q.pop()
-        yield c
-        if not isinstance(c, antlr4.TerminalNode) and c.children:
-            q.extend(c.children)
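A quick illustration of what the deleted pformat/yield_contexts helpers produced, using hypothetical stub context classes (the real functions operate on ANTLR ParserRuleContext nodes); the mirrored helpers below are sketches, not the package's code:

import io


class _Ctx:
    def __init__(self, children=None):
        self.children = children or []


class Obj(_Ctx):
    pass


class Pair(_Ctx):
    pass


def _pformat(node, buf=None, indent='', child_indent='  '):
    # Mirrors pformat above: one line per node (its class name), children indented one level.
    if buf is None:
        buf = io.StringIO()
    buf.write(indent + node.__class__.__name__ + '\n')
    for child in getattr(node, 'children', []) or []:
        _pformat(child, buf=buf, indent=indent + child_indent, child_indent=child_indent)
    return buf


def _yield_nodes(root):
    # Mirrors yield_contexts above: explicit-stack traversal yielding every node.
    q = [root]
    while q:
        c = q.pop()
        yield c
        q.extend(getattr(c, 'children', []) or [])


tree = Obj([Pair(), Pair()])
assert _pformat(tree).getvalue() == 'Obj\n  Pair\n  Pair\n'
assert len(list(_yield_nodes(tree))) == 3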
{omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev438.dist-info}/WHEEL
File without changes
{omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev438.dist-info}/entry_points.txt
File without changes
{omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev438.dist-info}/licenses/LICENSE
File without changes
{omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev438.dist-info}/top_level.txt
File without changes