omlish 0.0.0.dev437__py3-none-any.whl → 0.0.0.dev438__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89)
  1. omlish/__about__.py +2 -2
  2. omlish/formats/json/stream/__init__.py +3 -1
  3. omlish/formats/json/stream/lexing.py +187 -42
  4. omlish/formats/json/stream/parsing.py +27 -5
  5. omlish/formats/json/stream/utils.py +106 -33
  6. omlish/formats/json5/literals.py +7 -4
  7. omlish/formats/json5/parsing.py +33 -79
  8. omlish/formats/json5/stream.py +45 -50
  9. omlish/http/all.py +59 -53
  10. {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev438.dist-info}/METADATA +1 -1
  11. {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev438.dist-info}/RECORD +15 -89
  12. omlish/formats/json5/Json5.g4 +0 -168
  13. omlish/formats/json5/_antlr/Json5Lexer.py +0 -354
  14. omlish/formats/json5/_antlr/Json5Listener.py +0 -79
  15. omlish/formats/json5/_antlr/Json5Parser.py +0 -617
  16. omlish/formats/json5/_antlr/Json5Visitor.py +0 -52
  17. omlish/formats/json5/_antlr/__init__.py +0 -0
  18. omlish/text/antlr/__init__.py +0 -3
  19. omlish/text/antlr/_runtime/BufferedTokenStream.py +0 -305
  20. omlish/text/antlr/_runtime/CommonTokenFactory.py +0 -64
  21. omlish/text/antlr/_runtime/CommonTokenStream.py +0 -90
  22. omlish/text/antlr/_runtime/FileStream.py +0 -30
  23. omlish/text/antlr/_runtime/InputStream.py +0 -90
  24. omlish/text/antlr/_runtime/IntervalSet.py +0 -183
  25. omlish/text/antlr/_runtime/LICENSE.txt +0 -28
  26. omlish/text/antlr/_runtime/LL1Analyzer.py +0 -176
  27. omlish/text/antlr/_runtime/Lexer.py +0 -332
  28. omlish/text/antlr/_runtime/ListTokenSource.py +0 -147
  29. omlish/text/antlr/_runtime/Parser.py +0 -583
  30. omlish/text/antlr/_runtime/ParserInterpreter.py +0 -173
  31. omlish/text/antlr/_runtime/ParserRuleContext.py +0 -189
  32. omlish/text/antlr/_runtime/PredictionContext.py +0 -632
  33. omlish/text/antlr/_runtime/Recognizer.py +0 -150
  34. omlish/text/antlr/_runtime/RuleContext.py +0 -230
  35. omlish/text/antlr/_runtime/StdinStream.py +0 -14
  36. omlish/text/antlr/_runtime/Token.py +0 -158
  37. omlish/text/antlr/_runtime/TokenStreamRewriter.py +0 -258
  38. omlish/text/antlr/_runtime/Utils.py +0 -36
  39. omlish/text/antlr/_runtime/__init__.py +0 -2
  40. omlish/text/antlr/_runtime/_all.py +0 -24
  41. omlish/text/antlr/_runtime/_pygrun.py +0 -174
  42. omlish/text/antlr/_runtime/atn/ATN.py +0 -135
  43. omlish/text/antlr/_runtime/atn/ATNConfig.py +0 -162
  44. omlish/text/antlr/_runtime/atn/ATNConfigSet.py +0 -215
  45. omlish/text/antlr/_runtime/atn/ATNDeserializationOptions.py +0 -27
  46. omlish/text/antlr/_runtime/atn/ATNDeserializer.py +0 -449
  47. omlish/text/antlr/_runtime/atn/ATNSimulator.py +0 -50
  48. omlish/text/antlr/_runtime/atn/ATNState.py +0 -267
  49. omlish/text/antlr/_runtime/atn/ATNType.py +0 -20
  50. omlish/text/antlr/_runtime/atn/LexerATNSimulator.py +0 -573
  51. omlish/text/antlr/_runtime/atn/LexerAction.py +0 -301
  52. omlish/text/antlr/_runtime/atn/LexerActionExecutor.py +0 -146
  53. omlish/text/antlr/_runtime/atn/ParserATNSimulator.py +0 -1664
  54. omlish/text/antlr/_runtime/atn/PredictionMode.py +0 -502
  55. omlish/text/antlr/_runtime/atn/SemanticContext.py +0 -333
  56. omlish/text/antlr/_runtime/atn/Transition.py +0 -271
  57. omlish/text/antlr/_runtime/atn/__init__.py +0 -4
  58. omlish/text/antlr/_runtime/dfa/DFA.py +0 -136
  59. omlish/text/antlr/_runtime/dfa/DFASerializer.py +0 -76
  60. omlish/text/antlr/_runtime/dfa/DFAState.py +0 -129
  61. omlish/text/antlr/_runtime/dfa/__init__.py +0 -4
  62. omlish/text/antlr/_runtime/error/DiagnosticErrorListener.py +0 -111
  63. omlish/text/antlr/_runtime/error/ErrorListener.py +0 -75
  64. omlish/text/antlr/_runtime/error/ErrorStrategy.py +0 -712
  65. omlish/text/antlr/_runtime/error/Errors.py +0 -176
  66. omlish/text/antlr/_runtime/error/__init__.py +0 -4
  67. omlish/text/antlr/_runtime/tree/Chunk.py +0 -33
  68. omlish/text/antlr/_runtime/tree/ParseTreeMatch.py +0 -121
  69. omlish/text/antlr/_runtime/tree/ParseTreePattern.py +0 -75
  70. omlish/text/antlr/_runtime/tree/ParseTreePatternMatcher.py +0 -377
  71. omlish/text/antlr/_runtime/tree/RuleTagToken.py +0 -53
  72. omlish/text/antlr/_runtime/tree/TokenTagToken.py +0 -50
  73. omlish/text/antlr/_runtime/tree/Tree.py +0 -194
  74. omlish/text/antlr/_runtime/tree/Trees.py +0 -114
  75. omlish/text/antlr/_runtime/tree/__init__.py +0 -2
  76. omlish/text/antlr/_runtime/xpath/XPath.py +0 -278
  77. omlish/text/antlr/_runtime/xpath/XPathLexer.py +0 -98
  78. omlish/text/antlr/_runtime/xpath/__init__.py +0 -4
  79. omlish/text/antlr/delimit.py +0 -109
  80. omlish/text/antlr/dot.py +0 -41
  81. omlish/text/antlr/errors.py +0 -14
  82. omlish/text/antlr/input.py +0 -96
  83. omlish/text/antlr/parsing.py +0 -54
  84. omlish/text/antlr/runtime.py +0 -102
  85. omlish/text/antlr/utils.py +0 -38
  86. {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev438.dist-info}/WHEEL +0 -0
  87. {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev438.dist-info}/entry_points.txt +0 -0
  88. {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev438.dist-info}/licenses/LICENSE +0 -0
  89. {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev438.dist-info}/top_level.txt +0 -0
@@ -1,96 +0,0 @@
1
- # ruff: noqa: N802
2
- import typing as ta
3
-
4
-
5
- ##
6
-
7
-
8
- # class InputStream(ta.Protocol):
9
- #
10
- # @property
11
- # def index(self) -> int: ...
12
- #
13
- # @property
14
- # def size(self) -> int: ...
15
- #
16
- # # Reset the stream so that it's in the same state it was when the object was created *except* the data array is not
17
- # # touched.
18
- # def reset(self) -> None: ...
19
- #
20
- # def consume(self) -> None: ...
21
- #
22
- # def LA(self, offset: int) -> int: ...
23
- #
24
- # def LT(self, offset: int) -> int: ...
25
- #
26
- # def mark(self) -> int: ...
27
- #
28
- # def release(self, marker: int) -> None: ...
29
- #
30
- # # consume() ahead until p==_index; can't just set p=_index as we must update line and column. If we seek backwards,
31
- # # just set p
32
- # def seek(self, _index: int) -> None: ...
33
- #
34
- # def getText(self, start: int, stop: int) -> str: ...
35
- #
36
- # def __str__(self) -> str: ...
37
-
38
-
39
- InputStream: ta.TypeAlias = ta.Any
40
-
41
-
42
- ##
43
-
44
-
45
- # @lang.protocol_check(InputStream)
46
- class ProxyInputStream:
47
- def __init__(self, target: InputStream) -> None:
48
- super().__init__()
49
-
50
- self._target = target
51
-
52
- @property
53
- def index(self) -> int:
54
- return self._target.index
55
-
56
- @property
57
- def size(self) -> int:
58
- return self._target.size
59
-
60
- def reset(self) -> None:
61
- self._target.reset()
62
-
63
- def consume(self) -> None:
64
- self._target.consume()
65
-
66
- def LA(self, offset: int) -> int:
67
- return self._target.LA(offset)
68
-
69
- def LT(self, offset: int) -> int:
70
- return self._target.LT(offset)
71
-
72
- def mark(self) -> int:
73
- return self._target.mark()
74
-
75
- def release(self, marker: int) -> None:
76
- return self._target.release(marker)
77
-
78
- def seek(self, _index: int) -> None:
79
- return self._target.seek(_index)
80
-
81
- def getText(self, start: int, stop: int) -> str:
82
- return self._target.getText(start, stop)
83
-
84
- def __str__(self) -> str:
85
- return str(self._target)
86
-
87
-
88
- ##
89
-
90
-
91
- class CaseInsensitiveInputStream(ProxyInputStream):
92
- def LA(self, offset: int) -> int:
93
- ret = super().LA(offset)
94
- if ret != -1:
95
- ret = ord(chr(ret).upper())
96
- return ret
@@ -1,54 +0,0 @@
1
- # ruff: noqa: N802 N803
2
- import typing as ta
3
-
4
- from ... import check
5
- from . import runtime as antlr4
6
- from .errors import SilentRaisingErrorListener
7
-
8
-
9
- LexerT = ta.TypeVar('LexerT', bound=antlr4.Lexer)
10
- ParserT = ta.TypeVar('ParserT', bound=antlr4.Parser)
11
-
12
-
13
- ##
14
-
15
-
16
- def is_eof_context(ctx: antlr4.ParserRuleContext) -> bool:
17
- return ctx.getChildCount() == 1 and ctx.getChild(0).getSymbol().type == antlr4.Token.EOF
18
-
19
-
20
- class StandardParseTreeVisitor(antlr4.ParseTreeVisitor):
21
- def visit(self, ctx: antlr4.ParserRuleContext):
22
- check.isinstance(ctx, antlr4.ParserRuleContext)
23
- return ctx.accept(self)
24
-
25
- def aggregateResult(self, aggregate, nextResult): # noqa
26
- if aggregate is not None:
27
- check.none(nextResult)
28
- return aggregate
29
- else:
30
- check.none(aggregate)
31
- return nextResult
32
-
33
-
34
- def make_parser(
35
- buf: str,
36
- lexer_cls: type[LexerT],
37
- parser_cls: type[ParserT],
38
- *,
39
- silent_errors: bool = False,
40
- ) -> ParserT:
41
- lexer = lexer_cls(antlr4.InputStream(buf))
42
- if silent_errors:
43
- lexer.removeErrorListeners()
44
- lexer.addErrorListener(SilentRaisingErrorListener())
45
-
46
- stream = antlr4.CommonTokenStream(lexer)
47
- stream.fill()
48
-
49
- parser = parser_cls(stream)
50
- if silent_errors:
51
- parser.removeErrorListeners()
52
- parser.addErrorListener(SilentRaisingErrorListener())
53
-
54
- return parser
@@ -1,102 +0,0 @@
1
- # ruff: noqa: I001
2
- # flake8: noqa: F401
3
-
4
- from ._runtime.BufferedTokenStream import ( # type: ignore
5
- TokenStream,
6
- )
7
-
8
- from ._runtime.CommonTokenStream import ( # type: ignore
9
- CommonTokenStream,
10
- )
11
-
12
- from ._runtime.FileStream import ( # type: ignore
13
- FileStream,
14
- )
15
-
16
- from ._runtime.InputStream import ( # type: ignore
17
- InputStream,
18
- )
19
-
20
- from ._runtime.Lexer import ( # type: ignore
21
- Lexer,
22
- )
23
-
24
- from ._runtime.Parser import ( # type: ignore
25
- Parser,
26
- )
27
-
28
- from ._runtime.ParserRuleContext import ( # type: ignore
29
- ParserRuleContext,
30
- RuleContext,
31
- )
32
-
33
- from ._runtime.PredictionContext import ( # type: ignore
34
- PredictionContextCache,
35
- )
36
-
37
- from ._runtime.StdinStream import ( # type: ignore
38
- StdinStream,
39
- )
40
-
41
- from ._runtime.Token import ( # type: ignore
42
- Token,
43
- )
44
-
45
- from ._runtime.Utils import ( # type: ignore
46
- str_list,
47
- )
48
-
49
- from ._runtime.atn.ATN import ( # type: ignore
50
- ATN,
51
- )
52
-
53
- from ._runtime.atn.ATNDeserializer import ( # type: ignore
54
- ATNDeserializer,
55
- )
56
-
57
- from ._runtime.atn.LexerATNSimulator import ( # type: ignore
58
- LexerATNSimulator,
59
- )
60
-
61
- from ._runtime.atn.ParserATNSimulator import ( # type: ignore
62
- ParserATNSimulator,
63
- )
64
-
65
- from ._runtime.atn.PredictionMode import ( # type: ignore
66
- PredictionMode,
67
- )
68
-
69
- from ._runtime.dfa.DFA import ( # type: ignore
70
- DFA,
71
- )
72
-
73
- from ._runtime.error.DiagnosticErrorListener import ( # type: ignore
74
- DiagnosticErrorListener,
75
- )
76
-
77
- from ._runtime.error.ErrorListener import ( # type: ignore
78
- ErrorListener,
79
- )
80
-
81
- from ._runtime.error.ErrorStrategy import ( # type: ignore
82
- BailErrorStrategy,
83
- )
84
-
85
- from ._runtime.error.Errors import ( # type: ignore
86
- LexerNoViableAltException,
87
- )
88
-
89
- from ._runtime.error.Errors import ( # type: ignore
90
- IllegalStateException,
91
- NoViableAltException,
92
- RecognitionException,
93
- )
94
-
95
- from ._runtime.tree.Tree import ( # type: ignore
96
- ErrorNode,
97
- ParseTreeListener,
98
- ParseTreeVisitor,
99
- ParseTreeWalker,
100
- RuleNode,
101
- TerminalNode,
102
- )
@@ -1,38 +0,0 @@
1
- # ruff: noqa: N802 N803
2
- import io
3
- import typing as ta
4
-
5
- from . import runtime as antlr4
6
-
7
-
8
- ##
9
-
10
-
11
- def pformat(
12
- node: ta.Any,
13
- *,
14
- buf: ta.IO | None = None,
15
- indent: str = '',
16
- child_indent: str = ' ',
17
- ) -> ta.IO:
18
- if buf is None:
19
- buf = io.StringIO()
20
- buf.write(indent)
21
- buf.write(node.__class__.__name__)
22
- if hasattr(node, 'start') and hasattr(node, 'stop'):
23
- buf.write(f' ({node.start} -> {node.stop})')
24
- buf.write('\n')
25
- for child in getattr(node, 'children', []) or []:
26
- pformat(child, buf=buf, indent=indent + child_indent, child_indent=child_indent)
27
- return buf
28
-
29
-
30
- def yield_contexts(
31
- root: antlr4.ParserRuleContext,
32
- ) -> ta.Iterator[antlr4.ParserRuleContext]:
33
- q = [root]
34
- while q:
35
- c = q.pop()
36
- yield c
37
- if not isinstance(c, antlr4.TerminalNode) and c.children:
38
- q.extend(c.children)