omlish 0.0.0.dev437__py3-none-any.whl → 0.0.0.dev439__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (102)
  1. omlish/__about__.py +2 -2
  2. omlish/dataclasses/impl/api/classes/make.py +1 -1
  3. omlish/dataclasses/tools/static.py +1 -1
  4. omlish/formats/json/stream/__init__.py +5 -3
  5. omlish/formats/json/stream/building.py +2 -2
  6. omlish/formats/json/stream/lexing.py +187 -42
  7. omlish/formats/json/stream/parsing.py +31 -9
  8. omlish/formats/json/stream/rendering.py +6 -6
  9. omlish/formats/json/stream/utils.py +106 -33
  10. omlish/formats/json5/literals.py +7 -4
  11. omlish/formats/json5/parsing.py +33 -79
  12. omlish/formats/json5/stream.py +45 -50
  13. omlish/http/all.py +59 -53
  14. omlish/inject/__init__.py +1 -0
  15. omlish/iterators/__init__.py +2 -0
  16. omlish/iterators/transforms.py +204 -0
  17. omlish/lang/classes/bindable.py +1 -1
  18. omlish/lang/classes/restrict.py +8 -0
  19. omlish/lite/inject.py +1 -0
  20. omlish/lite/marshal.py +1 -0
  21. omlish/reflect/types.py +2 -2
  22. omlish/sql/queries/_marshal.py +1 -1
  23. {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev439.dist-info}/METADATA +2 -2
  24. {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev439.dist-info}/RECORD +28 -101
  25. omlish/formats/json5/Json5.g4 +0 -168
  26. omlish/formats/json5/_antlr/Json5Lexer.py +0 -354
  27. omlish/formats/json5/_antlr/Json5Listener.py +0 -79
  28. omlish/formats/json5/_antlr/Json5Parser.py +0 -617
  29. omlish/formats/json5/_antlr/Json5Visitor.py +0 -52
  30. omlish/formats/json5/_antlr/__init__.py +0 -0
  31. omlish/text/antlr/__init__.py +0 -3
  32. omlish/text/antlr/_runtime/BufferedTokenStream.py +0 -305
  33. omlish/text/antlr/_runtime/CommonTokenFactory.py +0 -64
  34. omlish/text/antlr/_runtime/CommonTokenStream.py +0 -90
  35. omlish/text/antlr/_runtime/FileStream.py +0 -30
  36. omlish/text/antlr/_runtime/InputStream.py +0 -90
  37. omlish/text/antlr/_runtime/IntervalSet.py +0 -183
  38. omlish/text/antlr/_runtime/LICENSE.txt +0 -28
  39. omlish/text/antlr/_runtime/LL1Analyzer.py +0 -176
  40. omlish/text/antlr/_runtime/Lexer.py +0 -332
  41. omlish/text/antlr/_runtime/ListTokenSource.py +0 -147
  42. omlish/text/antlr/_runtime/Parser.py +0 -583
  43. omlish/text/antlr/_runtime/ParserInterpreter.py +0 -173
  44. omlish/text/antlr/_runtime/ParserRuleContext.py +0 -189
  45. omlish/text/antlr/_runtime/PredictionContext.py +0 -632
  46. omlish/text/antlr/_runtime/Recognizer.py +0 -150
  47. omlish/text/antlr/_runtime/RuleContext.py +0 -230
  48. omlish/text/antlr/_runtime/StdinStream.py +0 -14
  49. omlish/text/antlr/_runtime/Token.py +0 -158
  50. omlish/text/antlr/_runtime/TokenStreamRewriter.py +0 -258
  51. omlish/text/antlr/_runtime/Utils.py +0 -36
  52. omlish/text/antlr/_runtime/__init__.py +0 -2
  53. omlish/text/antlr/_runtime/_all.py +0 -24
  54. omlish/text/antlr/_runtime/_pygrun.py +0 -174
  55. omlish/text/antlr/_runtime/atn/ATN.py +0 -135
  56. omlish/text/antlr/_runtime/atn/ATNConfig.py +0 -162
  57. omlish/text/antlr/_runtime/atn/ATNConfigSet.py +0 -215
  58. omlish/text/antlr/_runtime/atn/ATNDeserializationOptions.py +0 -27
  59. omlish/text/antlr/_runtime/atn/ATNDeserializer.py +0 -449
  60. omlish/text/antlr/_runtime/atn/ATNSimulator.py +0 -50
  61. omlish/text/antlr/_runtime/atn/ATNState.py +0 -267
  62. omlish/text/antlr/_runtime/atn/ATNType.py +0 -20
  63. omlish/text/antlr/_runtime/atn/LexerATNSimulator.py +0 -573
  64. omlish/text/antlr/_runtime/atn/LexerAction.py +0 -301
  65. omlish/text/antlr/_runtime/atn/LexerActionExecutor.py +0 -146
  66. omlish/text/antlr/_runtime/atn/ParserATNSimulator.py +0 -1664
  67. omlish/text/antlr/_runtime/atn/PredictionMode.py +0 -502
  68. omlish/text/antlr/_runtime/atn/SemanticContext.py +0 -333
  69. omlish/text/antlr/_runtime/atn/Transition.py +0 -271
  70. omlish/text/antlr/_runtime/atn/__init__.py +0 -4
  71. omlish/text/antlr/_runtime/dfa/DFA.py +0 -136
  72. omlish/text/antlr/_runtime/dfa/DFASerializer.py +0 -76
  73. omlish/text/antlr/_runtime/dfa/DFAState.py +0 -129
  74. omlish/text/antlr/_runtime/dfa/__init__.py +0 -4
  75. omlish/text/antlr/_runtime/error/DiagnosticErrorListener.py +0 -111
  76. omlish/text/antlr/_runtime/error/ErrorListener.py +0 -75
  77. omlish/text/antlr/_runtime/error/ErrorStrategy.py +0 -712
  78. omlish/text/antlr/_runtime/error/Errors.py +0 -176
  79. omlish/text/antlr/_runtime/error/__init__.py +0 -4
  80. omlish/text/antlr/_runtime/tree/Chunk.py +0 -33
  81. omlish/text/antlr/_runtime/tree/ParseTreeMatch.py +0 -121
  82. omlish/text/antlr/_runtime/tree/ParseTreePattern.py +0 -75
  83. omlish/text/antlr/_runtime/tree/ParseTreePatternMatcher.py +0 -377
  84. omlish/text/antlr/_runtime/tree/RuleTagToken.py +0 -53
  85. omlish/text/antlr/_runtime/tree/TokenTagToken.py +0 -50
  86. omlish/text/antlr/_runtime/tree/Tree.py +0 -194
  87. omlish/text/antlr/_runtime/tree/Trees.py +0 -114
  88. omlish/text/antlr/_runtime/tree/__init__.py +0 -2
  89. omlish/text/antlr/_runtime/xpath/XPath.py +0 -278
  90. omlish/text/antlr/_runtime/xpath/XPathLexer.py +0 -98
  91. omlish/text/antlr/_runtime/xpath/__init__.py +0 -4
  92. omlish/text/antlr/delimit.py +0 -109
  93. omlish/text/antlr/dot.py +0 -41
  94. omlish/text/antlr/errors.py +0 -14
  95. omlish/text/antlr/input.py +0 -96
  96. omlish/text/antlr/parsing.py +0 -54
  97. omlish/text/antlr/runtime.py +0 -102
  98. omlish/text/antlr/utils.py +0 -38
  99. {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev439.dist-info}/WHEEL +0 -0
  100. {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev439.dist-info}/entry_points.txt +0 -0
  101. {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev439.dist-info}/licenses/LICENSE +0 -0
  102. {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev439.dist-info}/top_level.txt +0 -0
omlish/text/antlr/input.py
@@ -1,96 +0,0 @@
- # ruff: noqa: N802
- import typing as ta
-
-
- ##
-
-
- # class InputStream(ta.Protocol):
- #
- #     @property
- #     def index(self) -> int: ...
- #
- #     @property
- #     def size(self) -> int: ...
- #
- #     # Reset the stream so that it's in the same state it was when the object was created *except* the data array is not
- #     # touched.
- #     def reset(self) -> None: ...
- #
- #     def consume(self) -> None: ...
- #
- #     def LA(self, offset: int) -> int: ...
- #
- #     def LT(self, offset: int) -> int: ...
- #
- #     def mark(self) -> int: ...
- #
- #     def release(self, marker: int) -> None: ...
- #
- #     # consume() ahead until p==_index; can't just set p=_index as we must update line and column. If we seek backwards,
- #     # just set p
- #     def seek(self, _index: int) -> None: ...
- #
- #     def getText(self, start: int, stop: int) -> str: ...
- #
- #     def __str__(self) -> str: ...
-
-
- InputStream: ta.TypeAlias = ta.Any
-
-
- ##
-
-
- # @lang.protocol_check(InputStream)
- class ProxyInputStream:
-     def __init__(self, target: InputStream) -> None:
-         super().__init__()
-
-         self._target = target
-
-     @property
-     def index(self) -> int:
-         return self._target.index
-
-     @property
-     def size(self) -> int:
-         return self._target.size
-
-     def reset(self) -> None:
-         self._target.reset()
-
-     def consume(self) -> None:
-         self._target.consume()
-
-     def LA(self, offset: int) -> int:
-         return self._target.LA(offset)
-
-     def LT(self, offset: int) -> int:
-         return self._target.LT(offset)
-
-     def mark(self) -> int:
-         return self._target.mark()
-
-     def release(self, marker: int) -> None:
-         return self._target.release(marker)
-
-     def seek(self, _index: int) -> None:
-         return self._target.seek(_index)
-
-     def getText(self, start: int, stop: int) -> str:
-         return self._target.getText(start, stop)
-
-     def __str__(self) -> str:
-         return str(self._target)
-
-
- ##
-
-
- class CaseInsensitiveInputStream(ProxyInputStream):
-     def LA(self, offset: int) -> int:
-         ret = super().LA(offset)
-         if ret != -1:
-             ret = ord(chr(ret).upper())
-         return ret
omlish/text/antlr/parsing.py
@@ -1,54 +0,0 @@
- # ruff: noqa: N802 N803
- import typing as ta
-
- from ... import check
- from . import runtime as antlr4
- from .errors import SilentRaisingErrorListener
-
-
- LexerT = ta.TypeVar('LexerT', bound=antlr4.Lexer)
- ParserT = ta.TypeVar('ParserT', bound=antlr4.Parser)
-
-
- ##
-
-
- def is_eof_context(ctx: antlr4.ParserRuleContext) -> bool:
-     return ctx.getChildCount() == 1 and ctx.getChild(0).getSymbol().type == antlr4.Token.EOF
-
-
- class StandardParseTreeVisitor(antlr4.ParseTreeVisitor):
-     def visit(self, ctx: antlr4.ParserRuleContext):
-         check.isinstance(ctx, antlr4.ParserRuleContext)
-         return ctx.accept(self)
-
-     def aggregateResult(self, aggregate, nextResult):  # noqa
-         if aggregate is not None:
-             check.none(nextResult)
-             return aggregate
-         else:
-             check.none(aggregate)
-             return nextResult
-
-
- def make_parser(
-     buf: str,
-     lexer_cls: type[LexerT],
-     parser_cls: type[ParserT],
-     *,
-     silent_errors: bool = False,
- ) -> ParserT:
-     lexer = lexer_cls(antlr4.InputStream(buf))
-     if silent_errors:
-         lexer.removeErrorListeners()
-         lexer.addErrorListener(SilentRaisingErrorListener())
-
-     stream = antlr4.CommonTokenStream(lexer)
-     stream.fill()
-
-     parser = parser_cls(stream)
-     if silent_errors:
-         parser.removeErrorListeners()
-         parser.addErrorListener(SilentRaisingErrorListener())
-
-     return parser
omlish/text/antlr/runtime.py
@@ -1,102 +0,0 @@
- # ruff: noqa: I001
- # flake8: noqa: F401
-
- from ._runtime.BufferedTokenStream import (  # type: ignore
-     TokenStream,
- )
-
- from ._runtime.CommonTokenStream import (  # type: ignore
-     CommonTokenStream,
- )
-
- from ._runtime.FileStream import (  # type: ignore
-     FileStream,
- )
-
- from ._runtime.InputStream import (  # type: ignore
-     InputStream,
- )
-
- from ._runtime.Lexer import (  # type: ignore
-     Lexer,
- )
-
- from ._runtime.Parser import (  # type: ignore
-     Parser,
- )
-
- from ._runtime.ParserRuleContext import (  # type: ignore
-     ParserRuleContext,
-     RuleContext,
- )
-
- from ._runtime.PredictionContext import (  # type: ignore
-     PredictionContextCache,
- )
-
- from ._runtime.StdinStream import (  # type: ignore
-     StdinStream,
- )
-
- from ._runtime.Token import (  # type: ignore
-     Token,
- )
-
- from ._runtime.Utils import (  # type: ignore
-     str_list,
- )
-
- from ._runtime.atn.ATN import (  # type: ignore
-     ATN,
- )
-
- from ._runtime.atn.ATNDeserializer import (  # type: ignore
-     ATNDeserializer,
- )
-
- from ._runtime.atn.LexerATNSimulator import (  # type: ignore
-     LexerATNSimulator,
- )
-
- from ._runtime.atn.ParserATNSimulator import (  # type: ignore
-     ParserATNSimulator,
- )
-
- from ._runtime.atn.PredictionMode import (  # type: ignore
-     PredictionMode,
- )
-
- from ._runtime.dfa.DFA import (  # type: ignore
-     DFA,
- )
-
- from ._runtime.error.DiagnosticErrorListener import (  # type: ignore
-     DiagnosticErrorListener,
- )
-
- from ._runtime.error.ErrorListener import (  # type: ignore
-     ErrorListener,
- )
-
- from ._runtime.error.ErrorStrategy import (  # type: ignore
-     BailErrorStrategy,
- )
-
- from ._runtime.error.Errors import (  # type: ignore
-     LexerNoViableAltException,
- )
-
- from ._runtime.error.Errors import (  # type: ignore
-     IllegalStateException,
-     NoViableAltException,
-     RecognitionException,
- )
-
- from ._runtime.tree.Tree import (  # type: ignore
-     ErrorNode,
-     ParseTreeListener,
-     ParseTreeVisitor,
-     ParseTreeWalker,
-     RuleNode,
-     TerminalNode,
- )
omlish/text/antlr/utils.py
@@ -1,38 +0,0 @@
- # ruff: noqa: N802 N803
- import io
- import typing as ta
-
- from . import runtime as antlr4
-
-
- ##
-
-
- def pformat(
-     node: ta.Any,
-     *,
-     buf: ta.IO | None = None,
-     indent: str = '',
-     child_indent: str = ' ',
- ) -> ta.IO:
-     if buf is None:
-         buf = io.StringIO()
-     buf.write(indent)
-     buf.write(node.__class__.__name__)
-     if hasattr(node, 'start') and hasattr(node, 'stop'):
-         buf.write(f' ({node.start} -> {node.stop})')
-     buf.write('\n')
-     for child in getattr(node, 'children', []) or []:
-         pformat(child, buf=buf, indent=indent + child_indent, child_indent=child_indent)
-     return buf
-
-
- def yield_contexts(
-     root: antlr4.ParserRuleContext,
- ) -> ta.Iterator[antlr4.ParserRuleContext]:
-     q = [root]
-     while q:
-         c = q.pop()
-         yield c
-         if not isinstance(c, antlr4.TerminalNode) and c.children:
-             q.extend(c.children)