omlish 0.0.0.dev436__py3-none-any.whl → 0.0.0.dev438__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89)
  1. omlish/__about__.py +2 -2
  2. omlish/formats/json/stream/__init__.py +3 -1
  3. omlish/formats/json/stream/lexing.py +187 -42
  4. omlish/formats/json/stream/parsing.py +27 -5
  5. omlish/formats/json/stream/utils.py +106 -33
  6. omlish/formats/json5/literals.py +7 -4
  7. omlish/formats/json5/parsing.py +33 -79
  8. omlish/formats/json5/stream.py +45 -50
  9. omlish/http/all.py +59 -53
  10. {omlish-0.0.0.dev436.dist-info → omlish-0.0.0.dev438.dist-info}/METADATA +1 -1
  11. {omlish-0.0.0.dev436.dist-info → omlish-0.0.0.dev438.dist-info}/RECORD +15 -89
  12. omlish/formats/json5/Json5.g4 +0 -168
  13. omlish/formats/json5/_antlr/Json5Lexer.py +0 -354
  14. omlish/formats/json5/_antlr/Json5Listener.py +0 -79
  15. omlish/formats/json5/_antlr/Json5Parser.py +0 -617
  16. omlish/formats/json5/_antlr/Json5Visitor.py +0 -52
  17. omlish/formats/json5/_antlr/__init__.py +0 -0
  18. omlish/text/antlr/__init__.py +0 -3
  19. omlish/text/antlr/_runtime/BufferedTokenStream.py +0 -305
  20. omlish/text/antlr/_runtime/CommonTokenFactory.py +0 -64
  21. omlish/text/antlr/_runtime/CommonTokenStream.py +0 -90
  22. omlish/text/antlr/_runtime/FileStream.py +0 -30
  23. omlish/text/antlr/_runtime/InputStream.py +0 -90
  24. omlish/text/antlr/_runtime/IntervalSet.py +0 -183
  25. omlish/text/antlr/_runtime/LICENSE.txt +0 -28
  26. omlish/text/antlr/_runtime/LL1Analyzer.py +0 -176
  27. omlish/text/antlr/_runtime/Lexer.py +0 -332
  28. omlish/text/antlr/_runtime/ListTokenSource.py +0 -147
  29. omlish/text/antlr/_runtime/Parser.py +0 -583
  30. omlish/text/antlr/_runtime/ParserInterpreter.py +0 -173
  31. omlish/text/antlr/_runtime/ParserRuleContext.py +0 -189
  32. omlish/text/antlr/_runtime/PredictionContext.py +0 -632
  33. omlish/text/antlr/_runtime/Recognizer.py +0 -150
  34. omlish/text/antlr/_runtime/RuleContext.py +0 -230
  35. omlish/text/antlr/_runtime/StdinStream.py +0 -14
  36. omlish/text/antlr/_runtime/Token.py +0 -158
  37. omlish/text/antlr/_runtime/TokenStreamRewriter.py +0 -258
  38. omlish/text/antlr/_runtime/Utils.py +0 -36
  39. omlish/text/antlr/_runtime/__init__.py +0 -2
  40. omlish/text/antlr/_runtime/_all.py +0 -24
  41. omlish/text/antlr/_runtime/_pygrun.py +0 -174
  42. omlish/text/antlr/_runtime/atn/ATN.py +0 -135
  43. omlish/text/antlr/_runtime/atn/ATNConfig.py +0 -162
  44. omlish/text/antlr/_runtime/atn/ATNConfigSet.py +0 -215
  45. omlish/text/antlr/_runtime/atn/ATNDeserializationOptions.py +0 -27
  46. omlish/text/antlr/_runtime/atn/ATNDeserializer.py +0 -449
  47. omlish/text/antlr/_runtime/atn/ATNSimulator.py +0 -50
  48. omlish/text/antlr/_runtime/atn/ATNState.py +0 -267
  49. omlish/text/antlr/_runtime/atn/ATNType.py +0 -20
  50. omlish/text/antlr/_runtime/atn/LexerATNSimulator.py +0 -573
  51. omlish/text/antlr/_runtime/atn/LexerAction.py +0 -301
  52. omlish/text/antlr/_runtime/atn/LexerActionExecutor.py +0 -146
  53. omlish/text/antlr/_runtime/atn/ParserATNSimulator.py +0 -1664
  54. omlish/text/antlr/_runtime/atn/PredictionMode.py +0 -502
  55. omlish/text/antlr/_runtime/atn/SemanticContext.py +0 -333
  56. omlish/text/antlr/_runtime/atn/Transition.py +0 -271
  57. omlish/text/antlr/_runtime/atn/__init__.py +0 -4
  58. omlish/text/antlr/_runtime/dfa/DFA.py +0 -136
  59. omlish/text/antlr/_runtime/dfa/DFASerializer.py +0 -76
  60. omlish/text/antlr/_runtime/dfa/DFAState.py +0 -129
  61. omlish/text/antlr/_runtime/dfa/__init__.py +0 -4
  62. omlish/text/antlr/_runtime/error/DiagnosticErrorListener.py +0 -111
  63. omlish/text/antlr/_runtime/error/ErrorListener.py +0 -75
  64. omlish/text/antlr/_runtime/error/ErrorStrategy.py +0 -712
  65. omlish/text/antlr/_runtime/error/Errors.py +0 -176
  66. omlish/text/antlr/_runtime/error/__init__.py +0 -4
  67. omlish/text/antlr/_runtime/tree/Chunk.py +0 -33
  68. omlish/text/antlr/_runtime/tree/ParseTreeMatch.py +0 -121
  69. omlish/text/antlr/_runtime/tree/ParseTreePattern.py +0 -75
  70. omlish/text/antlr/_runtime/tree/ParseTreePatternMatcher.py +0 -377
  71. omlish/text/antlr/_runtime/tree/RuleTagToken.py +0 -53
  72. omlish/text/antlr/_runtime/tree/TokenTagToken.py +0 -50
  73. omlish/text/antlr/_runtime/tree/Tree.py +0 -194
  74. omlish/text/antlr/_runtime/tree/Trees.py +0 -114
  75. omlish/text/antlr/_runtime/tree/__init__.py +0 -2
  76. omlish/text/antlr/_runtime/xpath/XPath.py +0 -278
  77. omlish/text/antlr/_runtime/xpath/XPathLexer.py +0 -98
  78. omlish/text/antlr/_runtime/xpath/__init__.py +0 -4
  79. omlish/text/antlr/delimit.py +0 -109
  80. omlish/text/antlr/dot.py +0 -41
  81. omlish/text/antlr/errors.py +0 -14
  82. omlish/text/antlr/input.py +0 -96
  83. omlish/text/antlr/parsing.py +0 -54
  84. omlish/text/antlr/runtime.py +0 -102
  85. omlish/text/antlr/utils.py +0 -38
  86. {omlish-0.0.0.dev436.dist-info → omlish-0.0.0.dev438.dist-info}/WHEEL +0 -0
  87. {omlish-0.0.0.dev436.dist-info → omlish-0.0.0.dev438.dist-info}/entry_points.txt +0 -0
  88. {omlish-0.0.0.dev436.dist-info → omlish-0.0.0.dev438.dist-info}/licenses/LICENSE +0 -0
  89. {omlish-0.0.0.dev436.dist-info → omlish-0.0.0.dev438.dist-info}/top_level.txt +0 -0
@@ -1,183 +0,0 @@
1
- # type: ignore
2
- # ruff: noqa
3
- # flake8: noqa
4
- #
5
- # Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
6
- # Use of this file is governed by the BSD 3-clause license that
7
- # can be found in the LICENSE.txt file in the project root.
8
- #
9
-
10
- from io import StringIO
11
- from .Token import Token
12
-
13
- # need forward declarations
14
- IntervalSet = None
15
-
16
- class IntervalSet(object):
17
- __slots__ = ('intervals', 'readonly')
18
-
19
- def __init__(self):
20
- self.intervals = None
21
- self.readonly = False
22
-
23
- def __iter__(self):
24
- if self.intervals is not None:
25
- for i in self.intervals:
26
- for c in i:
27
- yield c
28
-
29
- def __getitem__(self, item):
30
- i = 0
31
- for k in self:
32
- if i==item:
33
- return k
34
- else:
35
- i += 1
36
- return Token.INVALID_TYPE
37
-
38
- def addOne(self, v:int):
39
- self.addRange(range(v, v+1))
40
-
41
- def addRange(self, v:range):
42
- if self.intervals is None:
43
- self.intervals = list()
44
- self.intervals.append(v)
45
- else:
46
- # find insert pos
47
- k = 0
48
- for i in self.intervals:
49
- # distinct range -> insert
50
- if v.stop<i.start:
51
- self.intervals.insert(k, v)
52
- return
53
- # contiguous range -> adjust
54
- elif v.stop==i.start:
55
- self.intervals[k] = range(v.start, i.stop)
56
- return
57
- # overlapping range -> adjust and reduce
58
- elif v.start<=i.stop:
59
- self.intervals[k] = range(min(i.start,v.start), max(i.stop,v.stop))
60
- self.reduce(k)
61
- return
62
- k += 1
63
- # greater than any existing
64
- self.intervals.append(v)
65
-
66
- def addSet(self, other:IntervalSet):
67
- if other.intervals is not None:
68
- for i in other.intervals:
69
- self.addRange(i)
70
- return self
71
-
72
- def reduce(self, k:int):
73
- # only need to reduce if k is not the last
74
- if k<len(self.intervals)-1:
75
- l = self.intervals[k]
76
- r = self.intervals[k+1]
77
- # if r contained in l
78
- if l.stop >= r.stop:
79
- self.intervals.pop(k+1)
80
- self.reduce(k)
81
- elif l.stop >= r.start:
82
- self.intervals[k] = range(l.start, r.stop)
83
- self.intervals.pop(k+1)
84
-
85
- def complement(self, start, stop):
86
- result = IntervalSet()
87
- result.addRange(range(start,stop+1))
88
- for i in self.intervals:
89
- result.removeRange(i)
90
- return result
91
-
92
- def __contains__(self, item):
93
- if self.intervals is None:
94
- return False
95
- else:
96
- return any(item in i for i in self.intervals)
97
-
98
- def __len__(self):
99
- return sum(len(i) for i in self.intervals)
100
-
101
- def removeRange(self, v):
102
- if v.start==v.stop-1:
103
- self.removeOne(v.start)
104
- elif self.intervals is not None:
105
- k = 0
106
- for i in self.intervals:
107
- # intervals are ordered
108
- if v.stop<=i.start:
109
- return
110
- # check for including range, split it
111
- elif v.start>i.start and v.stop<i.stop:
112
- self.intervals[k] = range(i.start, v.start)
113
- x = range(v.stop, i.stop)
114
- self.intervals.insert(k, x)
115
- return
116
- # check for included range, remove it
117
- elif v.start<=i.start and v.stop>=i.stop:
118
- self.intervals.pop(k)
119
- k -= 1 # need another pass
120
- # check for lower boundary
121
- elif v.start<i.stop:
122
- self.intervals[k] = range(i.start, v.start)
123
- # check for upper boundary
124
- elif v.stop<i.stop:
125
- self.intervals[k] = range(v.stop, i.stop)
126
- k += 1
127
-
128
- def removeOne(self, v):
129
- if self.intervals is not None:
130
- k = 0
131
- for i in self.intervals:
132
- # intervals is ordered
133
- if v<i.start:
134
- return
135
- # check for single value range
136
- elif v==i.start and v==i.stop-1:
137
- self.intervals.pop(k)
138
- return
139
- # check for lower boundary
140
- elif v==i.start:
141
- self.intervals[k] = range(i.start+1, i.stop)
142
- return
143
- # check for upper boundary
144
- elif v==i.stop-1:
145
- self.intervals[k] = range(i.start, i.stop-1)
146
- return
147
- # split existing range
148
- elif v<i.stop-1:
149
- x = range(i.start, v)
150
- self.intervals[k] = range(v + 1, i.stop)
151
- self.intervals.insert(k, x)
152
- return
153
- k += 1
154
-
155
-
156
- def toString(self, literalNames:list, symbolicNames:list):
157
- if self.intervals is None:
158
- return "{}"
159
- with StringIO() as buf:
160
- if len(self)>1:
161
- buf.write("{")
162
- first = True
163
- for i in self.intervals:
164
- for j in i:
165
- if not first:
166
- buf.write(", ")
167
- buf.write(self.elementName(literalNames, symbolicNames, j))
168
- first = False
169
- if len(self)>1:
170
- buf.write("}")
171
- return buf.getvalue()
172
-
173
- def elementName(self, literalNames:list, symbolicNames:list, a:int):
174
- if a==Token.EOF:
175
- return "<EOF>"
176
- elif a==Token.EPSILON:
177
- return "<EPSILON>"
178
- else:
179
- if a<len(literalNames) and literalNames[a] != "<INVALID>":
180
- return literalNames[a]
181
- if a<len(symbolicNames):
182
- return symbolicNames[a]
183
- return "<UNKNOWN>"
@@ -1,28 +0,0 @@
1
- Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
2
-
3
- Redistribution and use in source and binary forms, with or without
4
- modification, are permitted provided that the following conditions
5
- are met:
6
-
7
- 1. Redistributions of source code must retain the above copyright
8
- notice, this list of conditions and the following disclaimer.
9
-
10
- 2. Redistributions in binary form must reproduce the above copyright
11
- notice, this list of conditions and the following disclaimer in the
12
- documentation and/or other materials provided with the distribution.
13
-
14
- 3. Neither name of copyright holders nor the names of its contributors
15
- may be used to endorse or promote products derived from this software
16
- without specific prior written permission.
17
-
18
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19
- ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20
- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21
- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
22
- CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
23
- EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
24
- PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
25
- PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
26
- LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
27
- NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
28
- SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -1,176 +0,0 @@
1
- # type: ignore
2
- # ruff: noqa
3
- # flake8: noqa
4
- #
5
- # Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
6
- # Use of this file is governed by the BSD 3-clause license that
7
- # can be found in the LICENSE.txt file in the project root.
8
- #/
9
- from .IntervalSet import IntervalSet
10
- from .Token import Token
11
- from .PredictionContext import PredictionContext, SingletonPredictionContext, PredictionContextFromRuleContext
12
- from .RuleContext import RuleContext
13
- from .atn.ATN import ATN
14
- from .atn.ATNConfig import ATNConfig
15
- from .atn.ATNState import ATNState, RuleStopState
16
- from .atn.Transition import WildcardTransition, NotSetTransition, AbstractPredicateTransition, RuleTransition
17
-
18
-
19
- class LL1Analyzer (object):
20
- __slots__ = 'atn'
21
-
22
- #* Special value added to the lookahead sets to indicate that we hit
23
- # a predicate during analysis if {@code seeThruPreds==false}.
24
- #/
25
- HIT_PRED = Token.INVALID_TYPE
26
-
27
- def __init__(self, atn:ATN):
28
- self.atn = atn
29
-
30
- #*
31
- # Calculates the SLL(1) expected lookahead set for each outgoing transition
32
- # of an {@link ATNState}. The returned array has one element for each
33
- # outgoing transition in {@code s}. If the closure from transition
34
- # <em>i</em> leads to a semantic predicate before matching a symbol, the
35
- # element at index <em>i</em> of the result will be {@code null}.
36
- #
37
- # @param s the ATN state
38
- # @return the expected symbols for each outgoing transition of {@code s}.
39
- #/
40
- def getDecisionLookahead(self, s:ATNState):
41
- if s is None:
42
- return None
43
-
44
- count = len(s.transitions)
45
- look = [] * count
46
- for alt in range(0, count):
47
- look[alt] = set()
48
- lookBusy = set()
49
- seeThruPreds = False # fail to get lookahead upon pred
50
- self._LOOK(s.transition(alt).target, None, PredictionContext.EMPTY,
51
- look[alt], lookBusy, set(), seeThruPreds, False)
52
- # Wipe out lookahead for this alternative if we found nothing
53
- # or we had a predicate when we !seeThruPreds
54
- if len(look[alt])==0 or self.HIT_PRED in look[alt]:
55
- look[alt] = None
56
- return look
57
-
58
- #*
59
- # Compute set of tokens that can follow {@code s} in the ATN in the
60
- # specified {@code ctx}.
61
- #
62
- # <p>If {@code ctx} is {@code null} and the end of the rule containing
63
- # {@code s} is reached, {@link Token#EPSILON} is added to the result set.
64
- # If {@code ctx} is not {@code null} and the end of the outermost rule is
65
- # reached, {@link Token#EOF} is added to the result set.</p>
66
- #
67
- # @param s the ATN state
68
- # @param stopState the ATN state to stop at. This can be a
69
- # {@link BlockEndState} to detect epsilon paths through a closure.
70
- # @param ctx the complete parser context, or {@code null} if the context
71
- # should be ignored
72
- #
73
- # @return The set of tokens that can follow {@code s} in the ATN in the
74
- # specified {@code ctx}.
75
- #/
76
- def LOOK(self, s:ATNState, stopState:ATNState=None, ctx:RuleContext=None):
77
- r = IntervalSet()
78
- seeThruPreds = True # ignore preds; get all lookahead
79
- lookContext = PredictionContextFromRuleContext(s.atn, ctx) if ctx is not None else None
80
- self._LOOK(s, stopState, lookContext, r, set(), set(), seeThruPreds, True)
81
- return r
82
-
83
- #*
84
- # Compute set of tokens that can follow {@code s} in the ATN in the
85
- # specified {@code ctx}.
86
- #
87
- # <p>If {@code ctx} is {@code null} and {@code stopState} or the end of the
88
- # rule containing {@code s} is reached, {@link Token#EPSILON} is added to
89
- # the result set. If {@code ctx} is not {@code null} and {@code addEOF} is
90
- # {@code true} and {@code stopState} or the end of the outermost rule is
91
- # reached, {@link Token#EOF} is added to the result set.</p>
92
- #
93
- # @param s the ATN state.
94
- # @param stopState the ATN state to stop at. This can be a
95
- # {@link BlockEndState} to detect epsilon paths through a closure.
96
- # @param ctx The outer context, or {@code null} if the outer context should
97
- # not be used.
98
- # @param look The result lookahead set.
99
- # @param lookBusy A set used for preventing epsilon closures in the ATN
100
- # from causing a stack overflow. Outside code should pass
101
- # {@code new HashSet<ATNConfig>} for this argument.
102
- # @param calledRuleStack A set used for preventing left recursion in the
103
- # ATN from causing a stack overflow. Outside code should pass
104
- # {@code new BitSet()} for this argument.
105
- # @param seeThruPreds {@code true} to true semantic predicates as
106
- # implicitly {@code true} and "see through them", otherwise {@code false}
107
- # to treat semantic predicates as opaque and add {@link #HIT_PRED} to the
108
- # result if one is encountered.
109
- # @param addEOF Add {@link Token#EOF} to the result if the end of the
110
- # outermost context is reached. This parameter has no effect if {@code ctx}
111
- # is {@code null}.
112
- #/
113
- def _LOOK(self, s:ATNState, stopState:ATNState , ctx:PredictionContext, look:IntervalSet, lookBusy:set,
114
- calledRuleStack:set, seeThruPreds:bool, addEOF:bool):
115
- c = ATNConfig(s, 0, ctx)
116
-
117
- if c in lookBusy:
118
- return
119
- lookBusy.add(c)
120
-
121
- if s == stopState:
122
- if ctx is None:
123
- look.addOne(Token.EPSILON)
124
- return
125
- elif ctx.isEmpty() and addEOF:
126
- look.addOne(Token.EOF)
127
- return
128
-
129
- if isinstance(s, RuleStopState ):
130
- if ctx is None:
131
- look.addOne(Token.EPSILON)
132
- return
133
- elif ctx.isEmpty() and addEOF:
134
- look.addOne(Token.EOF)
135
- return
136
-
137
- if ctx != PredictionContext.EMPTY:
138
- removed = s.ruleIndex in calledRuleStack
139
- try:
140
- calledRuleStack.discard(s.ruleIndex)
141
- # run thru all possible stack tops in ctx
142
- for i in range(0, len(ctx)):
143
- returnState = self.atn.states[ctx.getReturnState(i)]
144
- self._LOOK(returnState, stopState, ctx.getParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
145
- finally:
146
- if removed:
147
- calledRuleStack.add(s.ruleIndex)
148
- return
149
-
150
- for t in s.transitions:
151
- if type(t) == RuleTransition:
152
- if t.target.ruleIndex in calledRuleStack:
153
- continue
154
-
155
- newContext = SingletonPredictionContext.create(ctx, t.followState.stateNumber)
156
-
157
- try:
158
- calledRuleStack.add(t.target.ruleIndex)
159
- self._LOOK(t.target, stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
160
- finally:
161
- calledRuleStack.remove(t.target.ruleIndex)
162
- elif isinstance(t, AbstractPredicateTransition ):
163
- if seeThruPreds:
164
- self._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
165
- else:
166
- look.addOne(self.HIT_PRED)
167
- elif t.isEpsilon:
168
- self._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
169
- elif type(t) == WildcardTransition:
170
- look.addRange( range(Token.MIN_USER_TOKEN_TYPE, self.atn.maxTokenType + 1) )
171
- else:
172
- set_ = t.label
173
- if set_ is not None:
174
- if isinstance(t, NotSetTransition):
175
- set_ = set_.complement(Token.MIN_USER_TOKEN_TYPE, self.atn.maxTokenType)
176
- look.addSet(set_)