omlish 0.0.0.dev359__py3-none-any.whl → 0.0.0.dev361__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of omlish has been flagged as potentially problematic.
- omlish/__about__.py +2 -2
- omlish/formats/json/Json.g4 +77 -0
- omlish/formats/json/_antlr/JsonLexer.py +109 -0
- omlish/formats/json/_antlr/JsonListener.py +61 -0
- omlish/formats/json/_antlr/JsonParser.py +457 -0
- omlish/formats/json/_antlr/JsonVisitor.py +42 -0
- omlish/formats/json/_antlr/__init__.py +0 -0
- omlish/formats/json/literals.py +27 -13
- omlish/formats/json/stream/__init__.py +57 -0
- omlish/formats/json/stream/building.py +1 -1
- omlish/formats/json/stream/utils.py +14 -6
- omlish/formats/json5/Json5.g4 +8 -9
- omlish/formats/json5/_antlr/Json5Lexer.py +1 -0
- omlish/formats/json5/_antlr/Json5Listener.py +1 -0
- omlish/formats/json5/_antlr/Json5Parser.py +1 -0
- omlish/formats/json5/_antlr/Json5Visitor.py +1 -0
- omlish/formats/json5/parsing.py +30 -8
- omlish/formats/json5/rendering.py +118 -10
- omlish/lang/__init__.py +1 -0
- omlish/lang/classes/namespaces.py +8 -3
- omlish/lang/strings.py +15 -0
- omlish/specs/jmespath/__init__.py +1 -0
- omlish/specs/jmespath/ast.py +17 -0
- omlish/specs/jmespath/cli.py +3 -0
- omlish/specs/jmespath/errors.py +3 -0
- omlish/specs/jmespath/lexer.py +4 -0
- omlish/specs/jmespath/parser.py +13 -2
- omlish/specs/jmespath/scope.py +3 -0
- omlish/specs/jmespath/visitor.py +9 -0
- omlish/specs/proto/_antlr/Protobuf3Lexer.py +1 -0
- omlish/specs/proto/_antlr/Protobuf3Listener.py +1 -0
- omlish/specs/proto/_antlr/Protobuf3Parser.py +1 -0
- omlish/specs/proto/_antlr/Protobuf3Visitor.py +1 -0
- omlish/sql/parsing/_antlr/MinisqlLexer.py +1 -0
- omlish/sql/parsing/_antlr/MinisqlListener.py +1 -0
- omlish/sql/parsing/_antlr/MinisqlParser.py +1 -0
- omlish/sql/parsing/_antlr/MinisqlVisitor.py +1 -0
- {omlish-0.0.0.dev359.dist-info → omlish-0.0.0.dev361.dist-info}/METADATA +1 -1
- {omlish-0.0.0.dev359.dist-info → omlish-0.0.0.dev361.dist-info}/RECORD +43 -37
- {omlish-0.0.0.dev359.dist-info → omlish-0.0.0.dev361.dist-info}/WHEEL +0 -0
- {omlish-0.0.0.dev359.dist-info → omlish-0.0.0.dev361.dist-info}/entry_points.txt +0 -0
- {omlish-0.0.0.dev359.dist-info → omlish-0.0.0.dev361.dist-info}/licenses/LICENSE +0 -0
- {omlish-0.0.0.dev359.dist-info → omlish-0.0.0.dev361.dist-info}/top_level.txt +0 -0
omlish/formats/json/_antlr/JsonParser.py
ADDED

@@ -0,0 +1,457 @@
# type: ignore
# ruff: noqa
# flake8: noqa
# @omlish-generated
# Generated from Json.g4 by ANTLR 4.13.2
# encoding: utf-8
from ....text.antlr._runtime._all import *
from io import StringIO
import sys
if sys.version_info[1] > 5:
    from typing import TextIO
else:
    from typing.io import TextIO


def serializedATN():
    return [
        4,1,12,57,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,1,0,1,0,1,0,1,
        1,1,1,1,1,1,1,5,1,18,8,1,10,1,12,1,21,9,1,1,1,1,1,1,1,1,1,3,1,27,
        8,1,1,2,1,2,1,2,1,2,1,3,1,3,1,3,1,3,5,3,37,8,3,10,3,12,3,40,9,3,
        1,3,1,3,1,3,1,3,3,3,46,8,3,1,4,1,4,1,4,1,4,1,4,1,4,1,4,3,4,55,8,
        4,1,4,0,0,5,0,2,4,6,8,0,0,61,0,10,1,0,0,0,2,26,1,0,0,0,4,28,1,0,
        0,0,6,45,1,0,0,0,8,54,1,0,0,0,10,11,3,8,4,0,11,12,5,0,0,1,12,1,1,
        0,0,0,13,14,5,1,0,0,14,19,3,4,2,0,15,16,5,2,0,0,16,18,3,4,2,0,17,
        15,1,0,0,0,18,21,1,0,0,0,19,17,1,0,0,0,19,20,1,0,0,0,20,22,1,0,0,
        0,21,19,1,0,0,0,22,23,5,3,0,0,23,27,1,0,0,0,24,25,5,1,0,0,25,27,
        5,3,0,0,26,13,1,0,0,0,26,24,1,0,0,0,27,3,1,0,0,0,28,29,5,10,0,0,
        29,30,5,4,0,0,30,31,3,8,4,0,31,5,1,0,0,0,32,33,5,5,0,0,33,38,3,8,
        4,0,34,35,5,2,0,0,35,37,3,8,4,0,36,34,1,0,0,0,37,40,1,0,0,0,38,36,
        1,0,0,0,38,39,1,0,0,0,39,41,1,0,0,0,40,38,1,0,0,0,41,42,5,6,0,0,
        42,46,1,0,0,0,43,44,5,5,0,0,44,46,5,6,0,0,45,32,1,0,0,0,45,43,1,
        0,0,0,46,7,1,0,0,0,47,55,5,10,0,0,48,55,5,11,0,0,49,55,3,2,1,0,50,
        55,3,6,3,0,51,55,5,7,0,0,52,55,5,8,0,0,53,55,5,9,0,0,54,47,1,0,0,
        0,54,48,1,0,0,0,54,49,1,0,0,0,54,50,1,0,0,0,54,51,1,0,0,0,54,52,
        1,0,0,0,54,53,1,0,0,0,55,9,1,0,0,0,5,19,26,38,45,54
    ]


class JsonParser ( Parser ):

    grammarFileName = "Json.g4"

    atn = ATNDeserializer().deserialize(serializedATN())

    decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]

    sharedContextCache = PredictionContextCache()

    literalNames = [ "<INVALID>", "'{'", "','", "'}'", "':'", "'['", "']'",
                     "'true'", "'false'", "'null'" ]

    symbolicNames = [ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "STRING", "NUMBER", "WS" ]

    RULE_json = 0
    RULE_obj = 1
    RULE_pair = 2
    RULE_arr = 3
    RULE_value = 4

    ruleNames = [ "json", "obj", "pair", "arr", "value" ]

    EOF = Token.EOF
    T__0=1
    T__1=2
    T__2=3
    T__3=4
    T__4=5
    T__5=6
    T__6=7
    T__7=8
    T__8=9
    STRING=10
    NUMBER=11
    WS=12

    def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
        super().__init__(input, output)
        self.checkVersion("4.13.2")
        self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
        self._predicates = None


    class JsonContext(ParserRuleContext):
        __slots__ = 'parser'

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def value(self):
            return self.getTypedRuleContext(JsonParser.ValueContext,0)

        def EOF(self):
            return self.getToken(JsonParser.EOF, 0)

        def getRuleIndex(self):
            return JsonParser.RULE_json

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterJson" ):
                listener.enterJson(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitJson" ):
                listener.exitJson(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitJson" ):
                return visitor.visitJson(self)
            else:
                return visitor.visitChildren(self)


    def json(self):

        localctx = JsonParser.JsonContext(self, self._ctx, self.state)
        self.enterRule(localctx, 0, self.RULE_json)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 10
            self.value()
            self.state = 11
            self.match(JsonParser.EOF)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx


    class ObjContext(ParserRuleContext):
        __slots__ = 'parser'

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def pair(self, i:int=None):
            if i is None:
                return self.getTypedRuleContexts(JsonParser.PairContext)
            else:
                return self.getTypedRuleContext(JsonParser.PairContext,i)

        def getRuleIndex(self):
            return JsonParser.RULE_obj

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterObj" ):
                listener.enterObj(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitObj" ):
                listener.exitObj(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitObj" ):
                return visitor.visitObj(self)
            else:
                return visitor.visitChildren(self)


    def obj(self):

        localctx = JsonParser.ObjContext(self, self._ctx, self.state)
        self.enterRule(localctx, 2, self.RULE_obj)
        self._la = 0 # Token type
        try:
            self.state = 26
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,1,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 13
                self.match(JsonParser.T__0)
                self.state = 14
                self.pair()
                self.state = 19
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                while _la==2:
                    self.state = 15
                    self.match(JsonParser.T__1)
                    self.state = 16
                    self.pair()
                    self.state = 21
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)

                self.state = 22
                self.match(JsonParser.T__2)
                pass

            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 24
                self.match(JsonParser.T__0)
                self.state = 25
                self.match(JsonParser.T__2)
                pass

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx


    class PairContext(ParserRuleContext):
        __slots__ = 'parser'

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def STRING(self):
            return self.getToken(JsonParser.STRING, 0)

        def value(self):
            return self.getTypedRuleContext(JsonParser.ValueContext,0)

        def getRuleIndex(self):
            return JsonParser.RULE_pair

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterPair" ):
                listener.enterPair(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitPair" ):
                listener.exitPair(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitPair" ):
                return visitor.visitPair(self)
            else:
                return visitor.visitChildren(self)


    def pair(self):

        localctx = JsonParser.PairContext(self, self._ctx, self.state)
        self.enterRule(localctx, 4, self.RULE_pair)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 28
            self.match(JsonParser.STRING)
            self.state = 29
            self.match(JsonParser.T__3)
            self.state = 30
            self.value()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx


    class ArrContext(ParserRuleContext):
        __slots__ = 'parser'

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def value(self, i:int=None):
            if i is None:
                return self.getTypedRuleContexts(JsonParser.ValueContext)
            else:
                return self.getTypedRuleContext(JsonParser.ValueContext,i)

        def getRuleIndex(self):
            return JsonParser.RULE_arr

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterArr" ):
                listener.enterArr(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitArr" ):
                listener.exitArr(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitArr" ):
                return visitor.visitArr(self)
            else:
                return visitor.visitChildren(self)


    def arr(self):

        localctx = JsonParser.ArrContext(self, self._ctx, self.state)
        self.enterRule(localctx, 6, self.RULE_arr)
        self._la = 0 # Token type
        try:
            self.state = 45
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,3,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 32
                self.match(JsonParser.T__4)
                self.state = 33
                self.value()
                self.state = 38
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                while _la==2:
                    self.state = 34
                    self.match(JsonParser.T__1)
                    self.state = 35
                    self.value()
                    self.state = 40
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)

                self.state = 41
                self.match(JsonParser.T__5)
                pass

            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 43
                self.match(JsonParser.T__4)
                self.state = 44
                self.match(JsonParser.T__5)
                pass

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx


    class ValueContext(ParserRuleContext):
        __slots__ = 'parser'

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def STRING(self):
            return self.getToken(JsonParser.STRING, 0)

        def NUMBER(self):
            return self.getToken(JsonParser.NUMBER, 0)

        def obj(self):
            return self.getTypedRuleContext(JsonParser.ObjContext,0)

        def arr(self):
            return self.getTypedRuleContext(JsonParser.ArrContext,0)

        def getRuleIndex(self):
            return JsonParser.RULE_value

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterValue" ):
                listener.enterValue(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitValue" ):
                listener.exitValue(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitValue" ):
                return visitor.visitValue(self)
            else:
                return visitor.visitChildren(self)


    def value(self):

        localctx = JsonParser.ValueContext(self, self._ctx, self.state)
        self.enterRule(localctx, 8, self.RULE_value)
        try:
            self.state = 54
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [10]:
                self.enterOuterAlt(localctx, 1)
                self.state = 47
                self.match(JsonParser.STRING)
                pass
            elif token in [11]:
                self.enterOuterAlt(localctx, 2)
                self.state = 48
                self.match(JsonParser.NUMBER)
                pass
            elif token in [1]:
                self.enterOuterAlt(localctx, 3)
                self.state = 49
                self.obj()
                pass
            elif token in [5]:
                self.enterOuterAlt(localctx, 4)
                self.state = 50
                self.arr()
                pass
            elif token in [7]:
                self.enterOuterAlt(localctx, 5)
                self.state = 51
                self.match(JsonParser.T__6)
                pass
            elif token in [8]:
                self.enterOuterAlt(localctx, 6)
                self.state = 52
                self.match(JsonParser.T__7)
                pass
            elif token in [9]:
                self.enterOuterAlt(localctx, 7)
                self.state = 53
                self.match(JsonParser.T__8)
                pass
            else:
                raise NoViableAltException(self)

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
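For reference, the generated modules are driven like any other ANTLR Python target. Below is a minimal sketch of wiring the new JsonLexer and JsonParser together; it assumes the vendored runtime module (omlish.text.antlr._runtime._all, star-imported by the generated code) re-exports the usual InputStream and CommonTokenStream helpers, which this diff does not show.

    # A minimal sketch, assuming the stock ANTLR Python runtime API
    # (InputStream, CommonTokenStream) is reachable via the vendored runtime.
    from omlish.formats.json._antlr.JsonLexer import JsonLexer
    from omlish.formats.json._antlr.JsonParser import JsonParser
    from omlish.text.antlr._runtime._all import CommonTokenStream, InputStream  # assumed re-exports


    def parse_json_text(text: str) -> JsonParser.JsonContext:
        # Lex the raw text into a token stream, then run the 'json' start rule.
        lexer = JsonLexer(InputStream(text))
        parser = JsonParser(CommonTokenStream(lexer))
        return parser.json()


    tree = parse_json_text('{"a": [1, 2, null]}')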
omlish/formats/json/_antlr/JsonVisitor.py
ADDED

@@ -0,0 +1,42 @@
# type: ignore
# ruff: noqa
# flake8: noqa
# @omlish-generated
# Generated from Json.g4 by ANTLR 4.13.2
from ....text.antlr._runtime._all import *
if "." in __name__:
    from .JsonParser import JsonParser
else:
    from JsonParser import JsonParser

# This class defines a complete generic visitor for a parse tree produced by JsonParser.

class JsonVisitor(ParseTreeVisitor):

    # Visit a parse tree produced by JsonParser#json.
    def visitJson(self, ctx:JsonParser.JsonContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by JsonParser#obj.
    def visitObj(self, ctx:JsonParser.ObjContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by JsonParser#pair.
    def visitPair(self, ctx:JsonParser.PairContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by JsonParser#arr.
    def visitArr(self, ctx:JsonParser.ArrContext):
        return self.visitChildren(ctx)


    # Visit a parse tree produced by JsonParser#value.
    def visitValue(self, ctx:JsonParser.ValueContext):
        return self.visitChildren(ctx)


del JsonParser
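The generated visitor only forwards to visitChildren, so concrete behavior comes from subclassing it. Here is a hedged sketch of a subclass that rebuilds plain Python values using the context accessors defined in the JsonParser above (STRING()/NUMBER()/obj()/arr() on ValueContext, pair() on ObjContext, value() on ArrContext); decoding the STRING and NUMBER literals with the stdlib json module is a simplification for illustration, not the library's own decoding path.

    import json  # used only to decode STRING/NUMBER literals in this sketch

    from omlish.formats.json._antlr.JsonParser import JsonParser
    from omlish.formats.json._antlr.JsonVisitor import JsonVisitor


    class PyValueVisitor(JsonVisitor):
        # json: value EOF
        def visitJson(self, ctx: JsonParser.JsonContext):
            return self.visit(ctx.value())

        # obj: '{' pair (',' pair)* '}' | '{' '}'
        def visitObj(self, ctx: JsonParser.ObjContext):
            return dict(self.visit(p) for p in ctx.pair())

        # pair: STRING ':' value
        def visitPair(self, ctx: JsonParser.PairContext):
            return json.loads(ctx.STRING().getText()), self.visit(ctx.value())

        # arr: '[' value (',' value)* ']' | '[' ']'
        def visitArr(self, ctx: JsonParser.ArrContext):
            return [self.visit(v) for v in ctx.value()]

        # value: STRING | NUMBER | obj | arr | 'true' | 'false' | 'null'
        def visitValue(self, ctx: JsonParser.ValueContext):
            if ctx.STRING() is not None:
                return json.loads(ctx.STRING().getText())
            if ctx.NUMBER() is not None:
                return json.loads(ctx.NUMBER().getText())
            if ctx.obj() is not None:
                return self.visit(ctx.obj())
            if ctx.arr() is not None:
                return self.visit(ctx.arr())
            return {'true': True, 'false': False, 'null': None}[ctx.getText()]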
omlish/formats/json/_antlr/__init__.py
File without changes

omlish/formats/json/literals.py
CHANGED
@@ -22,12 +22,14 @@ import re
 import sys
 import typing as ta
 
+from ... import lang
+
 
 ##
 
 
 _ESCAPE_PAT = re.compile(r'[\x00-\x1f\\"]')
-_ESCAPE_ASCII_PAT = re.compile(r'
+_ESCAPE_ASCII_PAT = re.compile(r'[\\"]|[^\ -~]')
 
 ESCAPE_MAP: ta.Mapping[str, str] = {
     **{

@@ -78,6 +80,7 @@ def encode_string(
     rq: str | None = None,
     escape_map: ta.Mapping[str, str] | None = None,
     ensure_ascii: bool = False,
+    process_chunks: ta.Callable[[list[str]], ta.Iterable[str]] | None = None,
 ) -> str:
     """Return a JSON representation of a Python string."""
 

@@ -91,14 +94,12 @@ def encode_string(
     if ensure_ascii:
         pat = _ESCAPE_ASCII_PAT
 
-        def replace(
-            s = m.group(0)
-
+        def replace(c):
             try:
-                return escape_map[
+                return escape_map[c]
 
             except KeyError:
-                n = ord(
+                n = ord(c)
                 if n < 0x10000:
                     return f'\\u{n:04x}'
 

@@ -111,14 +112,27 @@ def encode_string(
     else:
         pat = _ESCAPE_PAT
 
-        def replace(
-            return escape_map[
+        def replace(c):
+            return escape_map[c]
+
+    if process_chunks is not None:
+        chunks: list[str] = [
+            replace(p.group(0)) if isinstance(p, re.Match) else p
+            for p in lang.iter_matches(pat, s)
+        ]
 
-
-
-
-
-
+        return ''.join([
+            lq,
+            *process_chunks(chunks),
+            rq,
+        ])
+
+    else:
+        return ''.join([
+            lq,
+            pat.sub(lambda m: replace(m.group(0)), s),
+            rq,
+        ])
 
 
 ##
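The new process_chunks hook exposes the escaped chunks before they are joined between the opening and closing quotes. A hedged usage sketch follows; the chunking behaviour (alternating literal runs and escape replacements coming out of lang.iter_matches) is inferred from the hunk above, and shout_chunks is purely illustrative, not part of the library.

    import typing as ta

    from omlish.formats.json.literals import encode_string


    def shout_chunks(chunks: list[str]) -> ta.Iterable[str]:
        # Receives the already-escaped pieces of the string (literal runs and
        # escape sequences), without the surrounding quote characters.
        return (c.upper() for c in chunks)


    plain = encode_string('hi\n"there"')
    shouted = encode_string('hi\n"there"', process_chunks=shout_chunks)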
omlish/formats/json/stream/__init__.py
ADDED

@@ -0,0 +1,57 @@
from .building import (  # noqa
    JsonValueBuilder,
)

from .errors import (  # noqa
    JsonStreamError,
)


from .lexing import (  # noqa
    ValueTokenKind,
    VALUE_TOKEN_KINDS,
    ControlTokenKind,
    SpaceTokenKind,
    TokenKind,

    ScalarValue,
    SCALAR_VALUE_TYPES,

    Position,

    Token,

    CONTROL_TOKENS,
    CONST_TOKENS,

    JsonStreamLexError,
    JsonStreamLexer,
)

from .parsing import (  # noqa
    BeginObject,
    Key,
    EndObject,
    BeginArray,
    EndArray,

    JsonStreamParserEvent,
    JsonStreamParserEvents,

    yield_parser_events,

    JsonStreamParseError,
    JsonStreamObject,
    JsonStreamParser,
)

from .rendering import (  # noqa
    StreamJsonRenderer,
)

from .utils import (  # noqa
    JsonStreamValueParser,

    stream_parse_values,
    stream_parse_one_value,
)
omlish/formats/json/stream/utils.py
CHANGED

@@ -2,7 +2,7 @@ import dataclasses as dc
 import typing as ta
 
 from .... import lang
-from .building import
+from .building import JsonValueBuilder
 from .lexing import JsonStreamLexer
 from .parsing import JsonStreamParser
 

@@ -11,7 +11,7 @@ from .parsing import JsonStreamParser
 
 
 @dc.dataclass(kw_only=True)
-class
+class JsonStreamValueParser(lang.ExitStacked):
     include_raw: bool = False
     yield_object_lists: bool = False
 

@@ -19,7 +19,7 @@ class JsonStreamObjectParser(lang.ExitStacked):
 
     _lex: JsonStreamLexer = dc.field(init=False)
     _parse: JsonStreamParser = dc.field(init=False)
-    _build:
+    _build: JsonValueBuilder = dc.field(init=False)
 
     def _enter_contexts(self) -> None:
         self._lex = JsonStreamLexer(

@@ -28,7 +28,7 @@ class JsonStreamObjectParser(lang.ExitStacked):
 
         self._parse = JsonStreamParser()
 
-        self._build =
+        self._build = JsonValueBuilder(
             yield_object_lists=self.yield_object_lists,
         )
 

@@ -40,9 +40,17 @@ class JsonStreamObjectParser(lang.ExitStacked):
             yield v
 
 
-def
+def stream_parse_values(
+    i: ta.Iterable[str],
+    **kwargs: ta.Any,
+) -> ta.Generator[ta.Any]:
+    with JsonStreamValueParser(**kwargs) as p:
+        yield from p.feed(i)
+
+
+def stream_parse_one_value(
     i: ta.Iterable[str],
     **kwargs: ta.Any,
 ) -> ta.Any:
-    with
+    with JsonStreamValueParser(**kwargs) as p:
         return next(p.feed(i))