rapydscript-ns 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.agignore +1 -0
- package/.gitattributes +4 -0
- package/.github/workflows/ci.yml +38 -0
- package/.github/workflows/web-repl-page-deploy.yml +42 -0
- package/=template.pyj +5 -0
- package/CHANGELOG.md +456 -0
- package/CONTRIBUTORS +13 -0
- package/HACKING.md +103 -0
- package/LICENSE +24 -0
- package/README.md +2512 -0
- package/TODO.md +327 -0
- package/add-toc-to-readme +2 -0
- package/bin/export +75 -0
- package/bin/rapydscript +70 -0
- package/bin/web-repl-export +102 -0
- package/build +3 -0
- package/package.json +46 -0
- package/publish.py +37 -0
- package/release/baselib-plain-pretty.js +4370 -0
- package/release/baselib-plain-ugly.js +3 -0
- package/release/compiler.js +18394 -0
- package/release/signatures.json +31 -0
- package/session.vim +4 -0
- package/setup.cfg +2 -0
- package/src/ast.pyj +1356 -0
- package/src/baselib-builtins.pyj +279 -0
- package/src/baselib-containers.pyj +723 -0
- package/src/baselib-errors.pyj +37 -0
- package/src/baselib-internal.pyj +421 -0
- package/src/baselib-itertools.pyj +97 -0
- package/src/baselib-str.pyj +798 -0
- package/src/compiler.pyj +36 -0
- package/src/errors.pyj +30 -0
- package/src/lib/aes.pyj +646 -0
- package/src/lib/collections.pyj +695 -0
- package/src/lib/elementmaker.pyj +83 -0
- package/src/lib/encodings.pyj +126 -0
- package/src/lib/functools.pyj +148 -0
- package/src/lib/gettext.pyj +569 -0
- package/src/lib/itertools.pyj +580 -0
- package/src/lib/math.pyj +193 -0
- package/src/lib/numpy.pyj +2101 -0
- package/src/lib/operator.pyj +11 -0
- package/src/lib/pythonize.pyj +20 -0
- package/src/lib/random.pyj +118 -0
- package/src/lib/re.pyj +470 -0
- package/src/lib/traceback.pyj +63 -0
- package/src/lib/uuid.pyj +77 -0
- package/src/monaco-language-service/analyzer.js +526 -0
- package/src/monaco-language-service/builtins.js +543 -0
- package/src/monaco-language-service/completions.js +498 -0
- package/src/monaco-language-service/diagnostics.js +643 -0
- package/src/monaco-language-service/dts.js +550 -0
- package/src/monaco-language-service/hover.js +121 -0
- package/src/monaco-language-service/index.js +386 -0
- package/src/monaco-language-service/scope.js +162 -0
- package/src/monaco-language-service/signature.js +144 -0
- package/src/output/__init__.pyj +0 -0
- package/src/output/classes.pyj +296 -0
- package/src/output/codegen.pyj +492 -0
- package/src/output/comments.pyj +45 -0
- package/src/output/exceptions.pyj +105 -0
- package/src/output/functions.pyj +491 -0
- package/src/output/literals.pyj +109 -0
- package/src/output/loops.pyj +444 -0
- package/src/output/modules.pyj +329 -0
- package/src/output/operators.pyj +429 -0
- package/src/output/statements.pyj +463 -0
- package/src/output/stream.pyj +309 -0
- package/src/output/treeshake.pyj +182 -0
- package/src/output/utils.pyj +72 -0
- package/src/parse.pyj +3106 -0
- package/src/string_interpolation.pyj +72 -0
- package/src/tokenizer.pyj +702 -0
- package/src/unicode_aliases.pyj +576 -0
- package/src/utils.pyj +192 -0
- package/test/_import_one.pyj +37 -0
- package/test/_import_two/__init__.pyj +11 -0
- package/test/_import_two/level2/__init__.pyj +0 -0
- package/test/_import_two/level2/deep.pyj +4 -0
- package/test/_import_two/other.pyj +6 -0
- package/test/_import_two/sub.pyj +13 -0
- package/test/aes_vectors.pyj +421 -0
- package/test/annotations.pyj +80 -0
- package/test/baselib.pyj +319 -0
- package/test/classes.pyj +452 -0
- package/test/collections.pyj +152 -0
- package/test/decorators.pyj +77 -0
- package/test/dict_spread.pyj +76 -0
- package/test/docstrings.pyj +39 -0
- package/test/elementmaker_test.pyj +45 -0
- package/test/ellipsis.pyj +49 -0
- package/test/functions.pyj +151 -0
- package/test/generators.pyj +41 -0
- package/test/generic.pyj +370 -0
- package/test/imports.pyj +72 -0
- package/test/internationalization.pyj +73 -0
- package/test/lint.pyj +164 -0
- package/test/loops.pyj +85 -0
- package/test/numpy.pyj +734 -0
- package/test/omit_function_metadata.pyj +20 -0
- package/test/regexp.pyj +55 -0
- package/test/repl.pyj +121 -0
- package/test/scoped_flags.pyj +76 -0
- package/test/starargs.pyj +506 -0
- package/test/starred_assign.pyj +104 -0
- package/test/str.pyj +198 -0
- package/test/subscript_tuple.pyj +53 -0
- package/test/unit/fixtures/fibonacci_expected.js +46 -0
- package/test/unit/index.js +2989 -0
- package/test/unit/language-service-builtins.js +815 -0
- package/test/unit/language-service-completions.js +1067 -0
- package/test/unit/language-service-dts.js +543 -0
- package/test/unit/language-service-hover.js +455 -0
- package/test/unit/language-service-scope.js +833 -0
- package/test/unit/language-service-signature.js +458 -0
- package/test/unit/language-service.js +705 -0
- package/test/unit/run-language-service.js +41 -0
- package/test/unit/web-repl.js +484 -0
- package/tools/build-language-service.js +190 -0
- package/tools/cli.js +547 -0
- package/tools/compile.js +219 -0
- package/tools/compiler.js +108 -0
- package/tools/completer.js +131 -0
- package/tools/embedded_compiler.js +251 -0
- package/tools/export.js +316 -0
- package/tools/gettext.js +185 -0
- package/tools/ini.js +65 -0
- package/tools/lint.js +705 -0
- package/tools/msgfmt.js +187 -0
- package/tools/repl.js +223 -0
- package/tools/self.js +162 -0
- package/tools/test.js +118 -0
- package/tools/utils.js +128 -0
- package/tools/web_repl.js +95 -0
- package/try +41 -0
- package/web-repl/env.js +74 -0
- package/web-repl/index.html +163 -0
- package/web-repl/language-service.js +4084 -0
- package/web-repl/main.js +254 -0
- package/web-repl/prism.css +139 -0
- package/web-repl/prism.js +113 -0
- package/web-repl/rapydscript.js +435 -0
- package/web-repl/sha1.js +25 -0
package/src/parse.pyj
ADDED
|
@@ -0,0 +1,3106 @@
|
|
|
1
|
+
# vim:fileencoding=utf-8
|
|
2
|
+
# License: BSD Copyright: 2016, Kovid Goyal <kovid at kovidgoyal.net>
|
|
3
|
+
# globals: readfile
|
|
4
|
+
from __python__ import hash_literals
|
|
5
|
+
|
|
6
|
+
from utils import make_predicate, array_to_hash, defaults, has_prop, cache_file_name
|
|
7
|
+
from errors import SyntaxError, ImportError
|
|
8
|
+
from ast import (
|
|
9
|
+
AST_Array, AST_Assign, AST_Binary, AST_BlockStatement, AST_Break,
|
|
10
|
+
AST_Call, AST_Catch, AST_Class, AST_ClassCall, AST_Conditional,
|
|
11
|
+
AST_Constant, AST_Continue, AST_DWLoop, AST_Debugger, AST_Decorator,
|
|
12
|
+
AST_Definitions, AST_DictComprehension, AST_Directive, AST_Do, AST_Dot,
|
|
13
|
+
AST_Ellipsis, AST_Else, AST_EmptyStatement, AST_Except, AST_ExpressiveObject, AST_False, AST_Finally,
|
|
14
|
+
AST_ForIn, AST_ForJS, AST_Function, AST_GeneratorComprehension, AST_Hole,
|
|
15
|
+
AST_If, AST_Import, AST_ImportedVar, AST_Imports, AST_ListComprehension,
|
|
16
|
+
AST_Method, AST_New, AST_Null, AST_Number, AST_Object, AST_ObjectKeyVal, AST_ObjectSpread,
|
|
17
|
+
AST_PropAccess, AST_RegExp, AST_Return, AST_Scope, AST_Set,
|
|
18
|
+
AST_SetComprehension, AST_SetItem, AST_Seq, AST_SimpleStatement, AST_Splice,
|
|
19
|
+
AST_String, AST_Sub, AST_ItemAccess, AST_SymbolAlias,
|
|
20
|
+
AST_SymbolCatch, AST_SymbolDefun, AST_SymbolFunarg,
|
|
21
|
+
AST_SymbolLambda, AST_SymbolNonlocal, AST_SymbolRef, AST_SymbolVar, AST_This,
|
|
22
|
+
AST_Throw, AST_Toplevel, AST_True, AST_Try, AST_UnaryPrefix,
|
|
23
|
+
AST_Undefined, AST_Var, AST_VarDef, AST_Verbatim, AST_While, AST_With, AST_WithClause,
|
|
24
|
+
AST_Yield, AST_Await, AST_Assert, AST_Existential, AST_NamedExpr, AST_AnnotatedAssign, AST_Super, AST_Starred, is_node_type,
|
|
25
|
+
AST_Match, AST_MatchCase,
|
|
26
|
+
AST_MatchWildcard, AST_MatchCapture, AST_MatchLiteral, AST_MatchOr,
|
|
27
|
+
AST_MatchAs, AST_MatchStar, AST_MatchSequence, AST_MatchMapping, AST_MatchClass
|
|
28
|
+
)
|
|
29
|
+
from tokenizer import tokenizer, is_token, RESERVED_WORDS
|
|
30
|
+
|
|
31
|
+
COMPILER_VERSION = '__COMPILER_VERSION__'
|
|
32
|
+
PYTHON_FLAGS = {'dict_literals':True, 'overload_getitem':True, 'bound_methods':True, 'hash_literals':True, 'overload_operators':True}
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def get_compiler_version():
|
|
36
|
+
return COMPILER_VERSION
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def static_predicate(names):
|
|
40
|
+
return {k:True for k in names.split(' ')}
|
|
41
|
+
|
|
42
|
+
NATIVE_CLASSES = {
|
|
43
|
+
'Image': {},
|
|
44
|
+
'FileReader': {},
|
|
45
|
+
'RegExp': {},
|
|
46
|
+
'Error': {},
|
|
47
|
+
'EvalError': {},
|
|
48
|
+
'InternalError': {},
|
|
49
|
+
'RangeError': {},
|
|
50
|
+
'ReferenceError': {},
|
|
51
|
+
'SyntaxError': {},
|
|
52
|
+
'TypeError': {},
|
|
53
|
+
'URIError': {},
|
|
54
|
+
'Object': {
|
|
55
|
+
'static': static_predicate(
|
|
56
|
+
'getOwnPropertyNames getOwnPropertyDescriptor getOwnPropertyDescriptors'
|
|
57
|
+
' getOwnPropertySymbols keys entries values create defineProperty'
|
|
58
|
+
' defineProperties getPrototypeOf setPrototypeOf assign'
|
|
59
|
+
' seal isSealed is preventExtensions isExtensible'
|
|
60
|
+
' freeze isFrozen'
|
|
61
|
+
)
|
|
62
|
+
},
|
|
63
|
+
'String': {
|
|
64
|
+
'static': static_predicate("fromCharCode")
|
|
65
|
+
},
|
|
66
|
+
'Array': {
|
|
67
|
+
'static': static_predicate("isArray from of")
|
|
68
|
+
},
|
|
69
|
+
'Function': {},
|
|
70
|
+
'Date': {
|
|
71
|
+
'static': static_predicate("UTC now parse")
|
|
72
|
+
},
|
|
73
|
+
'ArrayBuffer': {
|
|
74
|
+
'static': static_predicate('isView transfer')
|
|
75
|
+
},
|
|
76
|
+
'DataView': {},
|
|
77
|
+
'Float32Array': {},
|
|
78
|
+
'Float64Array': {},
|
|
79
|
+
'Int16Array': {},
|
|
80
|
+
'Int32Array': {},
|
|
81
|
+
'Int8Array': {},
|
|
82
|
+
'Uint16Array': {},
|
|
83
|
+
'Uint32Array': {},
|
|
84
|
+
'Uint8Array': {},
|
|
85
|
+
'Uint8ClampedArray': {},
|
|
86
|
+
'Map': {},
|
|
87
|
+
'WeakMap': {},
|
|
88
|
+
'Proxy': {},
|
|
89
|
+
'Set': {},
|
|
90
|
+
'WeakSet': {},
|
|
91
|
+
'Promise': {
|
|
92
|
+
'static': static_predicate('all race reject resolve')
|
|
93
|
+
},
|
|
94
|
+
'WebSocket': {},
|
|
95
|
+
'XMLHttpRequest': {},
|
|
96
|
+
'TextEncoder': {},
|
|
97
|
+
'TextDecoder': {},
|
|
98
|
+
'MouseEvent': {},
|
|
99
|
+
'Event': {},
|
|
100
|
+
'CustomEvent': {},
|
|
101
|
+
'Blob': {},
|
|
102
|
+
}
|
|
103
|
+
ERROR_CLASSES = {
|
|
104
|
+
'Exception': {},
|
|
105
|
+
'AttributeError': {},
|
|
106
|
+
'IndexError': {},
|
|
107
|
+
'KeyError': {},
|
|
108
|
+
'ValueError': {},
|
|
109
|
+
'UnicodeDecodeError': {},
|
|
110
|
+
'AssertionError': {},
|
|
111
|
+
'ZeroDivisionError': {},
|
|
112
|
+
}
|
|
113
|
+
COMMON_STATIC = static_predicate('call apply bind toString')
|
|
114
|
+
FORBIDDEN_CLASS_VARS = 'prototype constructor'.split(' ')
|
|
115
|
+
|
|
116
|
+
# -----[ Parser (constants) ]-----
|
|
117
|
+
UNARY_PREFIX = make_predicate('typeof void delete ~ - + ! @')
|
|
118
|
+
|
|
119
|
+
ASSIGNMENT = make_predicate('= += -= /= //= *= %= >>= <<= >>>= |= ^= &=')
|
|
120
|
+
|
|
121
|
+
PRECEDENCE = (def(a, ret):
|
|
122
|
+
for i in range(a.length):
|
|
123
|
+
b = a[i]
|
|
124
|
+
for j in range(b.length):
|
|
125
|
+
ret[b[j]] = i+1
|
|
126
|
+
return ret
|
|
127
|
+
)([
|
|
128
|
+
# lowest precedence
|
|
129
|
+
[ "||" ],
|
|
130
|
+
[ "&&" ],
|
|
131
|
+
[ "|" ],
|
|
132
|
+
[ "^" ],
|
|
133
|
+
[ "&" ],
|
|
134
|
+
[ "==", "===", "!=", "!==" ],
|
|
135
|
+
[ "<", ">", "<=", ">=", "in", "nin", "instanceof" ],
|
|
136
|
+
[ ">>", "<<", ">>>" ],
|
|
137
|
+
[ "+", "-" ],
|
|
138
|
+
[ "*", "/", "//", "%" ],
|
|
139
|
+
[ "**" ]
|
|
140
|
+
# highest precedence
|
|
141
|
+
], {})
|
|
142
|
+
|
|
143
|
+
STATEMENTS_WITH_LABELS = array_to_hash([ "for", "do", "while", "switch" ])
|
|
144
|
+
|
|
145
|
+
ATOMIC_START_TOKEN = array_to_hash([ "atom", "num", "string", "regexp", "name", "js" ])
|
|
146
|
+
|
|
147
|
+
compile_time_decorators = ['staticmethod', 'classmethod', 'external', 'property']
|
|
148
|
+
|
|
149
|
+
def has_simple_decorator(decorators, name):
|
|
150
|
+
remove = v'[]'
|
|
151
|
+
for v'var i = 0; i < decorators.length; i++':
|
|
152
|
+
s = decorators[i]
|
|
153
|
+
if is_node_type(s, AST_SymbolRef) and not s.parens and s.name is name:
|
|
154
|
+
remove.push(i)
|
|
155
|
+
if remove.length:
|
|
156
|
+
remove.reverse()
|
|
157
|
+
for v'var i = 0; i < remove.length; i++':
|
|
158
|
+
decorators.splice(remove[i], 1)
|
|
159
|
+
return True
|
|
160
|
+
return False
|
|
161
|
+
|
|
162
|
+
def has_setter_decorator(decorators, name):
|
|
163
|
+
remove = v'[]'
|
|
164
|
+
for v'var i = 0; i < decorators.length; i++':
|
|
165
|
+
s = decorators[i]
|
|
166
|
+
if is_node_type(s, AST_Dot) and is_node_type(s.expression, AST_SymbolRef) and s.expression.name is name and s.property is 'setter':
|
|
167
|
+
remove.push(i)
|
|
168
|
+
if remove.length:
|
|
169
|
+
remove.reverse()
|
|
170
|
+
for v'var i = 0; i < remove.length; i++':
|
|
171
|
+
decorators.splice(remove[i], 1)
|
|
172
|
+
return True
|
|
173
|
+
return False
|
|
174
|
+
|
|
175
|
+
# -----[ Parser ]-----
|
|
176
|
+
def create_parser_ctx(S, import_dirs, module_id, baselib_items, imported_module_ids, imported_modules, importing_modules, options):
|
|
177
|
+
|
|
178
|
+
def next():
|
|
179
|
+
S.prev = S.token
|
|
180
|
+
if S.peeked.length:
|
|
181
|
+
S.token = S.peeked.shift()
|
|
182
|
+
else:
|
|
183
|
+
S.token = S.input()
|
|
184
|
+
|
|
185
|
+
return S.token
|
|
186
|
+
|
|
187
|
+
def is_(type, value):
|
|
188
|
+
return is_token(S.token, type, value)
|
|
189
|
+
|
|
190
|
+
def peek():
|
|
191
|
+
if not S.peeked.length:
|
|
192
|
+
S.peeked.push(S.input())
|
|
193
|
+
return S.peeked[0]
|
|
194
|
+
|
|
195
|
+
def prev():
|
|
196
|
+
return S.prev
|
|
197
|
+
|
|
198
|
+
def croak(msg, line, col, pos, is_eof):
|
|
199
|
+
# note: undefined means nothing was passed in, None/null means a null value was passed in
|
|
200
|
+
ctx = S.input.context()
|
|
201
|
+
raise new SyntaxError(msg, ctx.filename, (line if line is not undefined else ctx.tokline),
|
|
202
|
+
(col if col is not undefined else ctx.tokcol), (pos if pos is not undefined else ctx.tokpos), is_eof)
|
|
203
|
+
|
|
204
|
+
def token_error(token, msg):
|
|
205
|
+
is_eof = token.type is 'eof'
|
|
206
|
+
croak(msg, token.line, token.col, undefined, is_eof)
|
|
207
|
+
|
|
208
|
+
def unexpected(token):
|
|
209
|
+
if token is undefined:
|
|
210
|
+
token = S.token
|
|
211
|
+
token_error(token, "Unexpected token: " + token.type + " «" + token.value + "»")
|
|
212
|
+
|
|
213
|
+
def expect_token(type, val):
|
|
214
|
+
if is_(type, val):
|
|
215
|
+
return next()
|
|
216
|
+
token_error(S.token, "Unexpected token " + S.token.type + " «" + S.token.value + "»" +
|
|
217
|
+
", expected " + type + " «" + val + "»")
|
|
218
|
+
|
|
219
|
+
def expect(punc):
|
|
220
|
+
return expect_token("punc", punc)
|
|
221
|
+
|
|
222
|
+
def semicolon():
|
|
223
|
+
if is_("punc", ";"):
|
|
224
|
+
next()
|
|
225
|
+
S.token.nlb = True
|
|
226
|
+
|
|
227
|
+
def embed_tokens(parser):
|
|
228
|
+
def with_embedded_tokens():
|
|
229
|
+
start = S.token
|
|
230
|
+
expr = parser()
|
|
231
|
+
if expr is undefined:
|
|
232
|
+
unexpected()
|
|
233
|
+
end = prev()
|
|
234
|
+
expr.start = start
|
|
235
|
+
expr.end = end
|
|
236
|
+
return expr
|
|
237
|
+
return with_embedded_tokens
|
|
238
|
+
|
|
239
|
+
def scan_for_top_level_callables(body):
|
|
240
|
+
ans = v'[]'
|
|
241
|
+
# Get the named functions and classes
|
|
242
|
+
if Array.isArray(body):
|
|
243
|
+
for obj in body:
|
|
244
|
+
if is_node_type(obj, AST_Function) or is_node_type(obj, AST_Class):
|
|
245
|
+
if obj.name:
|
|
246
|
+
ans.push(obj.name.name)
|
|
247
|
+
else:
|
|
248
|
+
token_error(obj.start, "Top-level functions must have names")
|
|
249
|
+
else:
|
|
250
|
+
# skip inner scopes
|
|
251
|
+
if is_node_type(obj, AST_Scope):
|
|
252
|
+
continue
|
|
253
|
+
for x in ['body', 'alternative']:
|
|
254
|
+
opt = obj[x]
|
|
255
|
+
if opt:
|
|
256
|
+
ans = ans.concat(scan_for_top_level_callables(opt))
|
|
257
|
+
|
|
258
|
+
if is_node_type(opt, AST_Assign) and not (is_node_type(opt.right, AST_Scope)):
|
|
259
|
+
ans = ans.concat(scan_for_top_level_callables(opt.right))
|
|
260
|
+
|
|
261
|
+
elif body.body:
|
|
262
|
+
# recursive descent into wrapper statements that contain body blocks
|
|
263
|
+
ans = ans.concat(scan_for_top_level_callables(body.body))
|
|
264
|
+
if body.alternative:
|
|
265
|
+
ans = ans.concat(scan_for_top_level_callables(body.alternative))
|
|
266
|
+
|
|
267
|
+
|
|
268
|
+
return ans
|
|
269
|
+
|
|
270
|
+
def scan_for_classes(body):
|
|
271
|
+
ans = {}
|
|
272
|
+
for obj in body:
|
|
273
|
+
if is_node_type(obj, AST_Class):
|
|
274
|
+
ans[obj.name.name] = obj
|
|
275
|
+
return ans
|
|
276
|
+
|
|
277
|
+
def scan_for_local_vars(body):
|
|
278
|
+
"""
|
|
279
|
+
Pick out all variables being assigned to from within this scope, we'll mark them as local
|
|
280
|
+
|
|
281
|
+
body body to be scanned
|
|
282
|
+
"""
|
|
283
|
+
localvars = v'[]'
|
|
284
|
+
seen = {}
|
|
285
|
+
|
|
286
|
+
def push(x):
|
|
287
|
+
if has_prop(seen, x):
|
|
288
|
+
return
|
|
289
|
+
seen[x] = True
|
|
290
|
+
localvars.push(x)
|
|
291
|
+
|
|
292
|
+
def extend(arr):
|
|
293
|
+
for x in arr:
|
|
294
|
+
push(x)
|
|
295
|
+
|
|
296
|
+
def scan_in_array(arr):
|
|
297
|
+
for x in arr:
|
|
298
|
+
if is_node_type(x, AST_Seq):
|
|
299
|
+
x = x.to_array()
|
|
300
|
+
elif is_node_type(x, AST_Array):
|
|
301
|
+
x = x.elements
|
|
302
|
+
elif is_node_type(x, AST_Starred):
|
|
303
|
+
push(x.expression.name)
|
|
304
|
+
continue
|
|
305
|
+
if Array.isArray(x):
|
|
306
|
+
scan_in_array(x)
|
|
307
|
+
else:
|
|
308
|
+
if not is_node_type(x, AST_PropAccess):
|
|
309
|
+
push(x.name)
|
|
310
|
+
|
|
311
|
+
def add_assign_lhs(lhs):
|
|
312
|
+
if is_node_type(lhs, AST_Seq):
|
|
313
|
+
lhs = new AST_Array({'elements':lhs.to_array()})
|
|
314
|
+
if is_node_type(lhs, AST_Array):
|
|
315
|
+
# assignment to an implicit tuple
|
|
316
|
+
push("ρσ_unpack")
|
|
317
|
+
scan_in_array(lhs.elements)
|
|
318
|
+
elif lhs.name:
|
|
319
|
+
# assignment to a single variable
|
|
320
|
+
push(lhs.name)
|
|
321
|
+
|
|
322
|
+
def add_for_in(stmt):
|
|
323
|
+
if is_node_type(stmt.init, AST_Array):
|
|
324
|
+
# iteration via implicit tuple
|
|
325
|
+
push("ρσ_unpack")
|
|
326
|
+
scan_in_array(stmt.init.elements)
|
|
327
|
+
else:
|
|
328
|
+
# iteration via a single variable
|
|
329
|
+
push(stmt.init.name)
|
|
330
|
+
|
|
331
|
+
if Array.isArray(body):
|
|
332
|
+
# this is a body of statements
|
|
333
|
+
for stmt in body:
|
|
334
|
+
# skip inner scopes
|
|
335
|
+
if is_node_type(stmt, AST_Scope):
|
|
336
|
+
continue
|
|
337
|
+
|
|
338
|
+
# recursive descent into conditional, loop and exception bodies
|
|
339
|
+
for option in ('body', 'alternative', 'bcatch', 'condition'):
|
|
340
|
+
opt = stmt[option]
|
|
341
|
+
if opt:
|
|
342
|
+
extend(scan_for_local_vars(opt))
|
|
343
|
+
|
|
344
|
+
if is_node_type(opt, AST_Assign) and not (is_node_type(opt.right, AST_Scope)):
|
|
345
|
+
extend(scan_for_local_vars(opt.right))
|
|
346
|
+
|
|
347
|
+
# pick up iterators from loops
|
|
348
|
+
if is_node_type(stmt, AST_ForIn):
|
|
349
|
+
add_for_in(stmt)
|
|
350
|
+
elif is_node_type(stmt, AST_DWLoop):
|
|
351
|
+
extend(scan_for_local_vars(stmt))
|
|
352
|
+
elif is_node_type(stmt, AST_With):
|
|
353
|
+
push('ρσ_with_exception'), push('ρσ_with_suppress')
|
|
354
|
+
for clause in stmt.clauses:
|
|
355
|
+
if clause.alias:
|
|
356
|
+
push(clause.alias.name)
|
|
357
|
+
elif is_node_type(stmt, AST_Match):
|
|
358
|
+
for mcase in stmt.cases:
|
|
359
|
+
for cap in scan_match_pattern_captures(mcase.pattern):
|
|
360
|
+
push(cap)
|
|
361
|
+
if mcase.body:
|
|
362
|
+
extend(scan_for_local_vars(mcase.body))
|
|
363
|
+
elif is_node_type(stmt, AST_AnnotatedAssign):
|
|
364
|
+
if stmt.value is not None and is_node_type(stmt.target, AST_SymbolRef):
|
|
365
|
+
push(stmt.target.name)
|
|
366
|
+
if stmt.value:
|
|
367
|
+
extend(scan_for_local_vars(stmt.value))
|
|
368
|
+
|
|
369
|
+
elif body.body:
|
|
370
|
+
# recursive descent into wrapper statements that contain body blocks
|
|
371
|
+
extend(scan_for_local_vars(body.body))
|
|
372
|
+
if body.alternative:
|
|
373
|
+
extend(scan_for_local_vars(body.alternative))
|
|
374
|
+
|
|
375
|
+
elif is_node_type(body, AST_Assign):
|
|
376
|
+
# this is a single assignment operator
|
|
377
|
+
if body.is_chained():
|
|
378
|
+
is_compound_assign = False
|
|
379
|
+
for lhs in body.traverse_chain()[0]:
|
|
380
|
+
add_assign_lhs(lhs)
|
|
381
|
+
if is_node_type(lhs, AST_Seq) or is_node_type(lhs, AST_Array):
|
|
382
|
+
is_compound_assign = True
|
|
383
|
+
break
|
|
384
|
+
if is_compound_assign:
|
|
385
|
+
push('ρσ_chain_assign_temp')
|
|
386
|
+
else:
|
|
387
|
+
add_assign_lhs(body.left)
|
|
388
|
+
if not is_node_type(body.right, AST_Scope):
|
|
389
|
+
extend(scan_for_local_vars(body.right))
|
|
390
|
+
|
|
391
|
+
elif is_node_type(body, AST_NamedExpr):
|
|
392
|
+
# walrus operator: name := value
|
|
393
|
+
push(body.name.name)
|
|
394
|
+
extend(scan_for_local_vars(body.value))
|
|
395
|
+
|
|
396
|
+
elif is_node_type(body, AST_AnnotatedAssign):
|
|
397
|
+
# annotated assignment: name: annotation [= value]
|
|
398
|
+
if body.value is not None and is_node_type(body.target, AST_SymbolRef):
|
|
399
|
+
push(body.target.name)
|
|
400
|
+
if body.value:
|
|
401
|
+
extend(scan_for_local_vars(body.value))
|
|
402
|
+
|
|
403
|
+
elif is_node_type(body, AST_ForIn):
|
|
404
|
+
add_for_in(body)
|
|
405
|
+
|
|
406
|
+
return localvars
|
|
407
|
+
|
|
408
|
+
def scan_match_pattern_captures(pattern):
|
|
409
|
+
"""Recursively collect all capture variable names from a match pattern."""
|
|
410
|
+
captures = v'[]'
|
|
411
|
+
if is_node_type(pattern, AST_MatchCapture):
|
|
412
|
+
captures.push(pattern.name)
|
|
413
|
+
elif is_node_type(pattern, AST_MatchAs):
|
|
414
|
+
if pattern.name:
|
|
415
|
+
captures.push(pattern.name)
|
|
416
|
+
if pattern.pattern:
|
|
417
|
+
captures = captures.concat(scan_match_pattern_captures(pattern.pattern))
|
|
418
|
+
elif is_node_type(pattern, AST_MatchOr):
|
|
419
|
+
# OR patterns must bind the same names – scan first alternative
|
|
420
|
+
if pattern.patterns.length:
|
|
421
|
+
captures = captures.concat(scan_match_pattern_captures(pattern.patterns[0]))
|
|
422
|
+
elif is_node_type(pattern, AST_MatchSequence):
|
|
423
|
+
for e in pattern.elements:
|
|
424
|
+
captures = captures.concat(scan_match_pattern_captures(e))
|
|
425
|
+
elif is_node_type(pattern, AST_MatchStar):
|
|
426
|
+
if pattern.name:
|
|
427
|
+
captures.push(pattern.name)
|
|
428
|
+
elif is_node_type(pattern, AST_MatchMapping):
|
|
429
|
+
for v in pattern.values:
|
|
430
|
+
captures = captures.concat(scan_match_pattern_captures(v))
|
|
431
|
+
if pattern.rest_name:
|
|
432
|
+
captures.push(pattern.rest_name)
|
|
433
|
+
elif is_node_type(pattern, AST_MatchClass):
|
|
434
|
+
for p in pattern.positional:
|
|
435
|
+
captures = captures.concat(scan_match_pattern_captures(p))
|
|
436
|
+
for v in pattern.values:
|
|
437
|
+
captures = captures.concat(scan_match_pattern_captures(v))
|
|
438
|
+
return captures
|
|
439
|
+
|
|
440
|
+
def scan_for_nonlocal_defs(body):
|
|
441
|
+
vars = v'[]'
|
|
442
|
+
if Array.isArray(body):
|
|
443
|
+
for stmt in body:
|
|
444
|
+
if is_node_type(stmt, AST_Scope):
|
|
445
|
+
continue
|
|
446
|
+
|
|
447
|
+
# don't invade nested scopes
|
|
448
|
+
if is_node_type(stmt, AST_Definitions):
|
|
449
|
+
for vardef in stmt.definitions:
|
|
450
|
+
vars.push(vardef.name.name)
|
|
451
|
+
|
|
452
|
+
for option in ('body', 'alternative'):
|
|
453
|
+
nonlocal vars
|
|
454
|
+
opt = stmt[option]
|
|
455
|
+
if opt:
|
|
456
|
+
vars = vars.concat(scan_for_nonlocal_defs(opt))
|
|
457
|
+
|
|
458
|
+
|
|
459
|
+
elif body.body:
|
|
460
|
+
vars = vars.concat(scan_for_nonlocal_defs(body.body))
|
|
461
|
+
if body.alternative:
|
|
462
|
+
vars = vars.concat(scan_for_nonlocal_defs(body.alternative))
|
|
463
|
+
|
|
464
|
+
|
|
465
|
+
return vars
|
|
466
|
+
|
|
467
|
+
def return_():
|
|
468
|
+
if is_('punc', ';'):
|
|
469
|
+
semicolon()
|
|
470
|
+
value = None
|
|
471
|
+
else:
|
|
472
|
+
is_end_of_statement = S.token.nlb or is_("eof") or is_("punc", "}")
|
|
473
|
+
if is_end_of_statement:
|
|
474
|
+
value = None
|
|
475
|
+
else:
|
|
476
|
+
value = expression(True)
|
|
477
|
+
semicolon()
|
|
478
|
+
return value
|
|
479
|
+
|
|
480
|
+
@embed_tokens
|
|
481
|
+
def statement():
|
|
482
|
+
# From Kovid: The next three lines were a hack to try to support statements
|
|
483
|
+
# starting with a regexp literal. However, it did not work, for example:
|
|
484
|
+
# echo 'f=1\n/asd/.test()' | rs -> parse error
|
|
485
|
+
# So we just accept that this cannot be supported in RS, and avoid hacks that mess
|
|
486
|
+
# with the internal state of S. In any case,
|
|
487
|
+
# statements starting with a literal are very rare.
|
|
488
|
+
if S.token.type is 'operator' and S.token.value.substr(0, 1) is '/':
|
|
489
|
+
token_error(S.token, 'RapydScript does not support statements starting with regexp literals')
|
|
490
|
+
|
|
491
|
+
S.statement_starting_token = S.token
|
|
492
|
+
tmp_ = S.token.type
|
|
493
|
+
p = prev()
|
|
494
|
+
if p and not S.token.nlb and ATOMIC_START_TOKEN[p.type] and not is_('punc', ':'):
|
|
495
|
+
unexpected()
|
|
496
|
+
if tmp_ is "string":
|
|
497
|
+
return simple_statement()
|
|
498
|
+
elif tmp_ is "shebang":
|
|
499
|
+
tmp_ = S.token.value
|
|
500
|
+
next()
|
|
501
|
+
return new AST_Directive({
|
|
502
|
+
'value': tmp_
|
|
503
|
+
})
|
|
504
|
+
elif tmp_ is "num" or tmp_ is "regexp" or tmp_ is "operator" or tmp_ is "atom" or tmp_ is "js":
|
|
505
|
+
return simple_statement()
|
|
506
|
+
elif tmp_ is "punc":
|
|
507
|
+
tmp_ = S.token.value
|
|
508
|
+
if tmp_ is ":":
|
|
509
|
+
return new AST_BlockStatement({
|
|
510
|
+
'start': S.token,
|
|
511
|
+
'body': block_(),
|
|
512
|
+
'end': prev()
|
|
513
|
+
})
|
|
514
|
+
elif tmp_ is "{" or tmp_ is "[" or tmp_ is "(":
|
|
515
|
+
return simple_statement()
|
|
516
|
+
elif tmp_ is ";":
|
|
517
|
+
next()
|
|
518
|
+
return new AST_EmptyStatement({'stype':';', 'start':prev(), 'end':prev()})
|
|
519
|
+
else:
|
|
520
|
+
unexpected()
|
|
521
|
+
elif tmp_ is "name":
|
|
522
|
+
if S.token.value is 'match':
|
|
523
|
+
p = peek()
|
|
524
|
+
# 'match' is a soft keyword: treat as match statement when followed by
|
|
525
|
+
# a token that can start an expression subject (not an assignment/attr-access/call op)
|
|
526
|
+
if p.type is 'name' or p.type is 'string' or p.type is 'num' or p.type is 'atom' or p.type is 'js' or (p.type is 'punc' and (p.value is '[' or p.value is '(')):
|
|
527
|
+
next() # consume the 'match' name token
|
|
528
|
+
return match_()
|
|
529
|
+
if (is_token(peek(), 'punc', ':')):
|
|
530
|
+
return annotated_var_statement()
|
|
531
|
+
return simple_statement()
|
|
532
|
+
elif tmp_ is "keyword":
|
|
533
|
+
tmp_ = S.token.value
|
|
534
|
+
next()
|
|
535
|
+
if tmp_ is "break":
|
|
536
|
+
return break_cont(AST_Break)
|
|
537
|
+
elif tmp_ is "continue":
|
|
538
|
+
return break_cont(AST_Continue)
|
|
539
|
+
elif tmp_ is "debugger":
|
|
540
|
+
semicolon()
|
|
541
|
+
return new AST_Debugger()
|
|
542
|
+
elif tmp_ is "do":
|
|
543
|
+
return new AST_Do({
|
|
544
|
+
'body': in_loop(statement),
|
|
545
|
+
'condition': (def():
|
|
546
|
+
expect(".")
|
|
547
|
+
expect_token("keyword", "while")
|
|
548
|
+
tmp = expression(True)
|
|
549
|
+
if is_node_type(tmp, AST_Assign):
|
|
550
|
+
croak('Assignments in do loop conditions are not allowed')
|
|
551
|
+
semicolon()
|
|
552
|
+
return tmp
|
|
553
|
+
)()
|
|
554
|
+
})
|
|
555
|
+
elif tmp_ is "while":
|
|
556
|
+
while_cond = expression(True)
|
|
557
|
+
if is_node_type(while_cond, AST_Assign):
|
|
558
|
+
croak('Assignments in while loop conditions are not allowed')
|
|
559
|
+
if not is_('punc', ':'):
|
|
560
|
+
croak('Expected a colon after the while statement')
|
|
561
|
+
return new AST_While({
|
|
562
|
+
'condition': while_cond,
|
|
563
|
+
'body': in_loop(statement)
|
|
564
|
+
})
|
|
565
|
+
elif tmp_ is "for":
|
|
566
|
+
if is_('js'):
|
|
567
|
+
return for_js()
|
|
568
|
+
return for_()
|
|
569
|
+
elif tmp_ is "from":
|
|
570
|
+
return import_(True)
|
|
571
|
+
elif tmp_ is "import":
|
|
572
|
+
return import_(False)
|
|
573
|
+
elif tmp_ is "class":
|
|
574
|
+
return class_()
|
|
575
|
+
elif tmp_ is "def":
|
|
576
|
+
start = prev()
|
|
577
|
+
func = function_(S.in_class[-1], False)
|
|
578
|
+
func.start = start
|
|
579
|
+
func.end = prev()
|
|
580
|
+
chain = subscripts(func, True)
|
|
581
|
+
if chain is func:
|
|
582
|
+
return func
|
|
583
|
+
else:
|
|
584
|
+
return new AST_SimpleStatement({
|
|
585
|
+
'start': start,
|
|
586
|
+
'body': chain,
|
|
587
|
+
'end': prev()
|
|
588
|
+
})
|
|
589
|
+
elif tmp_ is "async":
|
|
590
|
+
start = prev()
|
|
591
|
+
if not is_("keyword", "def"):
|
|
592
|
+
croak("Expected 'def' after 'async'")
|
|
593
|
+
next()
|
|
594
|
+
func = function_(S.in_class[-1], False, True)
|
|
595
|
+
func.start = start
|
|
596
|
+
func.end = prev()
|
|
597
|
+
chain = subscripts(func, True)
|
|
598
|
+
if chain is func:
|
|
599
|
+
return func
|
|
600
|
+
else:
|
|
601
|
+
return new AST_SimpleStatement({
|
|
602
|
+
'start': start,
|
|
603
|
+
'body': chain,
|
|
604
|
+
'end': prev()
|
|
605
|
+
})
|
|
606
|
+
elif tmp_ is "lambda":
|
|
607
|
+
start = prev()
|
|
608
|
+
func = lambda_()
|
|
609
|
+
func.start = start
|
|
610
|
+
func.end = prev()
|
|
611
|
+
return new AST_SimpleStatement({'start': start, 'body': func, 'end': prev()})
|
|
612
|
+
elif tmp_ is "await":
|
|
613
|
+
start = prev()
|
|
614
|
+
value = expression(True)
|
|
615
|
+
semicolon()
|
|
616
|
+
node = new AST_Await({'start': start, 'value': value, 'end': prev()})
|
|
617
|
+
return new AST_SimpleStatement({'start': start, 'body': node, 'end': prev()})
|
|
618
|
+
elif tmp_ is 'assert':
|
|
619
|
+
start = prev()
|
|
620
|
+
cond = expression(False)
|
|
621
|
+
msg = None
|
|
622
|
+
if is_('punc', ','):
|
|
623
|
+
next()
|
|
624
|
+
msg = expression(False)
|
|
625
|
+
return new AST_Assert({'start': start, 'condition':cond, 'message':msg, 'end':prev()})
|
|
626
|
+
elif tmp_ is "if":
|
|
627
|
+
return if_()
|
|
628
|
+
elif tmp_ is "pass":
|
|
629
|
+
semicolon()
|
|
630
|
+
return new AST_EmptyStatement({'stype':'pass', 'start':prev(), 'end':prev()})
|
|
631
|
+
elif tmp_ is "return":
|
|
632
|
+
if S.in_function is 0:
|
|
633
|
+
croak("'return' outside of function")
|
|
634
|
+
if S.functions[-1].is_generator:
|
|
635
|
+
croak("'return' not allowed in a function with yield")
|
|
636
|
+
S.functions[-1].is_generator = False
|
|
637
|
+
|
|
638
|
+
return new AST_Return({'value':return_()})
|
|
639
|
+
elif tmp_ is "yield":
|
|
640
|
+
return yield_()
|
|
641
|
+
elif tmp_ is "raise":
|
|
642
|
+
if S.token.nlb:
|
|
643
|
+
return new AST_Throw({
|
|
644
|
+
'value': new AST_SymbolCatch({
|
|
645
|
+
'name': "ρσ_Exception"
|
|
646
|
+
})
|
|
647
|
+
})
|
|
648
|
+
|
|
649
|
+
tmp = expression(True)
|
|
650
|
+
semicolon()
|
|
651
|
+
return new AST_Throw({
|
|
652
|
+
'value': tmp
|
|
653
|
+
})
|
|
654
|
+
elif tmp_ is "try":
|
|
655
|
+
return try_()
|
|
656
|
+
elif tmp_ is "nonlocal":
|
|
657
|
+
tmp = nonlocal_()
|
|
658
|
+
semicolon()
|
|
659
|
+
return tmp
|
|
660
|
+
elif tmp_ is 'global':
|
|
661
|
+
tmp = nonlocal_(True)
|
|
662
|
+
semicolon()
|
|
663
|
+
return tmp
|
|
664
|
+
elif tmp_ is "with":
|
|
665
|
+
return with_()
|
|
666
|
+
else:
|
|
667
|
+
unexpected()
|
|
668
|
+
|
|
669
|
+
def with_():
    # Parse a Python-style `with` statement:
    #   with expr [as alias][, expr [as alias]...]:
    # Returns an AST_With whose `clauses` is a list of AST_WithClause.
    clauses = v'[]'
    start = S.token
    while True:
        if is_('eof'):
            unexpected()
        expr = expression()
        alias = None
        if is_('keyword', 'as'):
            next()
            alias = as_symbol(AST_SymbolAlias)
        clauses.push(new AST_WithClause({'expression':expr, 'alias':alias}))
        if is_('punc', ','):
            next()
            continue
        # anything other than ',' or ':' after a clause is a syntax error
        if not is_('punc', ':'):
            unexpected()
        break

    if not clauses.length:
        token_error(start, 'with statement must have at least one clause')
    # statement() consumes the ':' and parses the indented body block
    body = statement()

    return new AST_With({
        'clauses': clauses,
        'body': body
    })
|
|
696
|
+
|
|
697
|
+
# ---- Match/case (structural pattern matching) ----
|
|
698
|
+
|
|
699
|
+
def parse_match_literal_node(tok):
    """Build an AST literal node from a token.

    Accepts 'string', 'num' and 'atom' tokens; atoms map 'True'/'False'
    to the boolean nodes and any other atom value to AST_Null.
    Croaks for any other token type.
    """
    if tok.type is 'string':
        return new AST_String({'start': tok, 'value': tok.value, 'end': tok})
    elif tok.type is 'num':
        return new AST_Number({'start': tok, 'value': tok.value, 'end': tok})
    elif tok.type is 'atom':
        if tok.value is 'True':
            return new AST_True({'start': tok, 'end': tok})
        elif tok.value is 'False':
            return new AST_False({'start': tok, 'end': tok})
        else:
            # every non-True/False atom (e.g. None) becomes a null literal
            return new AST_Null({'start': tok, 'end': tok})
    # only reached when tok.type is not string/num/atom
    croak('Expected a literal in match pattern')
|
|
713
|
+
|
|
714
|
+
def parse_match_closed_pattern():
    """Parse a single atomic/closed pattern (no OR, no AS).

    Handles, in order: negative/positive numbers, strings, atoms,
    sequence patterns [...] and (...), mapping patterns {...}, and
    name-based patterns (wildcard `_`, capture, dotted value, class).
    """
    start = S.token

    # Negative number literal: -42
    if is_('operator', '-') and peek().type is 'num':
        next() # consume '-'
        val = -S.token.value
        tok = S.token
        next()
        return new AST_MatchLiteral({
            'start': start,
            'value': new AST_Number({'start': start, 'value': val, 'end': tok}),
            'end': prev()
        })

    # Number literal
    if is_('num'):
        tok = S.token
        next()
        return new AST_MatchLiteral({
            'start': tok,
            'value': new AST_Number({'start': tok, 'value': tok.value, 'end': tok}),
            'end': prev()
        })

    # String literal
    if is_('string'):
        tok = S.token
        next()
        return new AST_MatchLiteral({
            'start': tok,
            'value': new AST_String({'start': tok, 'value': tok.value, 'end': tok}),
            'end': prev()
        })

    # Atom: True, False, None
    if is_('atom'):
        tok = S.token
        next()
        return new AST_MatchLiteral({
            'start': tok,
            'value': parse_match_literal_node(tok),
            'end': prev()
        })

    # Sequence: [...]
    if is_('punc', '['):
        return parse_match_sequence('[', ']')

    # Group or sequence: (...)
    if is_('punc', '('):
        return parse_match_sequence('(', ')')

    # Mapping: {...}
    if is_('punc', '{'):
        return parse_match_mapping()

    # Name: wildcard, capture, class pattern, or dotted value pattern
    if is_('name'):
        name = S.token.value
        next()

        # Build dotted value pattern: Mod.CONST or Mod.Sub.CONST
        expr = new AST_SymbolRef({'start': start, 'name': name, 'end': prev()})
        while is_('punc', '.'):
            next() # consume '.'
            if not is_('name'):
                croak('Expected name after . in match pattern')
            prop = S.token.value
            prop_end = S.token
            next()
            expr = new AST_Dot({'start': start, 'expression': expr, 'property': prop, 'end': prop_end})

        # Class pattern: Name(...) or Dotted.Name(...)
        if is_('punc', '('):
            return parse_match_class(expr, start)

        # After a dot, it's a value pattern
        if is_node_type(expr, AST_Dot):
            return new AST_MatchLiteral({'start': start, 'value': expr, 'end': prev()})

        # Wildcard: _
        if name is '_':
            return new AST_MatchWildcard({'start': start, 'end': prev()})

        # Capture: plain name
        return new AST_MatchCapture({'start': start, 'name': name, 'end': prev()})

    croak('Expected a match pattern, got: ' + S.token.type + ' ' + S.token.value)
|
|
804
|
+
|
|
805
|
+
def parse_match_sequence(open_punc, close_punc):
    # Parse a sequence pattern delimited by '[' ']' or '(' ')'.
    # Elements may be sub-patterns or star patterns (*name / *_).
    # A parenthesised single non-star element is a group and is unwrapped.
    start = S.token
    next() # consume '[' or '('
    elements = v'[]'

    # empty sequence: [] or ()
    if is_('punc', close_punc):
        next()
        return new AST_MatchSequence({'start': start, 'elements': elements, 'end': prev()})

    while True:
        if is_('operator', '*'):
            star_start = S.token
            next() # consume '*'
            if is_('name', '_'):
                # *_ : anonymous rest, name is None
                next()
                elements.push(new AST_MatchStar({'start': star_start, 'name': None, 'end': prev()}))
            elif is_('name'):
                nm = S.token.value
                next()
                elements.push(new AST_MatchStar({'start': star_start, 'name': nm, 'end': prev()}))
            else:
                croak('Expected a name after * in sequence pattern')
        else:
            elements.push(parse_match_pattern())

        if is_('punc', ','):
            next()
            # allow trailing comma before the closing delimiter
            if is_('punc', close_punc):
                break
        elif is_('punc', close_punc):
            break
        else:
            unexpected()

    next() # consume ']' or ')'

    # A parenthesised group with exactly one non-star element → unwrap
    if open_punc is '(' and elements.length is 1 and not is_node_type(elements[0], AST_MatchStar):
        return elements[0]

    return new AST_MatchSequence({'start': start, 'elements': elements, 'end': prev()})
|
|
846
|
+
|
|
847
|
+
def parse_match_mapping():
    # Parse a mapping pattern: {key: pattern, ..., **rest}.
    # keys/values are parallel arrays; rest_name is the '**name' capture
    # (None for '**_' or when no rest is given).
    start = S.token
    next() # consume '{'
    keys = v'[]'
    values = v'[]'
    rest_name = None

    if not is_('punc', '}'):
        while True:
            if is_('operator', '**'):
                next() # consume '**'
                if is_('name', '_'):
                    next()
                    rest_name = None
                elif is_('name'):
                    rest_name = S.token.value
                    next()
                else:
                    croak('Expected a name after ** in mapping pattern')
                # '**rest' must be the last entry — stop parsing entries
                break
            # Parse key (string, num, atom, or name-as-string)
            if is_('string') or is_('num') or is_('atom'):
                key_tok = S.token
                next()
                keys.push(parse_match_literal_node(key_tok))
            elif is_('name'):
                # Treat bare name as string key (for dict-style access)
                key_tok = S.token
                next()
                keys.push(new AST_String({'start': key_tok, 'value': key_tok.value, 'end': key_tok}))
            else:
                croak('Expected a literal key in mapping pattern')

            if not is_('punc', ':'):
                croak('Expected : after key in mapping pattern')
            next() # consume ':'

            values.push(parse_match_pattern())

            if is_('punc', ','):
                next()
                # allow trailing comma before '}'
                if is_('punc', '}'):
                    break
            elif is_('punc', '}'):
                break
            else:
                unexpected()

    next() # consume '}'
    return new AST_MatchMapping({
        'start': start,
        'keys': keys,
        'values': values,
        'rest_name': rest_name,
        'end': prev()
    })
|
|
903
|
+
|
|
904
|
+
def parse_match_class(cls_expr, start):
    # Parse a class pattern's argument list: Cls(pos_pat, ..., kw=pat, ...).
    # `cls_expr` (the class reference, possibly dotted) and `start` were
    # already consumed by the caller; current token is '('.
    next() # consume '('
    positional = v'[]'
    keys = v'[]'      # keyword-argument names
    values = v'[]'    # patterns parallel to `keys`

    if not is_('punc', ')'):
        while True:
            # Keyword arg: name=pattern
            if is_('name') and is_token(peek(), 'operator', '='):
                kname = S.token.value
                next() # consume name
                next() # consume '='
                vpat = parse_match_pattern()
                keys.push(kname)
                values.push(vpat)
            else:
                # positional patterns may not follow keyword patterns
                if keys.length:
                    croak('Positional patterns must come before keyword patterns in class pattern')
                positional.push(parse_match_pattern())

            if is_('punc', ','):
                next()
                # allow trailing comma before ')'
                if is_('punc', ')'):
                    break
            elif is_('punc', ')'):
                break
            else:
                unexpected()

    next() # consume ')'
    return new AST_MatchClass({
        'start': start,
        'cls': cls_expr,
        'positional': positional,
        'keys': keys,
        'values': values,
        'end': prev()
    })
|
|
943
|
+
|
|
944
|
+
def parse_match_or_pattern():
    # Parse one closed pattern, optionally followed by '|'-separated
    # alternatives; multiple alternatives collapse into AST_MatchOr.
    start = S.token
    pat = parse_match_closed_pattern()
    if is_('operator', '|'):
        patterns = [pat]
        while is_('operator', '|'):
            next() # consume '|'
            patterns.push(parse_match_closed_pattern())
        return new AST_MatchOr({'start': start, 'patterns': patterns, 'end': prev()})
    return pat
|
|
954
|
+
|
|
955
|
+
def parse_match_pattern():
    """Parse a full pattern including optional AS suffix.

    Grammar: or_pattern ['as' NAME]. The 'as' clause wraps the parsed
    pattern in an AST_MatchAs carrying the capture name.
    """
    start = S.token
    pat = parse_match_or_pattern()
    if is_('keyword', 'as'):
        next() # consume 'as'
        if not is_('name'):
            croak('Expected a capture name after "as" in match pattern')
        aname = S.token.value
        next()
        return new AST_MatchAs({
            'start': start,
            'pattern': pat,
            'name': aname,
            'end': prev()
        })
    return pat
|
|
972
|
+
|
|
973
|
+
@embed_tokens
def match_():
    """Parse a match/case statement. 'match' name token already consumed."""
    start = prev() # the 'match' token
    subject = expression(True)

    if not is_('punc', ':'):
        croak("Expected ':' after match subject")

    cases = v'[]'
    # remember the indent of the ':' line so we can verify the case block
    # is indented deeper than the match statement itself
    prev_whitespace = S.token.leading_whitespace
    next() # consume ':'

    # case clauses must start on a new line (nlb = newline-before flag)
    if not S.token.nlb:
        croak('Expected an indented block of case statements after match')

    current_whitespace = S.token.leading_whitespace
    if current_whitespace.length is 0 or prev_whitespace is current_whitespace:
        croak('Expected an indented block after match')

    # the tokenizer closes the indented block with a synthetic '}' punc
    while not is_('punc', '}') and not is_('eof'):
        # 'case' is a soft keyword: tokenized as a plain name
        if not (S.token.type is 'name' and S.token.value is 'case'):
            token_error(S.token, 'Expected "case" inside match block')

        case_start = S.token
        next() # consume 'case'

        pattern = parse_match_pattern()

        guard = None
        if is_('keyword', 'if'):
            next() # consume 'if'
            guard = expression(True)

        body = statement() # sees ':', creates AST_BlockStatement

        cases.push(new AST_MatchCase({
            'start': case_start,
            'pattern': pattern,
            'guard': guard,
            'body': body,
            'end': prev()
        }))

    if is_('punc', '}'):
        next() # consume the outer '}'

    return new AST_Match({
        'start': start,
        'subject': subject,
        'cases': cases
    })
|
|
1025
|
+
|
|
1026
|
+
def simple_statement(tmp):
    # Parse an expression statement and wrap it in AST_SimpleStatement.
    # NOTE: the `tmp` argument is immediately overwritten and never read.
    tmp = expression(True)
    semicolon()
    return new AST_SimpleStatement({
        'body': tmp
    })
|
|
1032
|
+
|
|
1033
|
+
def annotated_var_statement():
    # Parse a variable type annotation: `name: annotation [= value]`
    # Called when the current token is a name and the next token is ':'
    start = S.token
    target = new AST_SymbolRef({
        'start': S.token,
        'name': S.token.value,
        'end': S.token
    })
    next() # consume the name token
    expect(':') # consume the ':'
    # the annotation itself is an expression (conditional-level precedence)
    annotation = maybe_conditional()
    value = None
    if is_("operator", "="):
        next() # consume '='
        value = expression(True)
    # Track as class variable if inside a class body
    if S.in_class.length and S.in_class[-1]:
        class_name = S.in_class[-1]
        if S.classes.length > 1:
            # S.classes[-1] is the class-body scope; the class's own
            # bookkeeping record lives one level up, keyed by its name
            c = S.classes[-2][class_name]
            if c:
                c.provisional_classvars[target.name] = True
    semicolon()
    return new AST_AnnotatedAssign({
        'start': start,
        'target': target,
        'annotation': annotation,
        'value': value,
        'end': prev()
    })
|
|
1064
|
+
|
|
1065
|
+
def break_cont(t):
    # Parse a break/continue statement. `t` is the AST node constructor
    # (its .name is e.g. "AST_Break", so slice(4) yields "Break"/"Continue"
    # for the error message).
    if S.in_loop is 0:
        croak(t.name.slice(4) + " not inside a loop or switch")
    semicolon()
    return new t()
|
|
1070
|
+
|
|
1071
|
+
def yield_():
    # Parse a yield / yield from statement and mark the enclosing
    # function as a generator. is_generator is tri-state: it starts
    # undefined, 'return' sets it to False, and False here means a
    # 'return' was already seen → mixing is an error.
    if S.in_function is 0:
        croak("'yield' outside of function")
    if S.functions[-1].is_generator is False:
        croak("'yield' not allowed in a function with return")
    S.functions[-1].is_generator = True
    is_yield_from = is_('keyword', 'from')
    if is_yield_from:
        next()
    # return_() parses the (optional) yielded expression
    return new AST_Yield({'is_yield_from':is_yield_from, 'value': return_()})
|
|
1081
|
+
|
|
1082
|
+
def for_(list_comp):
    # Parse the header of a Python-style `for ... in ...` loop.
    # `list_comp` is truthy when parsing the for-clause of a comprehension.
    # expect("(")
    init = None
    if not is_("punc", ";"):
        init = expression(True, True)
        # standardize AST_Seq into array now for consistency
        if is_node_type(init, AST_Seq):
            if is_node_type(init.car, AST_SymbolRef) and is_node_type(init.cdr, AST_SymbolRef):
                # Optimization to prevent runtime call to ρσ_flatten when init is simply (a, b)
                tmp = init.to_array()
            else:
                tmp = [init]
            init = new AST_Array({
                'start': init.start,
                'elements': tmp,
                'end': init.end
            })

    if is_("operator", "in"):
        if is_node_type(init, AST_Var) and init.definitions.length > 1:
            croak("Only one variable declaration allowed in for..in loop")
        next()
        return for_in(init, list_comp)

    # a for-loop without 'in' is not valid in this dialect
    unexpected()
|
|
1107
|
+
|
|
1108
|
+
def for_in(init, list_comp):
    # Parse the iterable and body of a for..in loop. When `list_comp`
    # is truthy, return a plain object for the comprehension builder
    # instead of an AST_ForIn node (no body is parsed in that case).
    lhs = init.definitions[0].name if is_node_type(init, AST_Var) else None
    obj = expression(True)
    # expect(")")
    if list_comp:
        return {
            'init': init,
            'name': lhs,
            'object': obj
        }

    # in_loop() bumps S.in_loop around the body so break/continue validate
    return new AST_ForIn({
        'init': init,
        'name': lhs,
        'object': obj,
        'body': in_loop(statement)
    })
|
|
1125
|
+
|
|
1126
|
+
# A native JavaScript for loop - for v"var i=0; i<5000; i++":
|
|
1127
|
+
def for_js():
    # A native JavaScript for loop - for v"var i=0; i<5000; i++":
    # The entire C-style header is a verbatim-JS atom; only the body
    # is parsed as RapydScript.
    condition = as_atom_node()
    return new AST_ForJS({
        'condition': condition,
        'body': in_loop(statement)
    })
|
|
1133
|
+
|
|
1134
|
+
# scan function/class body for nested class declarations
|
|
1135
|
+
def get_class_in_scope(expr):
    # Resolve `expr` (a symbol or dotted expression) to a known class
    # record, checking native/error classes first, then the scope stack.
    # Returns the class-details object, or False when not found.
    # TODO: Currently if a local variable shadows a class name defined in
    # an outerscope, the logic below will identify that variable as a
    # class. This bug was always present. Fixing it will require the parser
    # to maintain a list of local variables for every AST_Scope and provide
    # an easy way to walk the ast tree upwards.
    if is_node_type(expr, AST_SymbolRef):
        # check Native JS classes
        if has_prop(NATIVE_CLASSES, expr.name):
            return NATIVE_CLASSES[expr.name]
        if has_prop(ERROR_CLASSES, expr.name):
            return ERROR_CLASSES[expr.name]

        # traverse in reverse to check local variables first
        for s in range(S.classes.length-1, -1, -1):
            if has_prop(S.classes[s], expr.name):
                return S.classes[s][expr.name]

    elif is_node_type(expr, AST_Dot):
        referenced_path = []
        # this one is for detecting classes inside modules and eventually nested classes
        while is_node_type(expr, AST_Dot):
            referenced_path.unshift(expr.property)
            expr = expr.expression
        if is_node_type(expr, AST_SymbolRef):
            referenced_path.unshift(expr.name)
            # now 'referenced_path' should contain the full path of potential class
            if len(referenced_path) > 1:
                # class records for dotted names are keyed by the joined path
                class_name = referenced_path.join('.')
                for s in range(S.classes.length-1, -1, -1):
                    if has_prop(S.classes[s], class_name):
                        return S.classes[s][class_name]
    return False
|
|
1168
|
+
|
|
1169
|
+
def import_error(message):
    # Raise an ImportError annotated with the current tokenizer position
    # (filename/line/col/pos) so import failures point at the import site.
    ctx = S.input.context()
    raise new ImportError(message, ctx.filename, ctx.tokline, ctx.tokcol, ctx.tokpos)
|
|
1172
|
+
|
|
1173
|
+
def do_import(key):
    # Locate, (re)compile or load-from-cache the module identified by the
    # dotted module id `key`, recording the result in `imported_modules`.
    # No return value; side effects only.
    if has_prop(imported_modules, key):
        return  # already imported
    if has_prop(importing_modules, key) and importing_modules[key]:
        import_error('Detected a recursive import of: ' + key + ' while importing: ' + module_id)

    # Ensure that the package containing this module is also imported
    package_module_id = key.split('.')[:-1].join('.')
    if len(package_module_id) > 0:
        do_import(package_module_id)

    if options.for_linting:
        # linting mode: register an empty stub instead of reading/compiling
        imported_modules[key] = {'is_cached':True, 'classes':{}, 'module_id':key, 'exports':[],
            'nonlocalvars':[], 'baselib':{}, 'outputs':{}, 'discard_asserts':options.discard_asserts}
        return

    def safe_read(base_path):
        # Try `<base>.pyj` then `<base>/__init__.pyj`; returns
        # [source, path], or None, None when neither file exists.
        for i, path in enumerate([base_path + '.pyj', base_path + '/__init__.pyj']):
            try:
                return [readfile(path, "utf-8"), path] # noqa:undef
            except as e:
                # NOTE(review): Node's permission error code is 'EACCES';
                # 'EACCESS' here never matches — confirm intended spelling.
                if e.code is 'ENOENT' or e.code is 'EPERM' or e.code is 'EACCESS':
                    if i is 1:
                        return None, None
                if i is 1:
                    raise

    src_code = filename = None
    modpath = key.replace(/\./g, '/')

    # search every configured import directory for the module source
    for location in import_dirs:
        if location:
            data, filename = safe_read(location + '/' + modpath)
            if data is not None:
                src_code = data
                break
    if src_code is None:
        import_error("Failed Import: '" + key + "' module doesn't exist in any of the import directories: " + import_dirs.join(':'))

    # best-effort read of the compile cache; any failure means no cache
    try:
        cached = JSON.parse(readfile(cache_file_name(filename, options.module_cache_dir), 'utf-8'))
    except:
        cached = None

    srchash = sha1sum(src_code) # noqa:undef
    # cache hit requires matching compiler version, source hash and
    # the discard_asserts setting used when the cache was written
    if cached and cached.version is COMPILER_VERSION and cached.signature is srchash and cached.discard_asserts is v'!!options.discard_asserts':
        for ikey in cached.imported_module_ids:
            do_import(ikey) # Ensure all modules imported by the cached module are also imported
        imported_modules[key] = {
            'is_cached':True, 'classes':cached.classes, 'outputs':cached.outputs, 'module_id':key, 'import_order':Object.keys(imported_modules).length,
            'nonlocalvars':cached.nonlocalvars, 'baselib':cached.baselib, 'exports':cached.exports, 'discard_asserts':options.discard_asserts,
            'imported_module_ids':cached.imported_module_ids,
            'src_code':src_code, 'filename':filename,
        }
    else:
        parse(src_code, {
            'filename': filename,
            'toplevel': None,
            'basedir': options.basedir,
            'libdir': options.libdir,
            'import_dirs': options.import_dirs,
            'module_id': key,
            'imported_modules': imported_modules,
            'importing_modules': importing_modules,
            'discard_asserts': options.discard_asserts,
            'module_cache_dir': options.module_cache_dir,
        }) # This function will add the module to imported_modules itself

    imported_modules[key].srchash = srchash

    # propagate the module's baselib requirements to the current unit
    for bitem in Object.keys(imported_modules[key].baselib):
        baselib_items[bitem] = True
|
|
1245
|
+
|
|
1246
|
+
def read_python_flags():
    # Handle `from __python__ import flag[, ...]` (optionally bracketed).
    # Each flag name sets a scoped compiler flag; a 'no_' prefix clears it.
    # Returns an empty-statement placeholder node.
    expect_token("keyword", "import")
    bracketed = is_('punc', '(')
    if bracketed:
        next()
    while True:
        if not is_('name'):
            croak('Name expected')
        name = S.token.value
        # 'no_foo' means set flag 'foo' to False
        val = False if name.startsWith('no_') else True
        if not val:
            name = name.slice(3)
        # NOTE(review): this only croaks when PYTHON_FLAGS itself is falsy;
        # it never checks whether `name` is a known flag — confirm whether
        # a per-name membership test was intended.
        if not PYTHON_FLAGS:
            croak('Unknown __python__ flag: ' + name)
        S.scoped_flags.set(name, val)
        next()
        if is_('punc', ','):
            next()
        else:
            if bracketed:
                if is_('punc', ')'):
                    next()
                else:
                    # inside brackets a newline without ',' still continues
                    continue
            break
    return new AST_EmptyStatement({'stype':'scoped_flags', 'start':prev(), 'end':prev()})
|
|
1272
|
+
|
|
1273
|
+
def import_(from_import):
    # Parse an import statement. `from_import` is truthy for
    # `from mod import ...`, falsy for `import mod[, mod2...]`.
    # Parses the module path(s), triggers do_import() for each, and
    # registers imported class names in the outermost class scope.
    ans = new AST_Imports({'imports':[]})
    while True:
        # the dotted module path parses as nested AST_Dot expressions
        tok = tmp = name = last_tok = expression(False)
        key = ''
        # unwind the dots to rebuild the textual module id in `key`
        while is_node_type(tmp, AST_Dot):
            key = "." + tmp.property + key
            tmp = last_tok = tmp.expression
        key = tmp.name + key
        if from_import and key is '__python__':
            # `from __python__ import ...` sets compiler flags instead
            return read_python_flags()
        alias = None
        if not from_import and is_('keyword', 'as'):
            next()
            alias = as_symbol(AST_SymbolAlias)
        aimp = new AST_Import({
            'module': name,
            'key': key,
            'alias': alias,
            'argnames':None,
            'body':def():
                return imported_modules[key]
        })
        aimp.start, aimp.end = tok.start, last_tok.end
        ans.imports.push(aimp)
        if from_import:
            break  # `from x import ...` has exactly one module
        if is_('punc', ','):
            next()
        else:
            break

    for imp in ans['imports']:
        do_import(imp.key)
        if imported_module_ids.indexOf(imp.key) is -1:
            imported_module_ids.push(imp.key)
        # NOTE(review): `key` here is whatever the parse loop (or a prior
        # iteration below) last assigned, not necessarily imp.key —
        # confirm whether `imported_modules[imp.key]` was intended.
        classes = imported_modules[key].classes
        if from_import:
            expect_token("keyword", "import")
            imp.argnames = argnames = []
            bracketed = is_('punc', '(')
            if bracketed:
                next()
            exports = {}
            for symdef in imported_modules[key].exports:
                exports[symdef.name] = True
            while True:
                aname = as_symbol(AST_ImportedVar)
                if not options.for_linting and not has_prop(exports, aname.name):
                    import_error('The symbol "' + aname.name + '" is not exported from the module: ' + key)
                if is_('keyword', 'as'):
                    next()
                    aname.alias = as_symbol(AST_SymbolAlias)
                argnames.push(aname)
                if is_('punc', ','):
                    next()
                else:
                    if bracketed:
                        if is_('punc', ')'):
                            next()
                        else:
                            continue
                    break

            # Put imported class names in the outermost scope
            for argvar in argnames:
                obj = classes[argvar.name]
                if obj:
                    # reuses `key` as the local binding name (alias wins)
                    key = argvar.alias.name if argvar.alias else argvar.name
                    S.classes[-1][key] = { "static": obj.static, 'classmethod': obj.classmethod, 'bound': obj.bound, 'classvars': obj.classvars }
        else:
            # plain import: register every class as `<module or alias>.<Class>`
            for cname in Object.keys(classes):
                obj = classes[cname]
                key = imp.alias.name if imp.alias else imp.key
                S.classes[-1][key + '.' + obj.name.name] = { 'static': obj.static, 'classmethod': obj.classmethod, 'bound': obj.bound, 'classvars': obj.classvars }

    return ans
|
|
1350
|
+
|
|
1351
|
+
def class_():
    # Parse a class definition ('class' keyword already consumed) and
    # return a fully populated AST_Class node. Also records the class's
    # bookkeeping details (static/classmethod/bound/classvars) in the
    # current entry of the S.classes scope stack.
    name = as_symbol(AST_SymbolDefun)
    if not name:
        unexpected()

    # detect external classes
    externaldecorator = has_simple_decorator(S.decorators, 'external')

    class_details = {
        "static": {},
        'classmethod': {},
        'bound': v'[]',
        'classvars': {},
        'processing': name.name,  # truthy while the class body is being parsed
        'provisional_classvars': {},
    }
    bases = v'[]'
    class_parent = None

    # read the bases of the class, if any
    if is_("punc", "("):
        S.in_parenthesized_expr = True
        next()
        while True:
            if is_('punc', ')'):
                S.in_parenthesized_expr = False
                next()
                break
            a = expr_atom(False)
            # first base listed becomes the primary parent
            if class_parent is None:
                class_parent = a
            bases.push(a)
            if is_('punc', ','):
                next()
                continue

    class_details['parent'] = class_parent

    docstrings = v'[]'
    definition = new AST_Class({
        'name': name,
        'docstrings': docstrings,
        'module_id':module_id,
        'dynamic_properties': Object.create(None),
        'parent': class_parent,
        'bases': bases,
        'localvars': [],
        'classvars': class_details.classvars,
        'static': class_details.static,
        'classmethod': class_details.classmethod,
        'external': externaldecorator,
        'bound': class_details.bound,
        'statements': [],
        # consume pending decorators into AST_Decorator nodes (IIFE so
        # S.decorators is cleared before the body is parsed)
        'decorators': (def():
            d = []
            for decorator in S.decorators:
                d.push(new AST_Decorator({
                    'expression': decorator
                }))
            S.decorators = v'[]'
            return d
        )(),
        # parse the class body inside a fresh class scope; the IIFE
        # saves/restores the loop counter and label list around it
        'body': (def(loop, labels):
            # navigate to correct location in the module tree and append the class
            S.in_class.push(name.name)
            S.classes[S.classes.length - 1][name.name] = class_details
            S.classes.push({})
            S.scoped_flags.push()
            S.in_function += 1
            S.in_loop = 0
            S.labels = []
            a = block_(docstrings)
            S.in_function -= 1
            S.scoped_flags.pop()
            S.classes.pop()
            S.in_class.pop()
            S.in_loop = loop
            S.labels = labels
            return a
        )(S.in_loop, S.labels)
    })
    class_details.processing = False
    # find the constructor
    for stmt in definition.body:
        if is_node_type(stmt, AST_Method):
            if stmt.is_getter or stmt.is_setter:
                # group getter/setter pairs under one property descriptor
                descriptor = definition.dynamic_properties[stmt.name.name]
                if not descriptor:
                    descriptor = definition.dynamic_properties[stmt.name.name] = {}
                descriptor['getter' if stmt.is_getter else 'setter'] = stmt
            elif stmt.name.name is "__init__":
                definition.init = stmt
    # find the class variables
    class_var_names = {}
    # Ensure that if a class variable refers to another class variable in
    # its initialization, the referenced variables' names is correctly
    # mangled.
    def walker():
        def visit_node(node, descend):
            if is_node_type(node, AST_Method):
                # method names count as class-level names; don't descend
                class_var_names[node.name.name] = True
                return
            if is_node_type(node, AST_Function):
                # nested functions have their own scope; skip
                return
            if is_node_type(node, AST_Assign) and is_node_type(node.left, AST_SymbolRef):
                varname = node.left.name
                if FORBIDDEN_CLASS_VARS.indexOf(varname) is not -1:
                    token_error(node.left.start, varname + ' is not allowed as a class variable name')
                class_var_names[varname] = True
                definition.classvars[varname] = True
            elif is_node_type(node, AST_AnnotatedAssign) and is_node_type(node.target, AST_SymbolRef):
                varname = node.target.name
                if FORBIDDEN_CLASS_VARS.indexOf(varname) is not -1:
                    token_error(node.target.start, varname + ' is not allowed as a class variable name')
                class_var_names[varname] = True
                definition.classvars[varname] = True
            elif is_node_type(node, AST_SymbolRef) and has_prop(class_var_names, node.name):
                # rewrite references to class vars as Cls.prototype.<name>
                node.thedef = new AST_SymbolDefun({'name':name.name + '.prototype.' + node.name})
            if descend:
                descend.call(node)
        this._visit = visit_node
    visitor = new walker()

    for stmt in definition.body:
        if not is_node_type(stmt, AST_Class):
            stmt.walk(visitor)
        definition.statements.push(stmt)
    return definition
|
|
1479
|
+
|
|
1480
|
+
def function_(in_class, is_expression, is_async=False):
|
|
1481
|
+
name = as_symbol(AST_SymbolDefun if in_class else AST_SymbolLambda) if is_('name') else None
|
|
1482
|
+
if in_class and not name:
|
|
1483
|
+
croak('Cannot use anonymous function as class methods')
|
|
1484
|
+
is_anonymous = not name
|
|
1485
|
+
|
|
1486
|
+
staticmethod = property_getter = property_setter = classmethod_flag = False
|
|
1487
|
+
if in_class:
|
|
1488
|
+
staticloc = has_simple_decorator(S.decorators, 'staticmethod')
|
|
1489
|
+
classmethodloc = has_simple_decorator(S.decorators, 'classmethod')
|
|
1490
|
+
property_getter = has_simple_decorator(S.decorators, 'property')
|
|
1491
|
+
property_setter = has_setter_decorator(S.decorators, name.name)
|
|
1492
|
+
if staticloc:
|
|
1493
|
+
if property_getter or property_setter:
|
|
1494
|
+
croak('A method cannot be both static and a property getter/setter')
|
|
1495
|
+
S.classes[S.classes.length - 2][in_class].static[name.name] = True
|
|
1496
|
+
staticmethod = True
|
|
1497
|
+
elif classmethodloc:
|
|
1498
|
+
if property_getter or property_setter:
|
|
1499
|
+
croak('A method cannot be both classmethod and a property getter/setter')
|
|
1500
|
+
S.classes[S.classes.length - 2][in_class].classmethod[name.name] = True
|
|
1501
|
+
classmethod_flag = True
|
|
1502
|
+
elif name.name is not "__init__" and S.scoped_flags.get('bound_methods'):
|
|
1503
|
+
S.classes[S.classes.length - 2][in_class].bound.push(name.name)
|
|
1504
|
+
|
|
1505
|
+
expect("(")
|
|
1506
|
+
S.in_parenthesized_expr = True
|
|
1507
|
+
ctor = AST_Method if in_class else AST_Function
|
|
1508
|
+
return_annotation = None
|
|
1509
|
+
is_generator = v'[]'
|
|
1510
|
+
docstrings = v'[]'
|
|
1511
|
+
cm_cls_arg = v'[]' # mutable: holds [cls_argname] for classmethod context
|
|
1512
|
+
definition = new ctor({
|
|
1513
|
+
'name': name,
|
|
1514
|
+
'is_expression': is_expression,
|
|
1515
|
+
'is_anonymous': is_anonymous,
|
|
1516
|
+
'argnames': (def(a):
|
|
1517
|
+
defaults = {}
|
|
1518
|
+
first = True
|
|
1519
|
+
seen_names = {}
|
|
1520
|
+
def_line = S.input.context().tokline
|
|
1521
|
+
current_arg_name = None
|
|
1522
|
+
name_token = None
|
|
1523
|
+
|
|
1524
|
+
def get_arg():
|
|
1525
|
+
nonlocal current_arg_name, name_token
|
|
1526
|
+
current_arg_name = S.token.value
|
|
1527
|
+
if has_prop(seen_names, current_arg_name):
|
|
1528
|
+
token_error(prev(), "Can't repeat parameter names")
|
|
1529
|
+
if current_arg_name is 'arguments':
|
|
1530
|
+
token_error(prev(), "Can't use the name arguments as a parameter name, it is reserved by JavaScript")
|
|
1531
|
+
seen_names[current_arg_name] = True
|
|
1532
|
+
# save these in order to move back if we have an annotation
|
|
1533
|
+
name_token = S.token
|
|
1534
|
+
name_ctx = S.input.context()
|
|
1535
|
+
# check if we have an argument annotation
|
|
1536
|
+
ntok = peek()
|
|
1537
|
+
if ntok.type is 'punc' and ntok.value is ':':
|
|
1538
|
+
next()
|
|
1539
|
+
expect(':')
|
|
1540
|
+
annotation = maybe_conditional()
|
|
1541
|
+
|
|
1542
|
+
# and now, do as_symbol without the next() at the end
|
|
1543
|
+
# since we are already at the next comma (or end bracket)
|
|
1544
|
+
if not is_token(name_token, "name"):
|
|
1545
|
+
# assuming the previous context in case
|
|
1546
|
+
# the annotation was over the line
|
|
1547
|
+
croak("Name expected", name_ctx.tokline)
|
|
1548
|
+
return None
|
|
1549
|
+
|
|
1550
|
+
sym = new AST_SymbolFunarg({
|
|
1551
|
+
'name': name_token.value,
|
|
1552
|
+
'start': S.token,
|
|
1553
|
+
'end': S.token,
|
|
1554
|
+
'annotation': annotation
|
|
1555
|
+
})
|
|
1556
|
+
return sym
|
|
1557
|
+
else:
|
|
1558
|
+
if not is_("name"):
|
|
1559
|
+
# there is no name, which is an error we should report on the
|
|
1560
|
+
# same line as the definition, so move to that is we're not already there.
|
|
1561
|
+
if S.input.context().tokline is not def_line:
|
|
1562
|
+
croak("Name expected", def_line)
|
|
1563
|
+
else:
|
|
1564
|
+
croak("Name expected")
|
|
1565
|
+
return None
|
|
1566
|
+
|
|
1567
|
+
sym = new AST_SymbolFunarg({
|
|
1568
|
+
'name': current_arg_name,
|
|
1569
|
+
'start': S.token,
|
|
1570
|
+
'end': S.token,
|
|
1571
|
+
'annotation': None
|
|
1572
|
+
})
|
|
1573
|
+
next()
|
|
1574
|
+
return sym
|
|
1575
|
+
|
|
1576
|
+
posonly_done = False
|
|
1577
|
+
bare_star_done = False
|
|
1578
|
+
kwonly_has_defaults = False
|
|
1579
|
+
a.posonly_count = 0
|
|
1580
|
+
a.kwonly_count = 0
|
|
1581
|
+
a.bare_star = False
|
|
1582
|
+
|
|
1583
|
+
while not is_("punc", ")"):
|
|
1584
|
+
if first:
|
|
1585
|
+
first = False
|
|
1586
|
+
else:
|
|
1587
|
+
expect(",")
|
|
1588
|
+
if is_('punc', ')'):
|
|
1589
|
+
break
|
|
1590
|
+
if is_('operator', '**'):
|
|
1591
|
+
# **kwargs
|
|
1592
|
+
next()
|
|
1593
|
+
if a.kwargs:
|
|
1594
|
+
token_error(name_token, "Can't define multiple **kwargs in function definition")
|
|
1595
|
+
a.kwargs = get_arg()
|
|
1596
|
+
elif is_('operator', '*'):
|
|
1597
|
+
# *args or bare * (keyword-only separator)
|
|
1598
|
+
next()
|
|
1599
|
+
if is_('punc', ',') or is_('punc', ')'):
|
|
1600
|
+
# Bare star: keyword-only separator
|
|
1601
|
+
if bare_star_done:
|
|
1602
|
+
token_error(S.token, "Only one bare '*' keyword-only separator is allowed in function parameter list")
|
|
1603
|
+
if a.starargs:
|
|
1604
|
+
token_error(S.token, "Cannot use both '*args' and bare '*' in function parameter list")
|
|
1605
|
+
if a.kwargs:
|
|
1606
|
+
token_error(S.token, "Bare '*' must come before '**kwargs'")
|
|
1607
|
+
a.bare_star = True
|
|
1608
|
+
bare_star_done = True
|
|
1609
|
+
else:
|
|
1610
|
+
# *args
|
|
1611
|
+
if a.starargs:
|
|
1612
|
+
token_error(name_token, "Can't define multiple *args in function definition")
|
|
1613
|
+
if a.kwargs:
|
|
1614
|
+
token_error(name_token, "Can't define *args after **kwargs in function definition")
|
|
1615
|
+
a.starargs = get_arg()
|
|
1616
|
+
elif is_('operator', '/'):
|
|
1617
|
+
# Positional-only separator
|
|
1618
|
+
if posonly_done:
|
|
1619
|
+
token_error(S.token, "Only one '/' positional-only separator is allowed in function parameter list")
|
|
1620
|
+
if bare_star_done or a.starargs:
|
|
1621
|
+
token_error(S.token, "'/' positional-only separator must come before '*'")
|
|
1622
|
+
if a.kwargs:
|
|
1623
|
+
token_error(S.token, "'/' positional-only separator must come before '**kwargs'")
|
|
1624
|
+
if a.length is 0:
|
|
1625
|
+
token_error(S.token, "At least one argument must precede '/' positional-only separator")
|
|
1626
|
+
next()
|
|
1627
|
+
for v'var pi = 0; pi < a.length; pi++':
|
|
1628
|
+
a[pi].posonly = True
|
|
1629
|
+
a.posonly_count = a.length
|
|
1630
|
+
posonly_done = True
|
|
1631
|
+
else:
|
|
1632
|
+
if a.starargs:
|
|
1633
|
+
token_error(name_token, "Can't define a formal parameter after *args")
|
|
1634
|
+
if a.kwargs:
|
|
1635
|
+
token_error(name_token, "Can't define a formal parameter after **kwargs")
|
|
1636
|
+
arg = get_arg()
|
|
1637
|
+
if bare_star_done:
|
|
1638
|
+
arg.kwonly = True
|
|
1639
|
+
a.kwonly_count += 1
|
|
1640
|
+
a.push(arg)
|
|
1641
|
+
if is_("operator", "="):
|
|
1642
|
+
next()
|
|
1643
|
+
defaults[current_arg_name] = expression(False)
|
|
1644
|
+
if bare_star_done:
|
|
1645
|
+
kwonly_has_defaults = True
|
|
1646
|
+
else:
|
|
1647
|
+
a.has_defaults = True
|
|
1648
|
+
else:
|
|
1649
|
+
if bare_star_done:
|
|
1650
|
+
if kwonly_has_defaults:
|
|
1651
|
+
token_error(name_token, "Can't define required keyword-only parameters after optional keyword-only parameters")
|
|
1652
|
+
else:
|
|
1653
|
+
if a.has_defaults:
|
|
1654
|
+
token_error(name_token, "Can't define required formal parameters after optional formal parameters")
|
|
1655
|
+
|
|
1656
|
+
if bare_star_done and a.kwonly_count is 0:
|
|
1657
|
+
token_error(S.token, "Named arguments must follow bare '*'")
|
|
1658
|
+
|
|
1659
|
+
next()
|
|
1660
|
+
# check if we have a return type annotation
|
|
1661
|
+
if is_("punc", "->"):
|
|
1662
|
+
next()
|
|
1663
|
+
nonlocal return_annotation
|
|
1664
|
+
return_annotation = maybe_conditional()
|
|
1665
|
+
S.in_parenthesized_expr = False
|
|
1666
|
+
a.defaults = defaults
|
|
1667
|
+
a.is_simple_func = not a.starargs and not a.kwargs and not a.has_defaults and not a.bare_star and not a.posonly_count
|
|
1668
|
+
if classmethod_flag and a.length > 0:
|
|
1669
|
+
cm_cls_arg.push(a[0].name)
|
|
1670
|
+
return a
|
|
1671
|
+
)(v'[]'),
|
|
1672
|
+
'localvars': [],
|
|
1673
|
+
'decorators': (def():
|
|
1674
|
+
d = v'[]'
|
|
1675
|
+
for decorator in S.decorators:
|
|
1676
|
+
d.push(new AST_Decorator({
|
|
1677
|
+
'expression': decorator
|
|
1678
|
+
}))
|
|
1679
|
+
S.decorators = v'[]'
|
|
1680
|
+
return d
|
|
1681
|
+
)(),
|
|
1682
|
+
'docstrings': docstrings,
|
|
1683
|
+
'body': (def(loop, labels):
|
|
1684
|
+
S.in_class.push(False)
|
|
1685
|
+
cm_pushed = v'[false]'
|
|
1686
|
+
if classmethod_flag and in_class and cm_cls_arg.length > 0:
|
|
1687
|
+
cm_ctx_entry = None
|
|
1688
|
+
for v'var si = S.classes.length - 1; si >= 0; si--':
|
|
1689
|
+
if has_prop(S.classes[si], in_class):
|
|
1690
|
+
cm_ctx_entry = S.classes[si][in_class]
|
|
1691
|
+
break
|
|
1692
|
+
if cm_ctx_entry:
|
|
1693
|
+
S.classmethod_ctx_stack.push({'cls_name': cm_cls_arg[0], 'class_entry': cm_ctx_entry})
|
|
1694
|
+
cm_pushed[0] = True
|
|
1695
|
+
S.classes.push({})
|
|
1696
|
+
S.scoped_flags.push()
|
|
1697
|
+
S.in_function += 1
|
|
1698
|
+
S.functions.push({})
|
|
1699
|
+
S.in_loop = 0
|
|
1700
|
+
S.labels = []
|
|
1701
|
+
a = block_(docstrings)
|
|
1702
|
+
S.in_function -= 1
|
|
1703
|
+
S.scoped_flags.pop()
|
|
1704
|
+
is_generator.push(bool(S.functions.pop().is_generator))
|
|
1705
|
+
S.classes.pop()
|
|
1706
|
+
S.in_class.pop()
|
|
1707
|
+
S.in_loop = loop
|
|
1708
|
+
S.labels = labels
|
|
1709
|
+
if cm_pushed[0]:
|
|
1710
|
+
S.classmethod_ctx_stack.pop()
|
|
1711
|
+
return a
|
|
1712
|
+
)(S.in_loop, S.labels)
|
|
1713
|
+
})
|
|
1714
|
+
definition.return_annotation = return_annotation
|
|
1715
|
+
definition.is_generator = is_generator[0]
|
|
1716
|
+
definition.is_async = is_async
|
|
1717
|
+
if is_node_type(definition, AST_Method):
|
|
1718
|
+
definition.static = staticmethod
|
|
1719
|
+
definition.is_classmethod = classmethod_flag
|
|
1720
|
+
definition.is_getter = property_getter
|
|
1721
|
+
definition.is_setter = property_setter
|
|
1722
|
+
if definition.argnames.length < 1 and not definition.static:
|
|
1723
|
+
croak('Methods of a class must have at least one argument, traditionally named self')
|
|
1724
|
+
if definition.name and definition.name.name is '__init__':
|
|
1725
|
+
if definition.is_generator:
|
|
1726
|
+
croak('The __init__ method of a class cannot be a generator (yield not allowed)')
|
|
1727
|
+
if property_getter or property_setter:
|
|
1728
|
+
croak('The __init__ method of a class cannot be a property getter/setter')
|
|
1729
|
+
if definition.is_generator:
|
|
1730
|
+
baselib_items['yield'] = True
|
|
1731
|
+
|
|
1732
|
+
# detect local variables, strip function arguments
|
|
1733
|
+
assignments = scan_for_local_vars(definition.body)
|
|
1734
|
+
for i in range(assignments.length):
|
|
1735
|
+
for j in range(definition.argnames.length+1):
|
|
1736
|
+
if j is definition.argnames.length:
|
|
1737
|
+
definition.localvars.push(new_symbol(AST_SymbolVar, assignments[i]))
|
|
1738
|
+
elif j < definition.argnames.length and assignments[i] is definition.argnames[j].name:
|
|
1739
|
+
break
|
|
1740
|
+
|
|
1741
|
+
nonlocals = scan_for_nonlocal_defs(definition.body)
|
|
1742
|
+
nonlocals = {name for name in nonlocals}
|
|
1743
|
+
definition.localvars = definition.localvars.filter(def(v): return not nonlocals.has(v.name);)
|
|
1744
|
+
return definition
|
|
1745
|
+
|
|
1746
|
+
def lambda_():
|
|
1747
|
+
# Parse Python-style lambda: lambda arg1, arg2, ...: expression
|
|
1748
|
+
# Arguments are comma-separated names (no parentheses), terminated by ':'
|
|
1749
|
+
is_generator = v'[]'
|
|
1750
|
+
argnames = (def(a):
|
|
1751
|
+
defaults = {}
|
|
1752
|
+
first = True
|
|
1753
|
+
seen_names = {}
|
|
1754
|
+
|
|
1755
|
+
def get_arg():
|
|
1756
|
+
if not is_("name"):
|
|
1757
|
+
croak("Name expected in lambda argument list")
|
|
1758
|
+
return None
|
|
1759
|
+
current_arg_name = S.token.value
|
|
1760
|
+
if has_prop(seen_names, current_arg_name):
|
|
1761
|
+
token_error(S.token, "Can't repeat parameter names")
|
|
1762
|
+
seen_names[current_arg_name] = True
|
|
1763
|
+
sym = new AST_SymbolFunarg({
|
|
1764
|
+
'name': current_arg_name,
|
|
1765
|
+
'start': S.token,
|
|
1766
|
+
'end': S.token,
|
|
1767
|
+
'annotation': None
|
|
1768
|
+
})
|
|
1769
|
+
next()
|
|
1770
|
+
return sym
|
|
1771
|
+
|
|
1772
|
+
while not is_("punc", ":"):
|
|
1773
|
+
if first:
|
|
1774
|
+
first = False
|
|
1775
|
+
else:
|
|
1776
|
+
expect(",")
|
|
1777
|
+
if is_("punc", ":"):
|
|
1778
|
+
break
|
|
1779
|
+
if is_('operator', '**'):
|
|
1780
|
+
next()
|
|
1781
|
+
if a.kwargs:
|
|
1782
|
+
croak("Can't define multiple **kwargs in lambda")
|
|
1783
|
+
a.kwargs = get_arg()
|
|
1784
|
+
elif is_('operator', '*'):
|
|
1785
|
+
next()
|
|
1786
|
+
if a.starargs:
|
|
1787
|
+
croak("Can't define multiple *args in lambda")
|
|
1788
|
+
if a.kwargs:
|
|
1789
|
+
croak("Can't define *args after **kwargs in lambda")
|
|
1790
|
+
a.starargs = get_arg()
|
|
1791
|
+
else:
|
|
1792
|
+
if a.starargs or a.kwargs:
|
|
1793
|
+
croak("Can't define a formal parameter after *args or **kwargs")
|
|
1794
|
+
arg = get_arg()
|
|
1795
|
+
a.push(arg)
|
|
1796
|
+
if is_("operator", "="):
|
|
1797
|
+
next()
|
|
1798
|
+
defaults[arg.name] = maybe_conditional()
|
|
1799
|
+
a.has_defaults = True
|
|
1800
|
+
else:
|
|
1801
|
+
if a.has_defaults:
|
|
1802
|
+
croak("Can't define required formal parameters after optional formal parameters")
|
|
1803
|
+
a.defaults = defaults
|
|
1804
|
+
a.is_simple_func = not a.starargs and not a.kwargs and not a.has_defaults
|
|
1805
|
+
return a
|
|
1806
|
+
)(v'[]')
|
|
1807
|
+
|
|
1808
|
+
expect(":")
|
|
1809
|
+
|
|
1810
|
+
# Parse the body expression inside a function scope (mirrors function_())
|
|
1811
|
+
S.in_class.push(False)
|
|
1812
|
+
S.classes.push({})
|
|
1813
|
+
S.scoped_flags.push()
|
|
1814
|
+
S.in_function += 1
|
|
1815
|
+
S.functions.push({})
|
|
1816
|
+
body_expr = maybe_conditional()
|
|
1817
|
+
S.in_function -= 1
|
|
1818
|
+
S.scoped_flags.pop()
|
|
1819
|
+
is_generator.push(bool(S.functions.pop().is_generator))
|
|
1820
|
+
S.classes.pop()
|
|
1821
|
+
S.in_class.pop()
|
|
1822
|
+
|
|
1823
|
+
ret_node = new AST_Return({'value': body_expr, 'start': body_expr.start, 'end': body_expr.end})
|
|
1824
|
+
body = v'[ret_node]'
|
|
1825
|
+
|
|
1826
|
+
definition = new AST_Function({
|
|
1827
|
+
'name': None,
|
|
1828
|
+
'is_expression': True,
|
|
1829
|
+
'is_anonymous': True,
|
|
1830
|
+
'argnames': argnames,
|
|
1831
|
+
'localvars': [],
|
|
1832
|
+
'decorators': [],
|
|
1833
|
+
'docstrings': [],
|
|
1834
|
+
'body': body,
|
|
1835
|
+
})
|
|
1836
|
+
definition.return_annotation = None
|
|
1837
|
+
definition.is_generator = is_generator[0]
|
|
1838
|
+
definition.is_async = False
|
|
1839
|
+
|
|
1840
|
+
assignments = scan_for_local_vars(definition.body)
|
|
1841
|
+
for i in range(assignments.length):
|
|
1842
|
+
for j in range(definition.argnames.length+1):
|
|
1843
|
+
if j is definition.argnames.length:
|
|
1844
|
+
definition.localvars.push(new_symbol(AST_SymbolVar, assignments[i]))
|
|
1845
|
+
elif j < definition.argnames.length and assignments[i] is definition.argnames[j].name:
|
|
1846
|
+
break
|
|
1847
|
+
|
|
1848
|
+
nonlocals = scan_for_nonlocal_defs(definition.body)
|
|
1849
|
+
nonlocals = {name for name in nonlocals}
|
|
1850
|
+
definition.localvars = definition.localvars.filter(def(v): return not nonlocals.has(v.name);)
|
|
1851
|
+
return definition
|
|
1852
|
+
|
|
1853
|
+
def if_():
|
|
1854
|
+
cond = expression(True)
|
|
1855
|
+
body = statement()
|
|
1856
|
+
belse = None
|
|
1857
|
+
if is_("keyword", "elif") or is_("keyword", "else"):
|
|
1858
|
+
if is_("keyword", "else"):
|
|
1859
|
+
next()
|
|
1860
|
+
else:
|
|
1861
|
+
S.token.value = "if"
|
|
1862
|
+
# effectively converts 'elif' to 'else if'
|
|
1863
|
+
belse = statement()
|
|
1864
|
+
|
|
1865
|
+
return new AST_If({
|
|
1866
|
+
'condition': cond,
|
|
1867
|
+
'body': body,
|
|
1868
|
+
'alternative': belse
|
|
1869
|
+
})
|
|
1870
|
+
|
|
1871
|
+
def is_docstring(stmt):
|
|
1872
|
+
if is_node_type(stmt, AST_SimpleStatement):
|
|
1873
|
+
if is_node_type(stmt.body, AST_String):
|
|
1874
|
+
return stmt.body
|
|
1875
|
+
return False
|
|
1876
|
+
|
|
1877
|
+
def block_(docstrings):
|
|
1878
|
+
prev_whitespace = S.token.leading_whitespace
|
|
1879
|
+
expect(":")
|
|
1880
|
+
a = v'[]'
|
|
1881
|
+
if not S.token.nlb:
|
|
1882
|
+
while not S.token.nlb:
|
|
1883
|
+
if is_("eof"):
|
|
1884
|
+
unexpected()
|
|
1885
|
+
stmt = statement()
|
|
1886
|
+
if docstrings:
|
|
1887
|
+
ds = is_docstring(stmt)
|
|
1888
|
+
if ds:
|
|
1889
|
+
docstrings.push(ds)
|
|
1890
|
+
continue
|
|
1891
|
+
a.push(stmt)
|
|
1892
|
+
else:
|
|
1893
|
+
current_whitespace = S.token.leading_whitespace
|
|
1894
|
+
if current_whitespace.length is 0 or prev_whitespace is current_whitespace:
|
|
1895
|
+
croak('Expected an indented block')
|
|
1896
|
+
while not is_("punc", "}"):
|
|
1897
|
+
if is_("eof"):
|
|
1898
|
+
# end of file, terminate block automatically
|
|
1899
|
+
return a
|
|
1900
|
+
stmt = statement()
|
|
1901
|
+
if docstrings:
|
|
1902
|
+
ds = is_docstring(stmt)
|
|
1903
|
+
if ds:
|
|
1904
|
+
docstrings.push(ds)
|
|
1905
|
+
continue
|
|
1906
|
+
a.push(stmt)
|
|
1907
|
+
next()
|
|
1908
|
+
return a
|
|
1909
|
+
|
|
1910
|
+
def try_():
|
|
1911
|
+
body = block_()
|
|
1912
|
+
bcatch = v'[]'
|
|
1913
|
+
bfinally = None
|
|
1914
|
+
belse = None
|
|
1915
|
+
while is_("keyword", "except"):
|
|
1916
|
+
start = S.token
|
|
1917
|
+
next()
|
|
1918
|
+
exceptions = []
|
|
1919
|
+
if not is_("punc", ":") and not is_("keyword", "as"):
|
|
1920
|
+
exceptions.push(as_symbol(AST_SymbolVar))
|
|
1921
|
+
while is_("punc", ","):
|
|
1922
|
+
next()
|
|
1923
|
+
exceptions.push(as_symbol(AST_SymbolVar))
|
|
1924
|
+
|
|
1925
|
+
name = None
|
|
1926
|
+
if is_("keyword", "as"):
|
|
1927
|
+
next()
|
|
1928
|
+
name = as_symbol(AST_SymbolCatch)
|
|
1929
|
+
|
|
1930
|
+
bcatch.push(new AST_Except({
|
|
1931
|
+
'start': start,
|
|
1932
|
+
'argname': name,
|
|
1933
|
+
'errors': exceptions,
|
|
1934
|
+
'body': block_(),
|
|
1935
|
+
'end': prev()
|
|
1936
|
+
}))
|
|
1937
|
+
|
|
1938
|
+
if is_("keyword", "else"):
|
|
1939
|
+
start = S.token
|
|
1940
|
+
next()
|
|
1941
|
+
belse = new AST_Else({
|
|
1942
|
+
'start': start,
|
|
1943
|
+
'body': block_(),
|
|
1944
|
+
'end': prev()
|
|
1945
|
+
})
|
|
1946
|
+
|
|
1947
|
+
if is_("keyword", "finally"):
|
|
1948
|
+
start = S.token
|
|
1949
|
+
next()
|
|
1950
|
+
bfinally = new AST_Finally({
|
|
1951
|
+
'start': start,
|
|
1952
|
+
'body': block_(),
|
|
1953
|
+
'end': prev()
|
|
1954
|
+
})
|
|
1955
|
+
|
|
1956
|
+
if not bcatch.length and not bfinally:
|
|
1957
|
+
croak("Missing except/finally blocks")
|
|
1958
|
+
|
|
1959
|
+
return new AST_Try({
|
|
1960
|
+
'body': body,
|
|
1961
|
+
'bcatch': (new AST_Catch({ 'body': bcatch }) if bcatch.length else None),
|
|
1962
|
+
'bfinally': bfinally,
|
|
1963
|
+
'belse': belse
|
|
1964
|
+
})
|
|
1965
|
+
|
|
1966
|
+
def vardefs(symbol_class):
|
|
1967
|
+
a = []
|
|
1968
|
+
while True:
|
|
1969
|
+
a.push(new AST_VarDef({
|
|
1970
|
+
'start': S.token,
|
|
1971
|
+
'name': as_symbol(symbol_class),
|
|
1972
|
+
'value': (next(), expression(False)) if is_('operator', '=') else None,
|
|
1973
|
+
'end': prev()
|
|
1974
|
+
}))
|
|
1975
|
+
if not is_("punc", ","):
|
|
1976
|
+
break
|
|
1977
|
+
next()
|
|
1978
|
+
|
|
1979
|
+
return a
|
|
1980
|
+
|
|
1981
|
+
def nonlocal_(is_global):
|
|
1982
|
+
defs = vardefs(AST_SymbolNonlocal)
|
|
1983
|
+
if is_global:
|
|
1984
|
+
for vardef in defs:
|
|
1985
|
+
S.globals.push(vardef.name.name)
|
|
1986
|
+
return new AST_Var({
|
|
1987
|
+
'start': prev(),
|
|
1988
|
+
'definitions': defs,
|
|
1989
|
+
'end': prev()
|
|
1990
|
+
})
|
|
1991
|
+
|
|
1992
|
+
def new_():
|
|
1993
|
+
start = S.token
|
|
1994
|
+
expect_token("operator", "new")
|
|
1995
|
+
newexp = expr_atom(False)
|
|
1996
|
+
|
|
1997
|
+
if is_("punc", "("):
|
|
1998
|
+
S.in_parenthesized_expr = True
|
|
1999
|
+
next()
|
|
2000
|
+
args = func_call_list()
|
|
2001
|
+
S.in_parenthesized_expr = False
|
|
2002
|
+
else:
|
|
2003
|
+
args = func_call_list(True)
|
|
2004
|
+
return subscripts(new AST_New({
|
|
2005
|
+
'start': start,
|
|
2006
|
+
'expression': newexp,
|
|
2007
|
+
'args': args,
|
|
2008
|
+
'end': prev()
|
|
2009
|
+
}), True)
|
|
2010
|
+
|
|
2011
|
+
def string_():
|
|
2012
|
+
strings = v'[]'
|
|
2013
|
+
start = S.token
|
|
2014
|
+
while True:
|
|
2015
|
+
strings.push(S.token.value)
|
|
2016
|
+
if peek().type is not 'string':
|
|
2017
|
+
break
|
|
2018
|
+
next()
|
|
2019
|
+
return new AST_String({
|
|
2020
|
+
'start': start,
|
|
2021
|
+
'end': S.token,
|
|
2022
|
+
'value': strings.join('')
|
|
2023
|
+
})
|
|
2024
|
+
|
|
2025
|
+
def token_as_atom_node():
|
|
2026
|
+
tok = S.token
|
|
2027
|
+
tmp_ = tok.type
|
|
2028
|
+
if tmp_ is "name":
|
|
2029
|
+
return token_as_symbol(tok, AST_SymbolRef)
|
|
2030
|
+
elif tmp_ is "num":
|
|
2031
|
+
return new AST_Number({
|
|
2032
|
+
'start': tok,
|
|
2033
|
+
'end': tok,
|
|
2034
|
+
'value': tok.value
|
|
2035
|
+
})
|
|
2036
|
+
elif tmp_ is "string":
|
|
2037
|
+
return string_()
|
|
2038
|
+
elif tmp_ is "regexp":
|
|
2039
|
+
return new AST_RegExp({
|
|
2040
|
+
'start': tok,
|
|
2041
|
+
'end': tok,
|
|
2042
|
+
'value': tok.value
|
|
2043
|
+
})
|
|
2044
|
+
elif tmp_ is "atom":
|
|
2045
|
+
tmp__ = tok.value
|
|
2046
|
+
if tmp__ is "False":
|
|
2047
|
+
return new AST_False({
|
|
2048
|
+
'start': tok,
|
|
2049
|
+
'end': tok
|
|
2050
|
+
})
|
|
2051
|
+
elif tmp__ is "True":
|
|
2052
|
+
return new AST_True({
|
|
2053
|
+
'start': tok,
|
|
2054
|
+
'end': tok
|
|
2055
|
+
})
|
|
2056
|
+
elif tmp__ is "None":
|
|
2057
|
+
return new AST_Null({
|
|
2058
|
+
'start': tok,
|
|
2059
|
+
'end': tok
|
|
2060
|
+
})
|
|
2061
|
+
elif tmp__ is "Ellipsis":
|
|
2062
|
+
return new AST_Ellipsis({
|
|
2063
|
+
'start': tok,
|
|
2064
|
+
'end': tok
|
|
2065
|
+
})
|
|
2066
|
+
elif tmp_ is "js":
|
|
2067
|
+
return new AST_Verbatim({
|
|
2068
|
+
'start': tok,
|
|
2069
|
+
'end': tok,
|
|
2070
|
+
'value': tok.value,
|
|
2071
|
+
})
|
|
2072
|
+
token_error(tok, 'Expecting an atomic token (number/string/bool/regexp/js/None)')
|
|
2073
|
+
|
|
2074
|
+
def as_atom_node():
|
|
2075
|
+
ret = token_as_atom_node()
|
|
2076
|
+
next()
|
|
2077
|
+
return ret
|
|
2078
|
+
|
|
2079
|
+
def expr_atom(allow_calls):
|
|
2080
|
+
if is_("operator", "new"):
|
|
2081
|
+
return new_()
|
|
2082
|
+
|
|
2083
|
+
start = S.token
|
|
2084
|
+
if is_("punc"):
|
|
2085
|
+
tmp_ = start.value
|
|
2086
|
+
if tmp_ is "(":
|
|
2087
|
+
S.in_parenthesized_expr = True
|
|
2088
|
+
next()
|
|
2089
|
+
if is_('punc', ')'):
|
|
2090
|
+
next()
|
|
2091
|
+
return new AST_Array({'elements':[]})
|
|
2092
|
+
ex = expression(True)
|
|
2093
|
+
if is_('keyword', 'for'):
|
|
2094
|
+
ret = read_comprehension(new AST_GeneratorComprehension({'statement': ex}), ')')
|
|
2095
|
+
S.in_parenthesized_expr = False
|
|
2096
|
+
return ret
|
|
2097
|
+
ex.start = start
|
|
2098
|
+
ex.end = S.token
|
|
2099
|
+
if is_node_type(ex, AST_SymbolRef):
|
|
2100
|
+
ex.parens = True
|
|
2101
|
+
if not is_node_type(ex, AST_GeneratorComprehension):
|
|
2102
|
+
expect(")")
|
|
2103
|
+
if is_node_type(ex, AST_UnaryPrefix):
|
|
2104
|
+
ex.parenthesized = True
|
|
2105
|
+
S.in_parenthesized_expr = False
|
|
2106
|
+
return subscripts(ex, allow_calls)
|
|
2107
|
+
elif tmp_ is "[":
|
|
2108
|
+
return subscripts(array_(), allow_calls)
|
|
2109
|
+
elif tmp_ is "{":
|
|
2110
|
+
return subscripts(object_(), allow_calls)
|
|
2111
|
+
|
|
2112
|
+
unexpected()
|
|
2113
|
+
|
|
2114
|
+
if is_("keyword", "class"):
|
|
2115
|
+
next()
|
|
2116
|
+
cls = class_()
|
|
2117
|
+
cls.start = start
|
|
2118
|
+
cls.end = prev()
|
|
2119
|
+
return subscripts(cls, allow_calls)
|
|
2120
|
+
|
|
2121
|
+
if is_("keyword", "def"):
|
|
2122
|
+
next()
|
|
2123
|
+
func = function_(False, True)
|
|
2124
|
+
func.start = start
|
|
2125
|
+
func.end = prev()
|
|
2126
|
+
return subscripts(func, allow_calls)
|
|
2127
|
+
|
|
2128
|
+
if is_("keyword", "lambda"):
|
|
2129
|
+
next()
|
|
2130
|
+
func = lambda_()
|
|
2131
|
+
func.start = start
|
|
2132
|
+
func.end = prev()
|
|
2133
|
+
return subscripts(func, allow_calls)
|
|
2134
|
+
|
|
2135
|
+
if is_('keyword', 'yield'):
|
|
2136
|
+
next()
|
|
2137
|
+
return yield_()
|
|
2138
|
+
|
|
2139
|
+
if is_('keyword', 'async'):
|
|
2140
|
+
next()
|
|
2141
|
+
if not is_("keyword", "def"):
|
|
2142
|
+
croak("Expected 'def' after 'async'")
|
|
2143
|
+
next()
|
|
2144
|
+
func = function_(False, True, True)
|
|
2145
|
+
func.start = start
|
|
2146
|
+
func.end = prev()
|
|
2147
|
+
return subscripts(func, allow_calls)
|
|
2148
|
+
|
|
2149
|
+
if is_('keyword', 'await'):
|
|
2150
|
+
next()
|
|
2151
|
+
return new AST_Await({'start': start, 'value': expression(False), 'end': prev()})
|
|
2152
|
+
|
|
2153
|
+
if ATOMIC_START_TOKEN[S.token.type]:
|
|
2154
|
+
return subscripts(as_atom_node(), allow_calls)
|
|
2155
|
+
|
|
2156
|
+
unexpected()
|
|
2157
|
+
|
|
2158
|
+
def expr_list(closing, allow_trailing_comma, allow_empty, func_call):
|
|
2159
|
+
first = True
|
|
2160
|
+
a = []
|
|
2161
|
+
saw_starargs = False
|
|
2162
|
+
while not is_("punc", closing):
|
|
2163
|
+
if saw_starargs:
|
|
2164
|
+
token_error(prev(), "*args must be the last argument in a function call")
|
|
2165
|
+
|
|
2166
|
+
if first:
|
|
2167
|
+
first = False
|
|
2168
|
+
else:
|
|
2169
|
+
expect(",")
|
|
2170
|
+
if allow_trailing_comma and is_("punc", closing):
|
|
2171
|
+
break
|
|
2172
|
+
|
|
2173
|
+
if is_("operator", "*") and func_call:
|
|
2174
|
+
saw_starargs = True
|
|
2175
|
+
next()
|
|
2176
|
+
|
|
2177
|
+
if is_("punc", ",") and allow_empty:
|
|
2178
|
+
a.push(new AST_Hole({
|
|
2179
|
+
'start': S.token,
|
|
2180
|
+
'end': S.token
|
|
2181
|
+
}))
|
|
2182
|
+
else:
|
|
2183
|
+
a.push(expression(False))
|
|
2184
|
+
|
|
2185
|
+
if func_call:
|
|
2186
|
+
tmp = []
|
|
2187
|
+
tmp.kwargs = []
|
|
2188
|
+
for arg in a:
|
|
2189
|
+
if is_node_type(arg, AST_Assign):
|
|
2190
|
+
tmp.kwargs.push([arg.left, arg.right])
|
|
2191
|
+
else:
|
|
2192
|
+
tmp.push(arg)
|
|
2193
|
+
a = tmp
|
|
2194
|
+
|
|
2195
|
+
next()
|
|
2196
|
+
if saw_starargs:
|
|
2197
|
+
a.starargs = True
|
|
2198
|
+
return a
|
|
2199
|
+
|
|
2200
|
+
def func_call_list(empty):
|
|
2201
|
+
a = v'[]'
|
|
2202
|
+
first = True
|
|
2203
|
+
a.kwargs = v'[]'
|
|
2204
|
+
a.kwarg_items = v'[]'
|
|
2205
|
+
a.starargs = False
|
|
2206
|
+
if empty:
|
|
2207
|
+
return a
|
|
2208
|
+
single_comprehension = False
|
|
2209
|
+
while not is_("punc", ')') and not is_('eof'):
|
|
2210
|
+
if not first:
|
|
2211
|
+
expect(",")
|
|
2212
|
+
if is_('punc', ')'):
|
|
2213
|
+
break
|
|
2214
|
+
if is_('operator', '*'):
|
|
2215
|
+
next()
|
|
2216
|
+
arg = expression(False)
|
|
2217
|
+
arg.is_array = True
|
|
2218
|
+
a.push(arg)
|
|
2219
|
+
a.starargs = True
|
|
2220
|
+
elif is_('operator', '**'):
|
|
2221
|
+
next()
|
|
2222
|
+
a.kwarg_items.push(as_symbol(AST_SymbolRef, False))
|
|
2223
|
+
a.starargs = True
|
|
2224
|
+
else:
|
|
2225
|
+
arg = expression(False)
|
|
2226
|
+
if is_node_type(arg, AST_Assign):
|
|
2227
|
+
a.kwargs.push([arg.left, arg.right])
|
|
2228
|
+
else:
|
|
2229
|
+
if is_('keyword', 'for'):
|
|
2230
|
+
if not first:
|
|
2231
|
+
croak('Generator expression must be parenthesized if not sole argument')
|
|
2232
|
+
a.push(read_comprehension(new AST_GeneratorComprehension({'statement': arg}), ')'))
|
|
2233
|
+
single_comprehension = True
|
|
2234
|
+
break
|
|
2235
|
+
a.push(arg)
|
|
2236
|
+
first = False
|
|
2237
|
+
if not single_comprehension:
|
|
2238
|
+
next()
|
|
2239
|
+
return a
|
|
2240
|
+
|
|
2241
|
+
@embed_tokens
|
|
2242
|
+
def array_():
|
|
2243
|
+
expect("[")
|
|
2244
|
+
expr = []
|
|
2245
|
+
if not is_("punc", "]"):
|
|
2246
|
+
expr.push(expression(False))
|
|
2247
|
+
if is_("keyword", "for"):
|
|
2248
|
+
# list comprehension
|
|
2249
|
+
return read_comprehension(new AST_ListComprehension({'statement': expr[0]}), ']')
|
|
2250
|
+
|
|
2251
|
+
if not is_("punc", "]"):
|
|
2252
|
+
expect(",")
|
|
2253
|
+
|
|
2254
|
+
return new AST_Array({
|
|
2255
|
+
'elements': expr.concat(expr_list("]", True, True))
|
|
2256
|
+
})
|
|
2257
|
+
|
|
2258
|
+
@embed_tokens
|
|
2259
|
+
def object_():
|
|
2260
|
+
expect("{")
|
|
2261
|
+
first = True
|
|
2262
|
+
has_non_const_keys = False
|
|
2263
|
+
is_pydict = S.scoped_flags.get('dict_literals', False)
|
|
2264
|
+
is_jshash = S.scoped_flags.get('hash_literals', False)
|
|
2265
|
+
a = []
|
|
2266
|
+
while not is_("punc", "}"):
|
|
2267
|
+
if not first:
|
|
2268
|
+
expect(",")
|
|
2269
|
+
if is_("punc", "}"):
|
|
2270
|
+
# allow trailing comma
|
|
2271
|
+
break
|
|
2272
|
+
first = False
|
|
2273
|
+
|
|
2274
|
+
start = S.token
|
|
2275
|
+
if is_('operator', '**'):
|
|
2276
|
+
next()
|
|
2277
|
+
a.push(new AST_ObjectSpread({
|
|
2278
|
+
'start': start,
|
|
2279
|
+
'value': expression(False),
|
|
2280
|
+
'end': prev()
|
|
2281
|
+
}))
|
|
2282
|
+
has_non_const_keys = True
|
|
2283
|
+
continue
|
|
2284
|
+
ctx = S.input.context()
|
|
2285
|
+
orig = ctx.expecting_object_literal_key
|
|
2286
|
+
ctx.expecting_object_literal_key = True
|
|
2287
|
+
try:
|
|
2288
|
+
left = expression(False)
|
|
2289
|
+
finally:
|
|
2290
|
+
ctx.expecting_object_literal_key = orig
|
|
2291
|
+
if is_('keyword', 'for'):
|
|
2292
|
+
# is_pydict is irrelevant here
|
|
2293
|
+
return read_comprehension(new AST_SetComprehension({'statement':left}), '}')
|
|
2294
|
+
if a.length is 0 and (is_('punc', ',') or is_('punc', '}')):
|
|
2295
|
+
end = prev()
|
|
2296
|
+
return set_(start, end, left)
|
|
2297
|
+
if not is_node_type(left, AST_Constant):
|
|
2298
|
+
has_non_const_keys = True
|
|
2299
|
+
expect(":")
|
|
2300
|
+
a.push(new AST_ObjectKeyVal({
|
|
2301
|
+
'start': start,
|
|
2302
|
+
'key': left,
|
|
2303
|
+
'value': expression(False),
|
|
2304
|
+
'end': prev()
|
|
2305
|
+
}))
|
|
2306
|
+
if a.length is 1 and is_('keyword', 'for'):
|
|
2307
|
+
return dict_comprehension(a, is_pydict, is_jshash)
|
|
2308
|
+
|
|
2309
|
+
next()
|
|
2310
|
+
return new (AST_ExpressiveObject if has_non_const_keys else AST_Object)({
|
|
2311
|
+
'properties': a,
|
|
2312
|
+
'is_pydict': is_pydict,
|
|
2313
|
+
'is_jshash': is_jshash,
|
|
2314
|
+
})
|
|
2315
|
+
|
|
2316
|
+
def set_(start, end, expr):
|
|
2317
|
+
ostart = start
|
|
2318
|
+
a = [new AST_SetItem({'start':start, 'end':end, 'value':expr})]
|
|
2319
|
+
while not is_("punc", "}"):
|
|
2320
|
+
expect(",")
|
|
2321
|
+
start = S.token
|
|
2322
|
+
if is_("punc", "}"):
|
|
2323
|
+
# allow trailing comma
|
|
2324
|
+
break
|
|
2325
|
+
a.push(new AST_SetItem({'start':start, 'value':expression(False), 'end':prev()}))
|
|
2326
|
+
next()
|
|
2327
|
+
return new AST_Set({'items':a, 'start':ostart, 'end':prev()})
|
|
2328
|
+
|
|
2329
|
+
def _read_comp_conditions():
|
|
2330
|
+
# Read zero or more consecutive `if` conditions, combining them with &&.
|
|
2331
|
+
if not is_('keyword', 'if'):
|
|
2332
|
+
return None
|
|
2333
|
+
expect_token('keyword', 'if')
|
|
2334
|
+
cond = expression(True)
|
|
2335
|
+
while is_('keyword', 'if'):
|
|
2336
|
+
expect_token('keyword', 'if')
|
|
2337
|
+
next_cond = expression(True)
|
|
2338
|
+
cond = new AST_Binary({'operator': '&&', 'left': cond, 'right': next_cond,
|
|
2339
|
+
'start': cond.start, 'end': next_cond.end})
|
|
2340
|
+
return cond
|
|
2341
|
+
|
|
2342
|
+
# Populate a comprehension node (`obj`) from the token stream: the first
# `for` clause, its optional `if` condition(s), any additional nested `for`
# clauses, and finally the closing `terminator` punctuation (']', '}' or ')').
# Returns the filled-in node.
def read_comprehension(obj, terminator):
    if is_node_type(obj, AST_GeneratorComprehension):
        # generator comprehensions compile down to the yield baselib helper
        baselib_items['yield'] = True
    S.in_comprehension = True
    S.in_parenthesized_expr = False  # in case we are already in a parenthesized expression
    expect_token('keyword', 'for')
    forloop = for_(True)
    obj.init = forloop.init
    obj.name = forloop.name
    obj.object = forloop.object
    obj.condition = _read_comp_conditions()
    # Read additional for-clauses for nested comprehensions
    clauses = []
    while is_('keyword', 'for'):
        expect_token('keyword', 'for')
        inner = for_(True)
        clauses.push({'init': inner.init, 'object': inner.object,
            'condition': _read_comp_conditions()})
    obj.clauses = clauses
    expect(terminator)
    S.in_comprehension = False
    return obj
|
|
2364
|
+
|
|
2365
|
+
# Parse a dict (or set) comprehension body. `a` holds at most one already-parsed
# AST_ObjectKeyVal (the `k: v` pair seen before `for`); if empty, the key
# expression is read here and, absent a ':', the literal is actually a set
# comprehension.
def dict_comprehension(a, is_pydict, is_jshash):
    if a.length:
        left, right = a[0].key, a[0].value
    else:
        left = expression(False)
        if not is_('punc', ':'):
            # `{expr for ...}` — a set comprehension, not a dict one
            return read_comprehension(new AST_SetComprehension({'statement':left}), '}')
        expect(':')
        right = expression(False)
    return read_comprehension(new AST_DictComprehension({'statement':left, 'value_statement':right, 'is_pydict':is_pydict, 'is_jshash':is_jshash}), '}')
|
|
2375
|
+
|
|
2376
|
+
# Consume the current token and return its value as a property name.
# Unlike plain identifiers, keywords/operators/atoms are acceptable here
# (e.g. `obj.delete`, `obj.in`); anything else is a syntax error.
def as_name():
    tmp = S.token
    next()
    tmp_ = tmp.type
    if tmp_ is "name" or tmp_ is "operator" or tmp_ is "keyword" or tmp_ is "atom":
        return tmp.value
    else:
        unexpected()
|
|
2384
|
+
|
|
2385
|
+
# Build a symbol node of class `ttype` from token `tok`, rejecting reserved
# words. `this` is special-cased to an AST_This node rather than `ttype`.
def token_as_symbol(tok, ttype):
    name = tok.value
    if RESERVED_WORDS[name] and name is not 'this':
        croak(name + ' is a reserved word')
    return new (AST_This if name is 'this' else ttype)({
        # force a primitive string copy of the token value
        'name': v"String(tok.value)",
        'start': tok,
        'end': tok
    })
|
|
2394
|
+
|
|
2395
|
+
# Expect the current token to be a name and consume it as a symbol node of
# class `ttype`. With `noerror` truthy, returns None instead of croaking
# when the current token is not a name.
def as_symbol(ttype, noerror):
    if not is_("name"):
        if not noerror:
            croak("Name expected")
        return None

    sym = token_as_symbol(S.token, ttype)
    next()
    return sym
|
|
2404
|
+
|
|
2405
|
+
# for generating/inserting a new symbol
|
|
2406
|
+
# Synthesize a symbol node (no source tokens) of class `type` with the given
# name — used for symbols generated by the parser rather than read from input.
# `this` gets an AST_This node, mirroring token_as_symbol().
def new_symbol(type, name):
    sym = new (AST_This if name is 'this' else type)({
        'name': v"String(name)",
        'start': None,
        'end': None
    })
    return sym
|
|
2413
|
+
|
|
2414
|
+
# Return True if `method` is a static method or classmethod on class record
# `cls` (a parser-side class descriptor, not an AST node), including methods
# inherited through the parent chain when the parent is resolvable by name
# in the current class-scope stack.
def is_static_method(cls, method):
    if has_prop(COMMON_STATIC, method):
        return True
    if cls.static and has_prop(cls.static, method):
        return True
    if cls.classmethod and has_prop(cls.classmethod, method):
        return True
    # Walk parent chain for inherited static/classmethod
    if cls.parent and is_node_type(cls.parent, AST_SymbolRef):
        parent_name = cls.parent.name
        # search innermost scope first
        for v'var si = S.classes.length - 1; si >= 0; si--':
            if has_prop(S.classes[si], parent_name):
                return is_static_method(S.classes[si][parent_name], method)
    return False
|
|
2428
|
+
|
|
2429
|
+
# Parse a subscript applied to `expr`, starting at the '[' token. Handles:
#   plain index        arr[i]            → AST_Sub (or AST_ItemAccess when the
#                                          overload_getitem scoped flag is on)
#   slices             arr[a:b]          → expr.slice(a, b) call
#   extended slices    arr[a:b:c]        → ρσ_eslice baselib call
#   splice assignment  arr[a:b] = v      → AST_Splice
#   delete of a slice  (inside `delete`) → ρσ_delslice baselib call
#   multi-dim index    arr[i, j]         → index by a tuple-flagged AST_Array
# Returns via subscripts() so chained accessors keep being consumed.
def getitem(expr, allow_calls):
    start = expr.start
    next()  # consume '['
    is_py_sub = S.scoped_flags.get('overload_getitem', False)
    slice_bounds = v'[]'
    is_slice = False
    if is_("punc", ":"):
        # slice [:n]
        slice_bounds.push(None)
    else:
        slice_bounds.push(expression(False))

    if is_("punc", ":"):
        # slice [n:m?]
        is_slice = True
        next()
        if is_("punc", ":"):
            slice_bounds.push(None)
        elif not is_("punc", "]"):
            slice_bounds.push(expression(False))

        if is_("punc", ":"):
            # slice [n:m:o?]
            next()
            if is_("punc", "]"):
                unexpected()  # a step slot was opened but no step given
            else:
                slice_bounds.push(expression(False))

    # Multi-dimensional subscript: a[1, 2] → a[[1, 2]]
    is_multi = False
    multi_items = v'[]'
    if not is_slice and is_("punc", ","):
        is_multi = True
        multi_items.push(slice_bounds[0] or new AST_Number({'value': 0}))
        while is_("punc", ","):
            next()
            multi_items.push(expression(False))

    expect("]")

    if is_slice:
        if is_("operator", '='):
            # splice-assignment (arr[start:end] = ...)
            next()  # swallow the assignment
            return subscripts(new AST_Splice({
                'start': start,
                'expression': expr,
                # a missing lower bound defaults to 0
                'property': slice_bounds[0] or new AST_Number({
                    'value': 0
                }),
                'property2': slice_bounds[1],
                'assignment': expression(True),
                'end': prev()
            }), allow_calls)
        elif slice_bounds.length is 3:
            # extended slice (arr[start:end:step])
            # reorder to (step, start, end) for the baselib helper
            slice_bounds.unshift(slice_bounds.pop())
            # drop trailing missing bounds; fill a missing start with undefined
            if not slice_bounds[-1]:
                slice_bounds.pop()
                if not slice_bounds[-1]:
                    slice_bounds.pop()
            elif not slice_bounds[-2]:
                slice_bounds[-2] = new AST_Undefined()
            return subscripts(new AST_Call({
                'start': start,
                'expression': new AST_SymbolRef({
                    # inside a `delete`, an extended slice deletes instead of reads
                    'name': 'ρσ_delslice' if S.in_delete else "ρσ_eslice"
                }),
                'args': [expr].concat(slice_bounds),
                'end': prev()
            }), allow_calls)
        else:
            # regular slice (arr[start:end])
            slice_bounds = [new AST_Number({'value':0}) if i is None else i for i in slice_bounds]
            if S.in_delete:
                return subscripts(new AST_Call({
                    'start': start,
                    'expression': new AST_SymbolRef({'name': 'ρσ_delslice'}),
                    # step of 1 for plain slice deletion
                    'args': [expr, new AST_Number({'value':1})].concat(slice_bounds),
                    'end': prev()
                }), allow_calls)

            return subscripts(new AST_Call({
                'start': start,
                'expression': new AST_Dot({
                    'start': start,
                    'expression': expr,
                    'property': "slice",
                    'end': prev()
                }),
                'args': slice_bounds,
                'end': prev()
            }), allow_calls)
    else:
        # regular index (arr[index])
        prop = slice_bounds[0] or new AST_Number({'value': 0})
        if is_multi:
            multi_arr = new AST_Array({'start': start, 'elements': multi_items, 'end': prev()})
            # flag so later stages know this array is a subscript tuple
            multi_arr.is_subscript_tuple = True
            prop = multi_arr
        if is_py_sub:
            # __getitem__/__setitem__ overloading: capture any following
            # (augmented) assignment into the AST_ItemAccess node itself
            assignment = None
            assign_operator = ''
            if is_("operator") and ASSIGNMENT[S.token.value]:
                assign_operator = S.token.value[:-1]  # '+=' → '+', '=' → ''
                next()
                assignment = expression(True)
            return subscripts(new AST_ItemAccess({
                'start': start,
                'expression': expr,
                'property': prop,
                'assignment':assignment,
                'assign_operator': assign_operator,
                'end': prev()
            }), allow_calls)

        return subscripts(new AST_Sub({
            'start': start,
            'expression': expr,
            'property': prop,
            'end': prev()
        }), allow_calls)
|
|
2552
|
+
|
|
2553
|
+
# Parse a call applied to `expr`, starting at the '(' token. Depending on what
# `expr` is, this produces:
#   - AST_New        when `expr` names a class in scope (implicit `new`)
#   - AST_ClassCall  for super().method(...) and for Class.method(...) calls
#   - AST_UnaryPrefix(typeof) for the jstype() pseudo-function
#   - AST_Binary(instanceof)  for isinstance(a, b)
#   - AST_Super      for super() / super(Class, self)
#   - AST_Call       for everything else
# S.in_parenthesized_expr is set for the duration of argument parsing.
def call_(expr):
    start = expr.start
    S.in_parenthesized_expr = True
    next()  # consume '('
    if not expr.parens and get_class_in_scope(expr):
        # this is an object being created using a class
        ret = subscripts(new AST_New({
            'start': start,
            'expression': expr,
            'args': func_call_list(),
            'end': prev()
        }), True)
        S.in_parenthesized_expr = False
        return ret
    else:
        if is_node_type(expr, AST_Dot):
            if is_node_type(expr.expression, AST_Super):
                # super().method(args) → ParentClass.prototype.method.call(this, args)
                super_node = expr.expression
                method_name = expr.property
                method_args = func_call_list()
                this_node = new AST_This({
                    'name': 'this',
                    'start': start,
                    'end': start
                })
                # the instance becomes the explicit first argument
                method_args.unshift(this_node)
                ret = subscripts(new AST_ClassCall({
                    'start': start,
                    'class': super_node.parent,
                    'method': method_name,
                    'static': False,
                    'args': method_args,
                    'end': prev()
                }), True)
                S.in_parenthesized_expr = False
                return ret
            c = get_class_in_scope(expr.expression)

            if c:
                # generate class call
                funcname = expr

                ret = subscripts(new AST_ClassCall({
                    'start': start,
                    "class": expr.expression,
                    'method': funcname.property,
                    "static": is_static_method(c, funcname.property),
                    'args': func_call_list(),
                    'end': prev()
                }), True)
                S.in_parenthesized_expr = False
                return ret
        elif is_node_type(expr, AST_SymbolRef):
            tmp_ = expr.name
            if tmp_ is "jstype":
                # jstype(x) → typeof x
                ret = new AST_UnaryPrefix({
                    'start': start,
                    'operator': "typeof",
                    'expression': func_call_list()[0],
                    'end': prev()
                })
                S.in_parenthesized_expr = False
                return ret
            elif tmp_ is "isinstance":
                # isinstance(a, b) → a instanceof b
                args = func_call_list()
                if args.length is not 2:
                    croak('isinstance() must be called with exactly two arguments')
                ret = new AST_Binary({
                    'start': start,
                    'left': args[0],
                    'operator': 'instanceof',
                    'right': args[1],
                    'end': prev()
                })
                S.in_parenthesized_expr = False
                return ret
            elif tmp_ is "super":
                # Find the innermost enclosing class name
                current_class = None
                for v'var i = S.in_class.length - 1; i >= 0; i--':
                    if S.in_class[i]:
                        current_class = S.in_class[i]
                        break
                if not current_class:
                    croak('super() is only valid inside a class method')
                super_args = func_call_list()
                parent_expr = None
                if super_args.length is 0:
                    # 0-arg form: use parent of current class
                    for v'var s = S.classes.length - 1; s >= 0; s--':
                        if has_prop(S.classes[s], current_class):
                            parent_expr = S.classes[s][current_class].parent
                            break
                    if not parent_expr:
                        croak('super() used in a class without a parent class')
                elif super_args.length is 2:
                    # 2-arg form: super(ClassName, self) — use parent of ClassName
                    cls_ref = super_args[0]
                    cls_info = get_class_in_scope(cls_ref)
                    if not cls_info or not cls_info.parent:
                        croak('First argument to super() must be a subclass with a parent')
                    parent_expr = cls_info.parent
                else:
                    croak('super() takes 0 or 2 arguments (' + super_args.length + ' given)')
                super_node = new AST_Super({
                    'start': start,
                    'parent': parent_expr,
                    'class_name': current_class,
                    'end': prev()
                })
                S.in_parenthesized_expr = False
                # Call subscripts to handle subsequent .method(args) chain
                return subscripts(super_node, True)

        # fall-through to basic function call
        ret = subscripts(new AST_Call({
            'start': start,
            'expression': expr,
            'args': func_call_list(),
            'end': prev()
        }), True)
        S.in_parenthesized_expr = False
        return ret
|
|
2677
|
+
|
|
2678
|
+
# Parse attribute access `expr.prop`, starting at the '.' token. When `expr`
# names a class and `prop` is a known class variable, the property is
# rewritten to 'prototype.<prop>' so access goes through the prototype.
def get_attr(expr, allow_calls):
    next()  # consume '.'
    prop = as_name()
    c = get_class_in_scope(expr)
    if c:
        # while a class body is still being parsed only provisional
        # classvars are known
        classvars = c.provisional_classvars if c.processing else c.classvars
        if classvars and v'classvars[prop]':
            prop = 'prototype.' + prop
    elif S.classmethod_ctx_stack.length > 0:
        # inside a classmethod, `cls.<var>` also resolves through the prototype
        ctx = S.classmethod_ctx_stack[S.classmethod_ctx_stack.length - 1]
        if is_node_type(expr, AST_SymbolRef) and expr.name is ctx.cls_name:
            pclassvars = ctx.class_entry.provisional_classvars
            if pclassvars and v'pclassvars[prop]':
                prop = 'prototype.' + prop

    return subscripts(new AST_Dot({
        'start': expr.start,
        'expression': expr,
        'property': prop,
        'end': prev()
    }), allow_calls)
|
|
2699
|
+
|
|
2700
|
+
# Parse the existential operator `expr?`, starting at the '?' token.
# `ans.after` records what follows the '?' so the code generator knows how
# to continue: None (bare test), '.' (attr), '[' or 'g' (subscript; 'g' when
# overload_getitem is active), '(' (call), or an arbitrary expression
# (ternary-style `a ? b`).
def existential(expr, allow_calls):
    ans = new AST_Existential({'start':expr.start, 'end':S.token, 'expression':expr})
    next()  # consume '?'
    ttype = S.token.type
    val = S.token.value
    if S.token.nlb or ttype is 'keyword' or ttype is 'operator' or ttype is 'eof':
        # nothing usable follows on this line — bare existence test
        ans.after = None
        return ans
    if ttype is 'punc':
        if val is '.':
            ans.after = '.'
        elif val is '[':
            is_py_sub = S.scoped_flags.get('overload_getitem', False)
            ans.after = 'g' if is_py_sub else '['
        elif val is '(':
            if not allow_calls:
                unexpected()
            ans.after = '('
        else:
            ans.after = None
            return ans
        # chained accessor follows — keep consuming subscripts
        return subscripts(ans, allow_calls)

    ans.after = expression()
    return ans
|
|
2725
|
+
|
|
2726
|
+
# Consume any chain of accessors following `expr` — '.', '[...]', '(...)',
# '?' — dispatching to the specific parser for each; the helpers recurse back
# here, so arbitrarily long chains (a.b[c](d)?) are handled. '[' and '(' on a
# new line (token.nlb) do NOT attach, matching statement-break semantics.
def subscripts(expr, allow_calls):
    if is_("punc", "."):
        return get_attr(expr, allow_calls)

    if is_("punc", "[") and not S.token.nlb:
        return getitem(expr, allow_calls)

    if allow_calls and is_("punc", "(") and not S.token.nlb:
        return call_(expr)

    if is_('punc', '?'):
        return existential(expr, allow_calls)

    return expr
|
|
2740
|
+
|
|
2741
|
+
# Parse an optional prefix construct before an atom: a decorator '@expr'
# (collected into S.decorators and replaced by an empty statement), or a
# unary prefix operator. Falls through to expr_atom() otherwise.
def maybe_unary(allow_calls):
    start = S.token
    if is_('operator', '@'):
        if S.parsing_decorator:
            croak('Nested decorators are not allowed')
        next()
        S.parsing_decorator = True
        expr = expression()
        S.parsing_decorator = False
        S.decorators.push(expr)
        # the decorator itself produces no statement
        return new AST_EmptyStatement({'stype':'@', 'start':prev(), 'end':prev()})
    if is_("operator") and UNARY_PREFIX[start.value]:
        next()
        is_parenthesized = is_('punc', '(')
        # getitem() needs to know whether we are inside a `delete`
        S.in_delete = start.value is 'delete'
        expr = maybe_unary(allow_calls)
        S.in_delete = False
        ex = make_unary(AST_UnaryPrefix, start.value, expr, is_parenthesized)
        ex.start = start
        ex.end = prev()
        # only -, + and ~ participate in operator overloading
        if S.scoped_flags.get('overload_operators', False) and (start.value is '-' or start.value is '+' or start.value is '~'):
            ex.overloaded = True
        return ex

    val = expr_atom(allow_calls)
    return val
|
|
2767
|
+
|
|
2768
|
+
# Construct a unary AST node of class `ctor` with operator `op` applied to
# `expr`; `is_parenthesized` records whether the operand was parenthesized.
# Caller fills in start/end tokens.
def make_unary(ctor, op, expr, is_parenthesized):
    return new ctor({
        'operator': op,
        'expression': expr,
        'parenthesized': is_parenthesized
    })
|
|
2774
|
+
|
|
2775
|
+
# Precedence-climbing binary-expression parser: extend `left` with operators
# whose precedence exceeds `min_prec`. Also normalizes `! in` to the 'nin'
# pseudo-operator, and suppresses 'in'/'nin' when `no_in` is set (so `for x in`
# headers parse correctly).
def expr_op(left, min_prec, no_in):
    op = S.token.value if is_('operator') else None
    if op is "!" and peek().type is "operator" and peek().value is "in":
        # `a ! in b` → `a nin b` (not-in)
        next()
        S.token.value = op = 'nin'

    if no_in and (op is "in" or op is 'nin'):
        op = None

    prec = PRECEDENCE[op] if op is not None else None
    # Prevent * from crossing a newline boundary outside of brackets:
    # `*name` at the start of a line is a starred assignment target, not multiplication.
    if prec is not None and op is '*' and S.token.nlb and not S.in_parenthesized_expr and peek().type is 'name':
        prec = None
    if prec is not None and prec > min_prec:
        next()
        # right operand binds everything tighter than `prec`
        right = expr_op(maybe_unary(True), prec, no_in)
        ret = new AST_Binary({
            'start': left.start,
            'left': left,
            'operator': op,
            'right': right,
            'end': right.end,
            'overloaded': S.scoped_flags.get('overload_operators', False)
        })
        # continue extending at the original precedence floor
        return expr_op(ret, min_prec, no_in)
    return left
|
|
2802
|
+
|
|
2803
|
+
# Parse a full binary-operator expression starting from precedence 0.
def expr_ops(no_in):
    return expr_op(maybe_unary(True), 0, no_in)
|
|
2805
|
+
|
|
2806
|
+
# Parse an expression, optionally followed by a Python-style conditional
# (`consequent if condition else alternative`). The trailing `if` is only
# treated as a ternary when it cannot be the start of an `if` statement:
# inside parentheses, or mid-statement on the same line, outside a
# comprehension (where `if` is a filter clause).
def maybe_conditional(no_in):
    start = S.token
    expr = expr_ops(no_in)
    if (is_('keyword', 'if') and (S.in_parenthesized_expr or (S.statement_starting_token is not S.token and not S.in_comprehension and not S.token.nlb))):
        next()
        ne = expression(False)
        expect_token('keyword', 'else')
        conditional = new AST_Conditional({
            'start': start,
            'condition': ne,
            # `expr` was parsed first but is the branch taken when true
            'consequent': expr,
            'alternative': expression(False, no_in),
            'end': peek()
        })
        return conditional
    return expr
|
|
2822
|
+
|
|
2823
|
+
# Construct an AST_Assign from `data` after validating it: chained
# assignments and starred (tuple-unpacking) targets only permit plain '=',
# and at most one starred target is allowed. Also records simple-name LHS
# targets as provisional class variables when assigning inside a class body.
def create_assign(data):
    if data.right and is_node_type(data.right, AST_Seq) and (
            is_node_type(data.right.car, AST_Assign) or
            is_node_type(data.right.cdr, AST_Assign)) and data.operator is not '=':
        token_error(data.start, 'Invalid assignment operator for chained assignment: ' + data.operator)
    if is_node_type(data.left, AST_Array):
        # tuple-unpacking target: validate starred elements
        starred_count = 0
        for elem in data.left.elements:
            if is_node_type(elem, AST_Starred):
                starred_count += 1
        if starred_count > 1:
            token_error(data.start, 'Multiple starred expressions in assignment')
        if starred_count > 0 and data.operator is not '=':
            token_error(data.start, 'Starred assignment requires = operator, not ' + data.operator)
    # NOTE(review): constructed without `new`, unlike every other AST node in
    # this file — presumably AST_Assign's constructor tolerates plain calls;
    # verify against the DEFNODE machinery.
    ans = AST_Assign(data)
    if S.in_class.length and S.in_class[-1]:
        class_name = S.in_class[-1]
        if is_node_type(ans.left, AST_SymbolRef) and S.classes.length > 1:
            # class record lives one scope up from the class body being parsed
            c = S.classes[-2][class_name]
            if c:
                if ans.is_chained():
                    for lhs in ans.traverse_chain()[0]:
                        c.provisional_classvars[lhs.name] = True
                else:
                    c.provisional_classvars[ans.left.name] = True
    return ans
|
|
2849
|
+
|
|
2850
|
+
# Parse an expression, optionally followed by the walrus operator
# (`name := value` → AST_NamedExpr) or an (augmented) assignment.
# `only_plain_assignment` forbids augmented operators — used for the RHS of
# chained assignments (a = b = c), where only '=' is legal.
def maybe_assign(no_in, only_plain_assignment):
    start = S.token
    left = maybe_conditional(no_in)
    val = S.token.value
    if is_("operator", ":="):
        # walrus operator (named expression): name := value
        if not is_node_type(left, AST_SymbolRef):
            croak('Walrus operator := requires a simple name on the left-hand side')
        next()
        return new AST_NamedExpr({
            'start': start,
            'name': left,
            'value': maybe_assign(no_in),
            'end': prev()
        })
    if is_("operator") and ASSIGNMENT[val]:
        if only_plain_assignment and val is not '=':
            croak('Invalid assignment operator for chained assignment: ' + val)
        next()
        asgn = create_assign({
            'start': start,
            'left': left,
            'operator': val,
            # the RHS of an assignment may itself be a chained plain assignment
            'right': maybe_assign(no_in, True),
            'end': prev()
        })
        # augmented assignments participate in operator overloading
        if S.scoped_flags.get('overload_operators', False) and val is not '=':
            asgn.overloaded = True
        return asgn
    return left
|
|
2880
|
+
|
|
2881
|
+
# Parse *name as a starred assignment target (e.g. `a, *rest = xs`),
# starting at the '*' token. Only a simple identifier may follow the star.
def parse_starred_lhs():
    start = S.token
    next()  # consume '*'
    if not is_('name'):
        croak('Expected identifier after * in starred assignment')
    name_tok = S.token
    next()
    return new AST_Starred({
        'start': start,
        'expression': new AST_SymbolRef({
            'start': name_tok,
            'name': name_tok.value,
            'end': name_tok
        }),
        'end': prev()
    })
|
|
2898
|
+
|
|
2899
|
+
# Top-level expression parser. With `commas` truthy, parses a comma-separated
# sequence, recognizing tuple packing/unpacking assignments: when one element
# of the sequence is itself an assignment, the sequence is pivoted around it
# so `a, b = x, y` becomes an assignment with an array LHS and a sequence RHS.
# Starred targets (`a, *b = xs`) are accepted as LHS items.
def expression(commas, no_in):
    # if there is an assignment, we want the sequences to pivot
    # around it to allow for tuple packing/unpacking
    start = S.token
    if commas and is_('operator', '*'):
        expr = parse_starred_lhs()
    else:
        expr = maybe_assign(no_in)
    # Fold a flat list of expressions into nested AST_Seq (car/cdr) pairs.
    def build_seq(a):
        if a.length is 1:
            return a[0]

        return new AST_Seq({
            'start': start,
            'car': a.shift(),
            'cdr': build_seq(a),
            'end': peek()
        })
    if commas:
        left = v'[ expr ]'
        while is_("punc", ",") and not peek().nlb:
            next()
            if is_node_type(expr, AST_Assign):
                # pivot: everything so far is the LHS tuple, the assignment's
                # RHS starts a sequence continued by the rest of the expression
                left[-1] = left[-1].left
                return create_assign({
                    'start': start,
                    'left': (left[0] if left.length is 1 else new AST_Array({
                        'elements': left
                    })),
                    'operator': expr.operator,
                    'right': new AST_Seq({
                        'car': expr.right,
                        'cdr': expression(True, no_in)
                    }),
                    'end': peek()
                })

            if is_('operator', '*'):
                expr = parse_starred_lhs()
            else:
                expr = maybe_assign(no_in)
            left.push(expr)

        # Handle starred as final LHS item: a, *b = rhs
        if left.length > 1 and is_node_type(left[-1], AST_Starred) and is_("operator") and ASSIGNMENT[S.token.value]:
            val = S.token.value
            next()
            return create_assign({
                'start': start,
                'left': new AST_Array({'elements': left}),
                'operator': val,
                'right': expression(True, no_in),
                'end': prev()
            })

        # if last one was an assignment, fix it
        if left.length > 1 and is_node_type(left[-1], AST_Assign):
            left[-1] = left[-1].left
            return create_assign({
                'start': start,
                'left': new AST_Array({
                    'elements': left
                }),
                'operator': expr.operator,
                'right': expr.right,
                'end': peek()
            })

        return build_seq(left)
    return expr
|
|
2969
|
+
|
|
2970
|
+
# Run `cont()` with the loop-nesting counter incremented, so break/continue
# validation inside `cont` knows it is within a loop body.
def in_loop(cont):
    S.in_loop += 1
    ret = cont()
    S.in_loop -= 1
    return ret
|
|
2975
|
+
|
|
2976
|
+
# Drive the parse: read statements until EOF, collect the shebang and module
# docstrings, then build (or extend, in REPL/concatenation mode) the
# AST_Toplevel and annotate it with scope information (nonlocal/local vars,
# exports, classes, imports, baselib usage, scoped flags).
def run_parser():
    start = S.token = next()
    body = v'[]'
    docstrings = v'[]'
    first_token = True
    toplevel = options.toplevel
    while not is_("eof"):
        element = statement()
        if first_token and is_node_type(element, AST_Directive) and element.value.indexOf('#!') is 0:
            shebang = element.value
        else:
            ds = not toplevel and is_docstring(element) # do not process strings as docstrings if we are concatenating toplevels
            if ds:
                docstrings.push(ds)
            else:
                body.push(element)
        first_token = False

    end = prev()
    if toplevel:
        # extending an existing toplevel (REPL / concatenation mode)
        toplevel.body = toplevel.body.concat(body)
        toplevel.end = end
        # BUG FIX: this was a bare attribute access (`toplevel.docstrings`),
        # a no-op that silently discarded docstrings gathered during this
        # parse; merge them like `body` is merged above.
        toplevel.docstrings = toplevel.docstrings.concat(docstrings)
    else:
        toplevel = new AST_Toplevel({
            'start': start,
            'body': body,
            'shebang': shebang,
            'end': end,
            'docstrings': docstrings,
        })

    toplevel.nonlocalvars = scan_for_nonlocal_defs(toplevel.body).concat(S.globals)
    toplevel.localvars = []
    toplevel.exports = []
    seen_exports = {}

    # Register `item` as an export (and, when isvar, a local variable),
    # skipping names declared nonlocal/global and deduplicating exports.
    def add_item(item, isvar):
        if (toplevel.nonlocalvars.indexOf(item) < 0):
            symbol = new_symbol(AST_SymbolVar, item)
            if isvar:
                toplevel.localvars.push(symbol)
            if not has_prop(seen_exports, item):
                toplevel.exports.push(symbol)
                seen_exports[item] = True

    for item in scan_for_local_vars(toplevel.body):
        add_item(item, True)
    for item in scan_for_top_level_callables(toplevel.body):
        add_item(item, False)

    toplevel.filename = options.filename
    toplevel.imported_module_ids = imported_module_ids
    toplevel.classes = scan_for_classes(toplevel.body)
    # import_order reflects how many modules were already imported when this
    # module finished parsing
    toplevel.import_order = Object.keys(imported_modules).length
    toplevel.module_id = module_id
    imported_modules[module_id] = toplevel
    toplevel.imports = imported_modules
    toplevel.baselib = baselib_items
    toplevel.scoped_flags = S.scoped_flags.stack[0]
    importing_modules[module_id] = False  # this module is no longer mid-import
    toplevel.comments_after = S.token.comments_before or v'[]'
    return toplevel
|
|
3039
|
+
|
|
3040
|
+
return run_parser
|
|
3041
|
+
|
|
3042
|
+
# Entry point: parse RapydScript source `text` (or an existing tokenizer
# stream) into an AST_Toplevel. Sets up the parser state object S — token
# stream position, nesting counters, class/function scope stacks, decorator
# collection, and the scoped-flags stack — then delegates to the parser
# closure built by create_parser_ctx().
def parse(text, options):
    options = defaults(options, {
        'filename': None, # name of the file being parsed
        'module_id':'__main__', # The id of the module being parsed
        'toplevel': None,
        'for_linting': False, # If True certain actions are not performed, such as importing modules
        'import_dirs': v'[]',
        'classes': undefined, # Map of class names to AST_Class that are available in the global namespace (used by the REPL)
        'scoped_flags': {}, # Global scoped flags (used by the REPL)
        'discard_asserts': False,
        'module_cache_dir': '',
    })
    # copy so we never mutate the caller's list
    import_dirs = [x for x in options.import_dirs]
    for location in v'[options.libdir, options.basedir]':
        if location:
            import_dirs.push(location)
    module_id = options.module_id
    baselib_items = {}
    imported_module_ids = []
    imported_modules = options.imported_modules or {}
    importing_modules = options.importing_modules or {}
    # mark this module as mid-import for circular-import detection
    importing_modules[module_id] = True

    # The internal state of the parser
    S = {
        # `text` may already be a tokenizer stream
        'input': tokenizer(text, options.filename) if jstype(text) is 'string' else text,
        'token': None,
        'prev': None,
        'peeked': [],
        'in_function': 0,
        'statement_starting_token': None,
        'in_comprehension': False,
        'in_parenthesized_expr': False,
        'in_delete': False,
        'in_loop': 0,
        'in_class': [ False ],
        'classes': [ {} ],
        'functions': [ {} ],
        'labels': [],
        'decorators': v'[]',
        'parsing_decorator': False,
        'globals': v'[]',
        # per-scope compiler flags with dict-like push/pop/get/set
        'scoped_flags': {
            'stack': v'[options.scoped_flags || Object.create(null)]',
            'push': def (): this.stack.push(Object.create(None));,
            'pop': def (): this.stack.pop();,
            'get': def (name, defval):
                # innermost scope wins
                for v'var i = this.stack.length - 1; i >= 0; i--':
                    d = this.stack[i]
                    q = d[name]
                    if q:
                        return q
                return defval
            ,
            'set': def (name, val): this.stack[-1][name] = val;,
        },
        'classmethod_ctx_stack': v'[]',
    }

    if options.classes:
        # seed the global class scope with externally-known classes (REPL)
        for cname in options.classes:
            obj = options.classes[cname]
            S.classes[0][cname] = { 'static':obj.static, 'classmethod':obj.classmethod, 'bound':obj.bound, 'classvars': obj.classvars }

    return create_parser_ctx(S, import_dirs, module_id, baselib_items, imported_module_ids, imported_modules, importing_modules, options)()
|