@samuelbines/nunjucks 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +26 -0
- package/README.md +55 -0
- package/dist/scripts/smoke.d.ts +1 -0
- package/dist/scripts/smoke.js +95 -0
- package/dist/src/compiler.d.ts +12 -0
- package/dist/src/compiler.js +1050 -0
- package/dist/src/environment.d.ts +103 -0
- package/dist/src/environment.js +621 -0
- package/dist/src/express-app.d.ts +2 -0
- package/dist/src/express-app.js +33 -0
- package/dist/src/filters.d.ts +44 -0
- package/dist/src/filters.js +424 -0
- package/dist/src/globals.d.ts +14 -0
- package/dist/src/globals.js +342 -0
- package/dist/src/index.d.ts +28 -0
- package/dist/src/index.js +116 -0
- package/dist/src/interpreter.d.ts +16 -0
- package/dist/src/interpreter.js +489 -0
- package/dist/src/lexer.d.ts +72 -0
- package/dist/src/lexer.js +480 -0
- package/dist/src/lib.d.ts +74 -0
- package/dist/src/lib.js +237 -0
- package/dist/src/loader.d.ts +80 -0
- package/dist/src/loader.js +175 -0
- package/dist/src/nodes.d.ts +362 -0
- package/dist/src/nodes.js +894 -0
- package/dist/src/parser.d.ts +66 -0
- package/dist/src/parser.js +1068 -0
- package/dist/src/precompile.d.ts +15 -0
- package/dist/src/precompile.js +108 -0
- package/dist/src/runtime.d.ts +33 -0
- package/dist/src/runtime.js +314 -0
- package/dist/src/transformer.d.ts +3 -0
- package/dist/src/transformer.js +161 -0
- package/dist/src/types.d.ts +27 -0
- package/dist/src/types.js +2 -0
- package/dist/tests/compiler.test.d.ts +1 -0
- package/dist/tests/compiler.test.js +201 -0
- package/dist/tests/enviornment.test.d.ts +1 -0
- package/dist/tests/enviornment.test.js +279 -0
- package/dist/tests/express.test.d.ts +1 -0
- package/dist/tests/express.test.js +86 -0
- package/dist/tests/filters.test.d.ts +13 -0
- package/dist/tests/filters.test.js +286 -0
- package/dist/tests/globals.test.d.ts +1 -0
- package/dist/tests/globals.test.js +579 -0
- package/dist/tests/interpreter.test.d.ts +1 -0
- package/dist/tests/interpreter.test.js +208 -0
- package/dist/tests/lexer.test.d.ts +1 -0
- package/dist/tests/lexer.test.js +249 -0
- package/dist/tests/lib.test.d.ts +1 -0
- package/dist/tests/lib.test.js +236 -0
- package/dist/tests/loader.test.d.ts +1 -0
- package/dist/tests/loader.test.js +301 -0
- package/dist/tests/nodes.test.d.ts +1 -0
- package/dist/tests/nodes.test.js +137 -0
- package/dist/tests/parser.test.d.ts +1 -0
- package/dist/tests/parser.test.js +294 -0
- package/dist/tests/precompile.test.d.ts +1 -0
- package/dist/tests/precompile.test.js +224 -0
- package/dist/tests/runtime.test.d.ts +1 -0
- package/dist/tests/runtime.test.js +237 -0
- package/dist/tests/transformer.test.d.ts +1 -0
- package/dist/tests/transformer.test.js +125 -0
- package/dist/tsconfig.tsbuildinfo +1 -0
- package/package.json +59 -0
|
@@ -0,0 +1,1068 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
exports.parse = exports.Parser = void 0;
|
|
37
|
+
//Requires work: Sun 4th Jan 2026
|
|
38
|
+
const lexer_1 = require("./lexer");
|
|
39
|
+
const lexer = __importStar(require("./lexer"));
|
|
40
|
+
const nodes_1 = require("./nodes");
|
|
41
|
+
const lib_1 = require("./lib");
|
|
42
|
+
class Parser {
|
|
43
|
+
tokenizer;
|
|
44
|
+
peeked = null;
|
|
45
|
+
breakOnBlocks = null;
|
|
46
|
+
dropLeadingWhitespace = false;
|
|
47
|
+
extensions = [];
|
|
48
|
+
constructor(tokens) {
|
|
49
|
+
this.tokenizer = tokens;
|
|
50
|
+
}
|
|
51
|
+
init(tokens) {
|
|
52
|
+
this.tokenizer = tokens;
|
|
53
|
+
}
|
|
54
|
+
nextToken(withWhitespace = false) {
|
|
55
|
+
let tok;
|
|
56
|
+
if (this.peeked) {
|
|
57
|
+
if (!withWhitespace && this.peeked.type === lexer_1.TOKEN_WHITESPACE) {
|
|
58
|
+
this.peeked = null;
|
|
59
|
+
}
|
|
60
|
+
else {
|
|
61
|
+
tok = this.peeked;
|
|
62
|
+
this.peeked = null;
|
|
63
|
+
return tok;
|
|
64
|
+
}
|
|
65
|
+
}
|
|
66
|
+
tok = this.tokenizer.nextToken();
|
|
67
|
+
if (!withWhitespace) {
|
|
68
|
+
while (tok && tok.type === lexer_1.TOKEN_WHITESPACE) {
|
|
69
|
+
tok = this.tokenizer.nextToken();
|
|
70
|
+
}
|
|
71
|
+
}
|
|
72
|
+
return tok;
|
|
73
|
+
}
|
|
74
|
+
peekToken() {
|
|
75
|
+
this.peeked = this.peeked || this.nextToken();
|
|
76
|
+
return this.peeked;
|
|
77
|
+
}
|
|
78
|
+
pushToken(tok) {
|
|
79
|
+
if (this.peeked) {
|
|
80
|
+
throw new Error('pushToken: can only push one token on between reads');
|
|
81
|
+
}
|
|
82
|
+
this.peeked = tok;
|
|
83
|
+
}
|
|
84
|
+
/**
 * Build (but do not throw) a TemplateError for `msg`.
 * Falls back to the current token's position when none is supplied,
 * and converts 0-based token positions to 1-based for reporting.
 */
error(msg, lineno = 0, colno = 0) {
    if (!lineno || !colno) {
        const tok = this.peekToken();
        lineno = tok?.lineno;
        colno = tok?.colno;
    }
    if (lineno !== undefined) {
        lineno += 1;
    }
    if (colno !== undefined) {
        colno += 1;
    }
    return (0, lib_1.TemplateError)(msg, lineno, colno);
}
|
|
97
|
+
fail(msg, lineno = 0, colno = 0) {
|
|
98
|
+
throw this.error(msg, lineno, colno);
|
|
99
|
+
}
|
|
100
|
+
skip(type) {
|
|
101
|
+
var tok = this.nextToken();
|
|
102
|
+
if (!tok || tok.type !== type) {
|
|
103
|
+
this?.pushToken(tok);
|
|
104
|
+
return false;
|
|
105
|
+
}
|
|
106
|
+
return true;
|
|
107
|
+
}
|
|
108
|
+
expect(type) {
|
|
109
|
+
var tok = this.nextToken();
|
|
110
|
+
if (tok.type !== type) {
|
|
111
|
+
this.fail('expected ' + type + ', got ' + tok.type, tok?.lineno, tok?.colno);
|
|
112
|
+
}
|
|
113
|
+
return tok;
|
|
114
|
+
}
|
|
115
|
+
skipValue(type, val) {
|
|
116
|
+
var tok = this.nextToken();
|
|
117
|
+
if (!tok || tok.type !== type || tok.value !== val) {
|
|
118
|
+
this?.pushToken(tok);
|
|
119
|
+
return false;
|
|
120
|
+
}
|
|
121
|
+
return true;
|
|
122
|
+
}
|
|
123
|
+
skipSymbol(val) {
|
|
124
|
+
return this.skipValue(lexer_1.TOKEN_SYMBOL, val);
|
|
125
|
+
}
|
|
126
|
+
advanceAfterBlockEnd(name) {
|
|
127
|
+
let tok;
|
|
128
|
+
if (!name) {
|
|
129
|
+
tok = this.peekToken();
|
|
130
|
+
if (!tok) {
|
|
131
|
+
lib_1.p.err('End of file?');
|
|
132
|
+
this.fail('unexpected end of file');
|
|
133
|
+
}
|
|
134
|
+
if (tok.type !== lexer_1.TOKEN_SYMBOL) {
|
|
135
|
+
this.fail('advanceAfterBlockEnd: expected symbol token or ' +
|
|
136
|
+
'explicit name to be passed');
|
|
137
|
+
}
|
|
138
|
+
name = this.nextToken().value;
|
|
139
|
+
}
|
|
140
|
+
tok = this.nextToken();
|
|
141
|
+
if (tok && tok.type === lexer_1.TOKEN_BLOCK_END) {
|
|
142
|
+
if (tok.value.charAt(0) === '-') {
|
|
143
|
+
this.dropLeadingWhitespace = true;
|
|
144
|
+
}
|
|
145
|
+
}
|
|
146
|
+
else {
|
|
147
|
+
this.fail('expected block end in ' + name + ' statement');
|
|
148
|
+
}
|
|
149
|
+
return tok;
|
|
150
|
+
}
|
|
151
|
+
advanceAfterVariableEnd() {
|
|
152
|
+
let tok = this.nextToken();
|
|
153
|
+
if (tok && tok.type === lexer_1.TOKEN_VARIABLE_END) {
|
|
154
|
+
this.dropLeadingWhitespace =
|
|
155
|
+
tok.value.charAt(tok.value?.length - this.tokenizer.tags.VARIABLE_END?.length - 1) === '-';
|
|
156
|
+
return;
|
|
157
|
+
}
|
|
158
|
+
this?.pushToken(tok);
|
|
159
|
+
this.fail('expected variable end');
|
|
160
|
+
}
|
|
161
|
+
/**
 * Parse a {% for %} / {% asyncEach %} / {% asyncAll %} loop, including an
 * optional {% else %} branch, returning the corresponding loop node.
 */
parseFor() {
    var forTok = this.peekToken();
    var node;
    var endBlock;
    // Each loop flavor has its own node type and closing tag.
    if (this.skipSymbol('for')) {
        node = new nodes_1.For(forTok?.lineno, forTok?.colno);
        endBlock = 'endfor';
    }
    else if (this.skipSymbol('asyncEach')) {
        node = new nodes_1.AsyncEach(forTok?.lineno, forTok?.colno);
        endBlock = 'endeach';
    }
    else if (this.skipSymbol('asyncAll')) {
        node = new nodes_1.AsyncAll(forTok?.lineno, forTok?.colno);
        endBlock = 'endall';
    }
    else {
        this.fail('parseFor: expected for{Async}', forTok?.lineno, forTok?.colno);
    }
    // The loop variable must be a bare symbol (or a comma list of them).
    node.name = this.parsePrimary();
    if (!(node.name instanceof nodes_1.Symbol)) {
        this.fail('parseFor: variable name expected for loop');
    }
    const type = this.peekToken().type;
    if (type === lexer_1.TOKEN_COMMA) {
        // key/value iteration: wrap the symbols in an ArrayNode.
        const key = node.name;
        node.name = new nodes_1.ArrayNode(key?.lineno, key?.colno);
        node.name.addChild(key);
        while (this.skip(lexer_1.TOKEN_COMMA)) {
            const prim = this.parsePrimary();
            node.name.addChild(prim);
        }
    }
    if (!this.skipSymbol('in')) {
        this.fail('parseFor: expected "in" keyword for loop', forTok?.lineno, forTok?.colno);
    }
    // The iterable may be any expression, including an inline-if.
    node.arr = this.parseInlineIf();
    this.advanceAfterBlockEnd(forTok.value);
    // Body runs until the closing tag or an {% else %}.
    node.body = this.parseUntilBlocks(endBlock, 'else');
    if (this.skipSymbol('else')) {
        this.advanceAfterBlockEnd('else');
        node.else_ = this.parseUntilBlocks(endBlock);
    }
    this.advanceAfterBlockEnd();
    return node;
}
|
|
208
|
+
parseMacro() {
|
|
209
|
+
const macroTok = this.peekToken();
|
|
210
|
+
if (!this.skipSymbol('macro')) {
|
|
211
|
+
this.fail('expected macro');
|
|
212
|
+
}
|
|
213
|
+
const name = this.parsePrimary(true);
|
|
214
|
+
const args = this.parseSignature();
|
|
215
|
+
const node = new nodes_1.Macro(macroTok?.lineno, macroTok?.colno, name, args, null);
|
|
216
|
+
this.advanceAfterBlockEnd(macroTok.value);
|
|
217
|
+
node.body = this.parseUntilBlocks('endmacro');
|
|
218
|
+
this.advanceAfterBlockEnd();
|
|
219
|
+
return node;
|
|
220
|
+
}
|
|
221
|
+
/**
 * Parse {% call macro(...) %} body {% endcall %}.
 * Returns an Output wrapping the macro FunCall, with the call body
 * injected as an extra 'caller' keyword argument.
 */
parseCall() {
    // a call block is parsed as a normal FunCall, but with an added
    // 'caller' kwarg which is a Caller node.
    var callTok = this.peekToken();
    if (!this.skipSymbol('call')) {
        this.fail('expected call');
    }
    // Optional signature on the call tag itself, e.g. {% call(x) m() %}.
    const callerArgs = this.parseSignature(true) || new nodes_1.NodeList();
    const macroCall = this.parsePrimary();
    this.advanceAfterBlockEnd(callTok.value);
    const body = this.parseUntilBlocks('endcall');
    this.advanceAfterBlockEnd();
    const callerName = new nodes_1.Symbol(callTok?.lineno, callTok?.colno, 'caller');
    const callerNode = new nodes_1.Caller(callTok?.lineno, callTok?.colno, callerName, callerArgs, body);
    // add the additional caller kwarg, adding kwargs if necessary
    // (mutates the FunCall's argument list in place).
    const args = macroCall.args.children;
    if (!(args[args?.length - 1] instanceof nodes_1.KeywordArgs)) {
        args?.push(new nodes_1.KeywordArgs());
    }
    const kwargs = args[args?.length - 1];
    kwargs.addChild(new nodes_1.Pair(callTok?.lineno, callTok?.colno, callerName, callerNode));
    //Output takes multiple args
    return new nodes_1.Output(callTok?.lineno, callTok?.colno, [macroCall]);
}
|
|
245
|
+
parseWithContext() {
|
|
246
|
+
var tok = this.peekToken();
|
|
247
|
+
var withContext = null;
|
|
248
|
+
if (this.skipSymbol('with')) {
|
|
249
|
+
withContext = true;
|
|
250
|
+
}
|
|
251
|
+
else if (this.skipSymbol('without')) {
|
|
252
|
+
withContext = false;
|
|
253
|
+
}
|
|
254
|
+
if (withContext !== null) {
|
|
255
|
+
if (!this.skipSymbol('context')) {
|
|
256
|
+
this.fail('parseFrom: expected context after with/without', tok?.lineno, tok?.colno);
|
|
257
|
+
}
|
|
258
|
+
}
|
|
259
|
+
return withContext;
|
|
260
|
+
}
|
|
261
|
+
parseImport() {
|
|
262
|
+
var importTok = this.peekToken();
|
|
263
|
+
if (!this.skipSymbol('import')) {
|
|
264
|
+
this.fail('parseImport: expected import', importTok?.lineno, importTok?.colno);
|
|
265
|
+
}
|
|
266
|
+
const template = this.parseInlineIf();
|
|
267
|
+
if (!this.skipSymbol('as')) {
|
|
268
|
+
this.fail('parseImport: expected "as" keyword', importTok?.lineno, importTok?.colno);
|
|
269
|
+
}
|
|
270
|
+
const target = this.parseInlineIf();
|
|
271
|
+
const withContext = this.parseWithContext();
|
|
272
|
+
const node = new nodes_1.Import(importTok?.lineno, importTok?.colno, template, target, withContext);
|
|
273
|
+
this.advanceAfterBlockEnd(importTok.value);
|
|
274
|
+
return node;
|
|
275
|
+
}
|
|
276
|
+
/**
 * Parse {% from "tpl" import a, b as c [with|without context] %}.
 * Returns a FromImport node listing the imported names (Pairs for aliases).
 */
parseFrom() {
    const fromTok = this.peekToken();
    if (!this.skipSymbol('from')) {
        this.fail('parseFrom: expected from');
    }
    const template = this.parseInlineIf();
    if (!this.skipSymbol('import')) {
        this.fail('parseFrom: expected import', fromTok?.lineno, fromTok?.colno);
    }
    const names = new nodes_1.NodeList();
    let withContext;
    while (1) {
        // eslint-disable-line no-constant-condition
        const nextTok = this.peekToken();
        if (nextTok.type === lexer_1.TOKEN_BLOCK_END) {
            if (!names.children?.length) {
                this.fail('parseFrom: Expected at least one import name', fromTok?.lineno, fromTok?.colno);
            }
            // Since we are manually advancing past the block end,
            // need to keep track of whitespace control (normally
            // this is done in `advanceAfterBlockEnd`
            if (nextTok.value.charAt(0) === '-') {
                this.dropLeadingWhitespace = true;
            }
            this.nextToken();
            break;
        }
        // Names after the first must be comma-separated.
        if (names.children?.length > 0 && !this.skip(lexer_1.TOKEN_COMMA)) {
            this.fail('parseFrom: expected comma', fromTok?.lineno, fromTok?.colno);
        }
        const name = this.parsePrimary();
        // Underscore-prefixed names are considered private to the template.
        if (name.value.charAt(0) === '_') {
            this.fail('parseFrom: names starting with an underscore cannot be imported', name?.lineno, name?.colno);
        }
        if (this.skipSymbol('as')) {
            const alias = this.parsePrimary();
            names.addChild(new nodes_1.Pair(name?.lineno, name?.colno, name, alias));
        }
        else {
            names.addChild(name);
        }
        withContext = this.parseWithContext();
    }
    return new nodes_1.FromImport(fromTok?.lineno, fromTok?.colno, template, names, withContext);
}
|
|
321
|
+
/**
 * Parse {% block name %} ... {% endblock [name] %} into a Block node.
 * The block name may optionally be repeated after 'endblock'.
 */
parseBlock() {
    const tag = this.peekToken();
    if (!this.skipSymbol('block')) {
        this.fail('parseBlock: expected block', tag?.lineno, tag?.colno);
    }
    const node = new nodes_1.Block(tag?.lineno, tag?.colno);
    node.name = this.parsePrimary();
    if (!(node.name instanceof nodes_1.Symbol)) {
        this.fail('parseBlock: variable name expected', tag?.lineno, tag?.colno);
    }
    this.advanceAfterBlockEnd(tag.value);
    node.body = this.parseUntilBlocks('endblock');
    // Consume 'endblock' and an optional repeated block name after it.
    this.skipSymbol('endblock');
    this.skipSymbol(node.name.value);
    const tok = this.peekToken();
    if (!tok) {
        this.fail('parseBlock: expected endblock, got end of file');
    }
    this.advanceAfterBlockEnd(tok.value);
    return node;
}
|
|
342
|
+
parseExtends() {
|
|
343
|
+
const tagName = 'extends';
|
|
344
|
+
lib_1.p.log('Trying to parse extend');
|
|
345
|
+
const tag = this.peekToken();
|
|
346
|
+
if (!this.skipSymbol(tagName)) {
|
|
347
|
+
this.fail('parseTemplateRef: expected ' + tagName);
|
|
348
|
+
}
|
|
349
|
+
const node = new nodes_1.Extends(tag?.lineno, tag?.colno);
|
|
350
|
+
lib_1.p.log('Node is here', node);
|
|
351
|
+
node.template = this.parseInlineIf();
|
|
352
|
+
lib_1.p.log('Template?', node.template);
|
|
353
|
+
this.advanceAfterBlockEnd(tag.value);
|
|
354
|
+
lib_1.p.log('Block end?', node.template);
|
|
355
|
+
return node;
|
|
356
|
+
}
|
|
357
|
+
parseInclude() {
|
|
358
|
+
const tagName = 'include';
|
|
359
|
+
const tag = this.peekToken();
|
|
360
|
+
if (!this.skipSymbol(tagName)) {
|
|
361
|
+
this.fail('parseInclude: expected ' + tagName);
|
|
362
|
+
}
|
|
363
|
+
const node = new nodes_1.Include(tag?.lineno, tag?.colno);
|
|
364
|
+
node.template = this.parseInlineIf();
|
|
365
|
+
if (this.skipSymbol('ignore') && this.skipSymbol('missing')) {
|
|
366
|
+
node.ignoreMissing = true;
|
|
367
|
+
}
|
|
368
|
+
this.advanceAfterBlockEnd(tag.value);
|
|
369
|
+
return node;
|
|
370
|
+
}
|
|
371
|
+
/**
 * Parse {% if %} / {% elif %} / {% elseif %} / {% ifAsync %} chains.
 * elif/elseif branches recurse: each becomes the `else_` of its parent.
 */
parseIf() {
    const tag = this.peekToken();
    let node;
    if (this.skipSymbol('if') ||
        this.skipSymbol('elif') ||
        this.skipSymbol('elseif')) {
        node = new nodes_1.If(tag?.lineno, tag?.colno);
    }
    else if (this.skipSymbol('ifAsync')) {
        node = new nodes_1.IfAsync(tag?.lineno, tag?.colno);
    }
    else {
        this.fail('parseIf: expected if, elif, or elseif', tag?.lineno, tag?.colno);
    }
    node.cond = this.parseInlineIf();
    this.advanceAfterBlockEnd(tag.value);
    // Body runs until whichever continuation/terminator tag appears first.
    node.body = this.parseUntilBlocks('elif', 'elseif', 'else', 'endif');
    const tok = this.peekToken();
    // `tok && tok.value` yields undefined at EOF, which lands in `default`.
    switch (tok && tok.value) {
        case 'elseif':
        case 'elif':
            // Recursive parse: the chained branch becomes this node's else_.
            node.else_ = this.parseIf();
            break;
        case 'else':
            this.advanceAfterBlockEnd();
            node.else_ = this.parseUntilBlocks('endif');
            this.advanceAfterBlockEnd();
            break;
        case 'endif':
            node.else_ = null;
            this.advanceAfterBlockEnd();
            break;
        default:
            this.fail('parseIf: expected elif, else, or endif, got end of file');
    }
    return node;
}
|
|
408
|
+
/**
 * Parse {% set a, b = expr %} (value form) or
 * {% set a %} body {% endset %} (capture form) into a Set node.
 */
parseSet() {
    const tag = this.peekToken();
    if (!this.skipSymbol('set')) {
        this.fail('parseSet: expected set', tag?.lineno, tag?.colno);
    }
    const node = new nodes_1.Set(tag?.lineno, tag?.colno, []);
    let target;
    // Collect one or more comma-separated assignment targets.
    while ((target = this.parsePrimary())) {
        node.targets?.push(target);
        if (!this.skip(lexer_1.TOKEN_COMMA)) {
            break;
        }
    }
    if (!this.skipValue(lexer_1.TOKEN_OPERATOR, '=')) {
        // No '=': must be the capture form, so a block end follows directly.
        if (!this.skip(lexer_1.TOKEN_BLOCK_END)) {
            this.fail('parseSet: expected = or block end in set tag', tag?.lineno, tag?.colno);
        }
        else {
            // Capture everything up to {% endset %} as the value.
            node.body = new nodes_1.Capture(tag?.lineno, tag?.colno, this.parseUntilBlocks('endset'));
            node.value = null;
            this.advanceAfterBlockEnd();
        }
    }
    else {
        node.value = this.parseInlineIf();
        this.advanceAfterBlockEnd(tag.value);
    }
    return node;
}
|
|
437
|
+
/**
 * Parse {% switch expr %} {% case v %} ... {% default %} ... {% endswitch %}
 * into a Switch node with an array of Case nodes and an optional default.
 */
parseSwitch() {
    /*
     * Store the tag names in variables in case someone ever wants to
     * customize this.
     */
    const switchStart = 'switch';
    const switchEnd = 'endswitch';
    const caseStart = 'case';
    const caseDefault = 'default';
    // Get the switch tag.
    const tag = this.peekToken();
    // fail early if we get some unexpected tag.
    if (!this.skipSymbol(switchStart) &&
        !this.skipSymbol(caseStart) &&
        !this.skipSymbol(caseDefault)) {
        this.fail('parseSwitch: expected "switch," "case" or "default"', tag?.lineno, tag?.colno);
    }
    // parse the switch expression
    const expr = this.parseInlineIf();
    // advance until a start of a case, a default case or an endswitch.
    this.advanceAfterBlockEnd(switchStart);
    this.parseUntilBlocks(caseStart, caseDefault, switchEnd);
    // this is the first case. it could also be an endswitch, we'll check.
    let tok = this.peekToken();
    // create new variables for our cases and default case.
    const cases = [];
    let defaultCase;
    // while we're dealing with new cases nodes...
    do {
        // skip the start symbol and get the case expression
        this.skipSymbol(caseStart);
        const cond = this.parseInlineIf();
        this.advanceAfterBlockEnd(switchStart);
        // get the body of the case node and add it to the array of cases.
        const body = this.parseUntilBlocks(caseStart, caseDefault, switchEnd);
        cases?.push(new nodes_1.Case(tok?.lineno, tok?.colno, cond, body));
        // get our next case
        tok = this.peekToken();
    } while (tok && tok.value === caseStart);
    // we either have a default case or a switch end.
    switch (tok.value) {
        case caseDefault:
            this.advanceAfterBlockEnd();
            defaultCase = this.parseUntilBlocks(switchEnd);
            this.advanceAfterBlockEnd();
            break;
        case switchEnd:
            this.advanceAfterBlockEnd();
            break;
        default:
            // otherwise bail because EOF
            this.fail('parseSwitch: expected "case," "default" or "endswitch," got EOF.');
    }
    // and return the switch node.
    return new nodes_1.Switch(tag?.lineno, tag?.colno, expr, cases, defaultCase);
}
|
|
493
|
+
/**
 * Dispatch on the current tag symbol to the matching parse* method.
 * Returns null when the tag is one of the registered break-on blocks
 * (so parseUntilBlocks can stop), otherwise the parsed statement node.
 * Unknown tags are offered to registered extensions before failing.
 */
parseStatement() {
    let tok = this.peekToken();
    let node;
    if (tok.type !== lexer_1.TOKEN_SYMBOL) {
        this.fail('tag name expected', tok?.lineno, tok?.colno);
    }
    // A terminator tag (e.g. 'endfor' inside parseUntilBlocks) ends the
    // current buffer rather than starting a new statement.
    if (this.breakOnBlocks && this.breakOnBlocks.indexOf(tok.value) !== -1) {
        return null;
    }
    switch (tok.value) {
        case 'raw':
        case 'verbatim':
            return this.parseRaw(tok.value); //'raw' | 'verbatim'
        case 'if':
        case 'ifAsync':
            return this.parseIf();
        case 'for':
        case 'asyncEach':
        case 'asyncAll':
            return this.parseFor();
        case 'block':
            return this.parseBlock();
        case 'extends':
            return this.parseExtends();
        case 'include':
            return this.parseInclude();
        case 'set':
            return this.parseSet();
        case 'macro':
            return this.parseMacro();
        case 'call':
            return this.parseCall();
        case 'import':
            return this.parseImport();
        case 'from':
            return this.parseFrom();
        case 'filter':
            return this.parseFilterStatement();
        case 'switch':
            return this.parseSwitch();
        default:
            // Let custom extensions handle any tag they registered.
            if (this.extensions?.length) {
                for (let i = 0; i < this.extensions?.length; i++) {
                    const ext = this.extensions[i];
                    if ((ext.tags || []).indexOf(tok.value) !== -1) {
                        return ext.parse(this, node, lexer);
                    }
                }
            }
            this.fail('unknown block tag: ' + tok.value, tok?.lineno, tok?.colno);
    }
    // Unreachable in practice: every case above returns or throws.
    return node;
}
|
|
546
|
+
/**
 * Parse a {% raw %}/{% verbatim %} block by scanning the raw character
 * stream (bypassing the lexer) until the matching end tag, honoring
 * nested raw blocks. Returns an Output holding the literal text.
 */
parseRaw(tagName = 'raw') {
    const endTagName = 'end' + tagName;
    // Look for upcoming raw blocks (ignore all other kinds of blocks)
    const rawBlockRegex = new RegExp('([\\s\\S]*?){%\\s*(' + tagName + '|' + endTagName + ')\\s*(?=%})%}');
    let rawLevel = 1;
    let str = '';
    let matches = null;
    // Skip opening raw token
    // Keep this token to track line and column numbers
    const begun = this.advanceAfterBlockEnd();
    // Exit when there's nothing to match
    // or when we've found the matching "endraw" block
    while ((matches = this.tokenizer._extractRegex(rawBlockRegex)) &&
        rawLevel > 0) {
        const all = matches[0];
        const pre = matches[1];
        const blockName = matches[2];
        // Adjust rawlevel (nested raw blocks increase it).
        if (blockName === tagName) {
            rawLevel += 1;
        }
        else if (blockName === endTagName) {
            rawLevel -= 1;
        }
        // Add to str
        if (rawLevel === 0) {
            // We want to exclude the last "endraw"
            str += pre;
            // Move tokenizer to beginning of endraw block
            this.tokenizer.backN(all?.length - pre?.length);
        }
        else {
            str += all;
        }
    }
    return new nodes_1.Output(begun?.lineno, begun?.colno, [
        new nodes_1.TemplateData(begun?.lineno, begun?.colno, str),
    ]);
}
|
|
585
|
+
/**
 * Apply postfix operations to `node` as long as they keep appearing:
 * function calls `f(...)`, bracket lookups `x[e]`, and dotted lookups `x.y`.
 * Returns the (possibly wrapped) node.
 */
parsePostfix(node) {
    let lookup;
    let tok = this.peekToken();
    while (tok) {
        if (tok.type === lexer_1.TOKEN_LEFT_PAREN) {
            // Function call
            node = new nodes_1.FunCall(tok?.lineno, tok?.colno, node, this.parseSignature());
        }
        else if (tok.type === lexer_1.TOKEN_LEFT_BRACKET) {
            // Reference: brackets may hold exactly one index expression.
            lookup = this.parseAggregate();
            if (lookup.children?.length > 1) {
                this.fail('invalid index');
            }
            node = new nodes_1.LookupVal(tok?.lineno, tok?.colno, node, lookup.children[0]);
        }
        else if (tok.type === lexer_1.TOKEN_OPERATOR && tok.value === '.') {
            // Reference: dotted access, e.g. `user.name`.
            this.nextToken();
            const val = this.nextToken();
            if (val.type !== lexer_1.TOKEN_SYMBOL) {
                this.fail('expected name as lookup value, got ' + val.value, val?.lineno, val?.colno);
            }
            // Make a literal string because it's not a variable
            // reference
            lookup = new nodes_1.Literal(val?.lineno, val?.colno, val.value);
            node = new nodes_1.LookupVal(tok?.lineno, tok?.colno, node, lookup);
        }
        else {
            // No more postfix operators: stop.
            break;
        }
        tok = this.peekToken();
    }
    return node;
}
|
|
620
|
+
parseInlineIf() {
|
|
621
|
+
let node = this.parseOr();
|
|
622
|
+
if (this.skipSymbol('if')) {
|
|
623
|
+
node = new nodes_1.InlineIf(node?.lineno, node?.colno, this.parseOr(), node, null);
|
|
624
|
+
if (this.skipSymbol('else')) {
|
|
625
|
+
node.else_ = this.parseOr();
|
|
626
|
+
}
|
|
627
|
+
}
|
|
628
|
+
return node;
|
|
629
|
+
}
|
|
630
|
+
parseOr() {
|
|
631
|
+
let node = this.parseAnd();
|
|
632
|
+
while (this.skipSymbol('or')) {
|
|
633
|
+
node = new nodes_1.Or(node?.lineno, node?.colno, node, this.parseAnd());
|
|
634
|
+
}
|
|
635
|
+
return node;
|
|
636
|
+
}
|
|
637
|
+
parseAnd() {
|
|
638
|
+
let node = this.parseNot();
|
|
639
|
+
while (this.skipSymbol('and')) {
|
|
640
|
+
node = new nodes_1.And(node?.lineno, node?.colno, node, this.parseNot());
|
|
641
|
+
}
|
|
642
|
+
return node;
|
|
643
|
+
}
|
|
644
|
+
parseNot() {
|
|
645
|
+
const tok = this.peekToken();
|
|
646
|
+
if (this.skipSymbol('not')) {
|
|
647
|
+
return new nodes_1.Not(tok?.lineno, tok?.colno, this.parseNot());
|
|
648
|
+
}
|
|
649
|
+
return this.parseIn();
|
|
650
|
+
}
|
|
651
|
+
/**
 * Parse `expr [not] in expr` chains. A 'not' immediately before 'in'
 * negates the membership test; a lone 'not' is pushed back untouched.
 */
parseIn() {
    let node = this.parseIs();
    while (1) {
        // eslint-disable-line no-constant-condition
        // check if the next token is 'not'
        const tok = this.nextToken();
        if (!tok) {
            break;
        }
        const invert = tok.type === lexer_1.TOKEN_SYMBOL && tok.value === 'not';
        // if it wasn't 'not', put it back
        if (!invert) {
            this?.pushToken(tok);
        }
        if (this.skipSymbol('in')) {
            node = new nodes_1.In(node?.lineno, node?.colno, node, this.parseIs());
            if (invert) {
                node = new nodes_1.Not(node?.lineno, node?.colno, node);
            }
        }
        else {
            // if we'd found a 'not' but this wasn't an 'in', put back the 'not'
            if (invert) {
                this?.pushToken(tok);
            }
            break;
        }
    }
    return node;
}
|
|
681
|
+
// I put this right after "in" in the operator precedence stack. That can
|
|
682
|
+
// obviously be changed to be closer to Jinja.
|
|
683
|
+
parseIs() {
|
|
684
|
+
let node = this.parseCompare();
|
|
685
|
+
// look for an is
|
|
686
|
+
if (this.skipSymbol('is')) {
|
|
687
|
+
// look for a not
|
|
688
|
+
const not = this.skipSymbol('not');
|
|
689
|
+
// get the next node
|
|
690
|
+
// create an Is node using the next node and the info from our Is node.
|
|
691
|
+
node = new nodes_1.Is(node?.lineno, node?.colno, node, this.parseCompare());
|
|
692
|
+
// if we have a Not, create a Not node from our Is node.
|
|
693
|
+
if (not) {
|
|
694
|
+
node = new nodes_1.Not(node?.lineno, node?.colno, node);
|
|
695
|
+
}
|
|
696
|
+
}
|
|
697
|
+
// return the node.
|
|
698
|
+
return node;
|
|
699
|
+
}
|
|
700
|
+
parseCompare() {
|
|
701
|
+
const compareOps = ['==', '===', '!=', '!==', '<', '>', '<=', '>='];
|
|
702
|
+
const expr = this.parseConcat();
|
|
703
|
+
const ops = [];
|
|
704
|
+
while (1) {
|
|
705
|
+
// eslint-disable-line no-constant-condition
|
|
706
|
+
const tok = this.nextToken();
|
|
707
|
+
if (!tok) {
|
|
708
|
+
break;
|
|
709
|
+
}
|
|
710
|
+
else if (compareOps.indexOf(tok.value) !== -1) {
|
|
711
|
+
ops?.push(new nodes_1.CompareOperand(tok?.lineno, tok?.colno, this.parseConcat(), tok.value));
|
|
712
|
+
}
|
|
713
|
+
else {
|
|
714
|
+
this?.pushToken(tok);
|
|
715
|
+
break;
|
|
716
|
+
}
|
|
717
|
+
}
|
|
718
|
+
if (ops?.length) {
|
|
719
|
+
return new nodes_1.Compare(ops[0]?.lineno, ops[0]?.colno, expr, ops);
|
|
720
|
+
}
|
|
721
|
+
else {
|
|
722
|
+
return expr;
|
|
723
|
+
}
|
|
724
|
+
}
|
|
725
|
+
// finds the '~' for string concatenation
|
|
726
|
+
parseConcat() {
|
|
727
|
+
let node = this.parseAdd();
|
|
728
|
+
while (this.skipValue(lexer_1.TOKEN_TILDE, '~')) {
|
|
729
|
+
node = new nodes_1.Concat(node?.lineno, node?.colno, node, this.parseAdd());
|
|
730
|
+
}
|
|
731
|
+
return node;
|
|
732
|
+
}
|
|
733
|
+
parseAdd() {
|
|
734
|
+
let node = this.parseSub();
|
|
735
|
+
while (this.skipValue(lexer_1.TOKEN_OPERATOR, '+')) {
|
|
736
|
+
node = new nodes_1.Add(node?.lineno, node?.colno, node, this.parseSub());
|
|
737
|
+
}
|
|
738
|
+
return node;
|
|
739
|
+
}
|
|
740
|
+
parseSub() {
|
|
741
|
+
let node = this.parseMul();
|
|
742
|
+
while (this.skipValue(lexer_1.TOKEN_OPERATOR, '-')) {
|
|
743
|
+
node = new nodes_1.Sub(node?.lineno, node?.colno, node, this.parseMul());
|
|
744
|
+
}
|
|
745
|
+
return node;
|
|
746
|
+
}
|
|
747
|
+
parseMul() {
|
|
748
|
+
let node = this.parseDiv();
|
|
749
|
+
while (this.skipValue(lexer_1.TOKEN_OPERATOR, '*')) {
|
|
750
|
+
node = new nodes_1.Mul(node?.lineno, node?.colno, node, this.parseDiv());
|
|
751
|
+
}
|
|
752
|
+
return node;
|
|
753
|
+
}
|
|
754
|
+
parseDiv() {
|
|
755
|
+
let node = this.parseFloorDiv();
|
|
756
|
+
while (this.skipValue(lexer_1.TOKEN_OPERATOR, '/')) {
|
|
757
|
+
node = new nodes_1.Div(node?.lineno, node?.colno, node, this.parseFloorDiv());
|
|
758
|
+
}
|
|
759
|
+
return node;
|
|
760
|
+
}
|
|
761
|
+
parseFloorDiv() {
|
|
762
|
+
let node = this.parseMod();
|
|
763
|
+
while (this.skipValue(lexer_1.TOKEN_OPERATOR, '//')) {
|
|
764
|
+
node = new nodes_1.FloorDiv(node?.lineno, node?.colno, node, this.parseMod());
|
|
765
|
+
}
|
|
766
|
+
return node;
|
|
767
|
+
}
|
|
768
|
+
parseMod() {
|
|
769
|
+
let node = this.parsePow();
|
|
770
|
+
while (this.skipValue(lexer_1.TOKEN_OPERATOR, '%')) {
|
|
771
|
+
const node2 = this.parsePow();
|
|
772
|
+
node = new nodes_1.Mod(node?.lineno, node?.colno, node, node2);
|
|
773
|
+
}
|
|
774
|
+
return node;
|
|
775
|
+
}
|
|
776
|
+
parsePow() {
|
|
777
|
+
let node = this.parseUnary();
|
|
778
|
+
while (this.skipValue(lexer_1.TOKEN_OPERATOR, '**')) {
|
|
779
|
+
node = new nodes_1.Pow(node?.lineno, node?.colno, node, this.parseUnary());
|
|
780
|
+
}
|
|
781
|
+
return node;
|
|
782
|
+
}
|
|
783
|
+
parseUnary(noFilters = false) {
|
|
784
|
+
const tok = this.peekToken();
|
|
785
|
+
let node;
|
|
786
|
+
if (this.skipValue(lexer_1.TOKEN_OPERATOR, '-')) {
|
|
787
|
+
node = new nodes_1.Neg(tok?.lineno, tok?.colno, this.parseUnary(true));
|
|
788
|
+
}
|
|
789
|
+
else if (this.skipValue(lexer_1.TOKEN_OPERATOR, '+')) {
|
|
790
|
+
node = new nodes_1.Pos(tok?.lineno, tok?.colno, this.parseUnary(true));
|
|
791
|
+
}
|
|
792
|
+
else {
|
|
793
|
+
node = this.parsePrimary();
|
|
794
|
+
}
|
|
795
|
+
if (!noFilters) {
|
|
796
|
+
node = this.parseFilter(node);
|
|
797
|
+
}
|
|
798
|
+
return node;
|
|
799
|
+
}
|
|
800
|
+
// Parses a primary expression: a literal (string, int, float, boolean,
// none, regex), a symbol reference, or an aggregate ((), [], {}).
// When `noPostfix` is false, postfix operations (lookups/calls) are
// parsed and attached afterwards via parsePostfix.
parsePrimary(noPostfix = false) {
    const tok = this.nextToken();
    let val;
    let node = null;
    if (!tok) {
        this.fail('expected expression, got end of file');
    }
    else if (tok.type === lexer_1.TOKEN_STRING) {
        val = tok.value;
    }
    else if (tok.type === lexer_1.TOKEN_INT) {
        val = parseInt(tok.value, 10);
    }
    else if (tok.type === lexer_1.TOKEN_FLOAT) {
        val = parseFloat(tok.value);
    }
    else if (tok.type === lexer_1.TOKEN_BOOLEAN) {
        if (tok.value === 'true') {
            val = true;
        }
        else if (tok.value === 'false') {
            val = false;
        }
        else {
            this.fail('invalid boolean: ' + tok.value, tok?.lineno, tok?.colno);
        }
    }
    else if (tok.type === lexer_1.TOKEN_NONE) {
        // Template `none` becomes a JS null literal.
        val = null;
    }
    else if (tok.type === lexer_1.TOKEN_REGEX) {
        val = new RegExp(tok.value.body, tok.value.flags);
    }
    // `val` is still undefined for symbol/aggregate tokens. Note that
    // null (from TOKEN_NONE) and false deliberately pass this check and
    // become Literal nodes — hence !== undefined, not truthiness.
    if (val !== undefined) {
        node = new nodes_1.Literal(tok?.lineno, tok?.colno, val);
    }
    else if (tok.type === lexer_1.TOKEN_SYMBOL) {
        node = new nodes_1.Symbol(tok?.lineno, tok?.colno, tok.value);
    }
    else {
        // See if it's an aggregate type; we need to push the
        // current delimiter token back on for parseAggregate to consume.
        this?.pushToken(tok);
        node = this.parseAggregate();
    }
    if (!noPostfix) {
        node = this.parsePostfix(node);
    }
    // parseAggregate may return null for an unrecognized token; report
    // that as an error here rather than returning a null node.
    if (node) {
        return node;
    }
    else {
        throw this.error(`unexpected token: ${tok.value}`, tok?.lineno, tok?.colno);
    }
}
|
|
855
|
+
parseFilterName() {
|
|
856
|
+
const tok = this.expect(lexer_1.TOKEN_SYMBOL);
|
|
857
|
+
let name = tok.value;
|
|
858
|
+
while (this.skipValue(lexer_1.TOKEN_OPERATOR, '.')) {
|
|
859
|
+
name += '.' + this.expect(lexer_1.TOKEN_SYMBOL).value;
|
|
860
|
+
}
|
|
861
|
+
return new nodes_1.Symbol(tok?.lineno, tok?.colno, name);
|
|
862
|
+
}
|
|
863
|
+
parseFilterArgs(node) {
|
|
864
|
+
if (this.peekToken().type === lexer_1.TOKEN_LEFT_PAREN) {
|
|
865
|
+
// Get a FunCall node and add the parameters to the
|
|
866
|
+
// filter
|
|
867
|
+
const call = this.parsePostfix(node);
|
|
868
|
+
return call.args.children;
|
|
869
|
+
}
|
|
870
|
+
return [];
|
|
871
|
+
}
|
|
872
|
+
parseFilter(node) {
|
|
873
|
+
while (this.skip(lexer_1.TOKEN_PIPE)) {
|
|
874
|
+
const name = this.parseFilterName();
|
|
875
|
+
node = new nodes_1.Filter(name?.lineno, name?.colno, name, new nodes_1.NodeList(name?.lineno, name?.colno, [node].concat(this.parseFilterArgs(node))), null // Todo recheck this
|
|
876
|
+
);
|
|
877
|
+
}
|
|
878
|
+
return node;
|
|
879
|
+
}
|
|
880
|
+
parseFilterStatement() {
|
|
881
|
+
var filterTok = this.peekToken();
|
|
882
|
+
if (!this.skipSymbol('filter')) {
|
|
883
|
+
this.fail('parseFilterStatement: expected filter');
|
|
884
|
+
}
|
|
885
|
+
const name = this.parseFilterName();
|
|
886
|
+
const args = this.parseFilterArgs(name);
|
|
887
|
+
this.advanceAfterBlockEnd(filterTok.value);
|
|
888
|
+
const body = new nodes_1.Capture(name?.lineno, name?.colno, this.parseUntilBlocks('endfilter'));
|
|
889
|
+
this.advanceAfterBlockEnd();
|
|
890
|
+
const node = new nodes_1.Filter(name?.lineno, name?.colno, name, new nodes_1.NodeList(name?.lineno, name?.colno, [body].concat(args)), null);
|
|
891
|
+
return new nodes_1.Output(name?.lineno, name?.colno, [node]);
|
|
892
|
+
}
|
|
893
|
+
parseAggregate() {
|
|
894
|
+
var tok = this.nextToken();
|
|
895
|
+
var node;
|
|
896
|
+
switch (tok.type) {
|
|
897
|
+
case lexer_1.TOKEN_LEFT_PAREN:
|
|
898
|
+
node = new nodes_1.Group(tok?.lineno, tok?.colno);
|
|
899
|
+
break;
|
|
900
|
+
case lexer_1.TOKEN_LEFT_BRACKET:
|
|
901
|
+
node = new nodes_1.ArrayNode(tok?.lineno, tok?.colno);
|
|
902
|
+
break;
|
|
903
|
+
case lexer_1.TOKEN_LEFT_CURLY:
|
|
904
|
+
node = new nodes_1.Dict(tok?.lineno, tok?.colno);
|
|
905
|
+
break;
|
|
906
|
+
default:
|
|
907
|
+
return null;
|
|
908
|
+
}
|
|
909
|
+
while (1) {
|
|
910
|
+
// eslint-disable-line no-constant-condition
|
|
911
|
+
const type = this.peekToken().type;
|
|
912
|
+
if (type === lexer.TOKEN_RIGHT_PAREN ||
|
|
913
|
+
type === lexer.TOKEN_RIGHT_BRACKET ||
|
|
914
|
+
type === lexer.TOKEN_RIGHT_CURLY) {
|
|
915
|
+
this.nextToken();
|
|
916
|
+
break;
|
|
917
|
+
}
|
|
918
|
+
if (node.children?.length > 0) {
|
|
919
|
+
if (!this.skip(lexer.TOKEN_COMMA)) {
|
|
920
|
+
this.fail('parseAggregate: expected comma after expression', tok?.lineno, tok?.colno);
|
|
921
|
+
}
|
|
922
|
+
}
|
|
923
|
+
if (node instanceof nodes_1.Dict) {
|
|
924
|
+
// TODO: check for errors
|
|
925
|
+
const key = this.parsePrimary();
|
|
926
|
+
// We expect a key/value pair for dicts, separated by a
|
|
927
|
+
// colon
|
|
928
|
+
if (!this.skip(lexer.TOKEN_COLON)) {
|
|
929
|
+
this.fail('parseAggregate: expected colon after dict key', tok?.lineno, tok?.colno);
|
|
930
|
+
}
|
|
931
|
+
// TODO: check for errors
|
|
932
|
+
const value = this.parseInlineIf();
|
|
933
|
+
node.addChild(new nodes_1.Pair(key?.lineno, key?.colno, key, value));
|
|
934
|
+
}
|
|
935
|
+
else {
|
|
936
|
+
// TODO: check for errors
|
|
937
|
+
const expr = this.parseInlineIf();
|
|
938
|
+
node.addChild(expr);
|
|
939
|
+
}
|
|
940
|
+
}
|
|
941
|
+
return node;
|
|
942
|
+
}
|
|
943
|
+
// Parses a call/macro signature into a NodeList of positional args;
// any `name=value` pairs are collected into a KeywordArgs node that is
// appended as the final child. With `noParens`, parsing runs until the
// block-end token instead of a closing paren. With `tolerant`, a
// missing opening paren returns null instead of failing.
parseSignature(tolerant, noParens) {
    let tok = this.peekToken();
    if (!noParens && tok.type !== lexer.TOKEN_LEFT_PAREN) {
        if (tolerant) {
            return null;
        }
        else {
            this.fail('expected arguments', tok?.lineno, tok?.colno);
        }
    }
    if (tok.type === lexer.TOKEN_LEFT_PAREN) {
        tok = this.nextToken();
    }
    const args = new nodes_1.NodeList(tok?.lineno, tok?.colno);
    const kwargs = new nodes_1.KeywordArgs(tok?.lineno, tok?.colno);
    // After the first argument, each subsequent one must be preceded by
    // a comma.
    let checkComma = false;
    while (1) {
        // eslint-disable-line no-constant-condition
        tok = this.peekToken();
        if (!noParens && tok.type === lexer.TOKEN_RIGHT_PAREN) {
            this.nextToken();
            break;
        }
        else if (noParens && tok.type === lexer.TOKEN_BLOCK_END) {
            break;
        }
        if (checkComma && !this.skip(lexer_1.TOKEN_COMMA)) {
            this.fail('parseSignature: expected comma after expression', tok?.lineno, tok?.colno);
        }
        else {
            const arg = this.parseInlineIf();
            // `name=value` becomes a keyword-argument Pair; anything
            // else is positional.
            if (this.skipValue(lexer.TOKEN_OPERATOR, '=')) {
                kwargs.addChild(new nodes_1.Pair(arg?.lineno, arg?.colno, arg, this.parseInlineIf()));
            }
            else {
                args.addChild(arg);
            }
        }
        checkComma = true;
    }
    // Keyword args, if any, ride along as the final child of `args`.
    if (kwargs.children?.length) {
        args.addChild(kwargs);
    }
    return args;
}
|
|
988
|
+
parseUntilBlocks(...blockNames) {
|
|
989
|
+
const prev = this.breakOnBlocks;
|
|
990
|
+
this.breakOnBlocks = blockNames;
|
|
991
|
+
const ret = this.parse();
|
|
992
|
+
this.breakOnBlocks = prev;
|
|
993
|
+
return ret;
|
|
994
|
+
}
|
|
995
|
+
// Top-level token loop: converts the token stream into an array of AST
// nodes until EOF, or until parseStatement() returns null (which
// signals a breakOnBlocks terminator — see parseUntilBlocks).
parseNodes() {
    let tok;
    const buf = [];
    while ((tok = this.nextToken())) {
        if (tok.type === lexer.TOKEN_DATA) {
            let data = tok.value;
            const nextToken = this.peekToken();
            const nextVal = nextToken && nextToken.value;
            // If the last token has "-" we need to trim the
            // leading whitespace of the data. This is marked with
            // the `dropLeadingWhitespace` variable.
            if (this.dropLeadingWhitespace) {
                // TODO: this could be optimized (don't use regex)
                data = data.replace(/^\s*/, '');
                this.dropLeadingWhitespace = false;
            }
            // Same for the succeeding block start token: a trailing "-"
            // on {%-, {{- or a "-" right after the comment opener trims
            // the trailing whitespace of this data chunk.
            if (nextToken &&
                ((nextToken.type === lexer.TOKEN_BLOCK_START &&
                    nextVal.charAt(nextVal?.length - 1) === '-') ||
                    (nextToken.type === lexer.TOKEN_VARIABLE_START &&
                        nextVal.charAt(this.tokenizer.tags.VARIABLE_START?.length) ===
                            '-') ||
                    (nextToken.type === lexer.TOKEN_COMMENT &&
                        nextVal.charAt(this.tokenizer.tags.COMMENT_START?.length) ===
                            '-'))) {
                // TODO: this could be optimized (don't use regex)
                data = data.replace(/\s*$/, '');
            }
            buf?.push(new nodes_1.Output(tok?.lineno, tok?.colno, [
                new nodes_1.TemplateData(tok?.lineno, tok?.colno, data),
            ]));
        }
        else if (tok.type === lexer.TOKEN_BLOCK_START) {
            this.dropLeadingWhitespace = false;
            const n = this.parseStatement();
            // A null statement means a breakOnBlocks terminator was hit;
            // stop and let the caller resume after it.
            if (!n) {
                break;
            }
            buf?.push(n);
        }
        else if (tok.type === lexer.TOKEN_VARIABLE_START) {
            const e = this.parseInlineIf();
            this.dropLeadingWhitespace = false;
            this.advanceAfterVariableEnd();
            buf?.push(new nodes_1.Output(tok?.lineno, tok?.colno, [e]));
        }
        else if (tok.type === lexer.TOKEN_COMMENT) {
            // Comments emit no node; only record whether the comment ends
            // with a whitespace-trimming "-" marker for the next chunk.
            this.dropLeadingWhitespace =
                tok.value.charAt(tok.value?.length - this.tokenizer.tags.COMMENT_END?.length - 1) === '-';
        }
        else {
            // Comments are handled above; any other token type at the
            // top level is an error.
            this.fail('Unexpected token at top-level: ' + tok.type, tok?.lineno, tok?.colno);
        }
    }
    return buf;
}
|
|
1053
|
+
parse() {
|
|
1054
|
+
return new nodes_1.NodeList(0, 0, this.parseNodes());
|
|
1055
|
+
}
|
|
1056
|
+
parseAsRoot() {
|
|
1057
|
+
return new nodes_1.Root(0, 0, this.parseNodes());
|
|
1058
|
+
}
|
|
1059
|
+
}
|
|
1060
|
+
// CommonJS export of the Parser class (transpiled `export class Parser`).
exports.Parser = Parser;
|
|
1061
|
+
// Convenience entry point: lex `src` with the given lexer options,
// optionally install custom tag extensions on the parser, and return
// the Root AST node for the whole template.
const parse = (src, extensions, opts) => {
    const parser = new Parser(lexer.lex(src, opts));
    if (extensions !== undefined) {
        parser.extensions = extensions;
    }
    return parser.parseAsRoot();
};
exports.parse = parse;
|