liquid2 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- checksums.yaml.gz.sig +0 -0
- data/.rubocop.yml +46 -0
- data/.ruby-version +1 -0
- data/.vscode/settings.json +32 -0
- data/CHANGELOG.md +5 -0
- data/LICENSE.txt +21 -0
- data/LICENSE_SHOPIFY.txt +20 -0
- data/README.md +219 -0
- data/Rakefile +23 -0
- data/Steepfile +26 -0
- data/lib/liquid2/context.rb +297 -0
- data/lib/liquid2/environment.rb +287 -0
- data/lib/liquid2/errors.rb +79 -0
- data/lib/liquid2/expression.rb +20 -0
- data/lib/liquid2/expressions/arguments.rb +25 -0
- data/lib/liquid2/expressions/array.rb +20 -0
- data/lib/liquid2/expressions/blank.rb +41 -0
- data/lib/liquid2/expressions/boolean.rb +20 -0
- data/lib/liquid2/expressions/filtered.rb +136 -0
- data/lib/liquid2/expressions/identifier.rb +43 -0
- data/lib/liquid2/expressions/lambda.rb +53 -0
- data/lib/liquid2/expressions/logical.rb +71 -0
- data/lib/liquid2/expressions/loop.rb +79 -0
- data/lib/liquid2/expressions/path.rb +33 -0
- data/lib/liquid2/expressions/range.rb +28 -0
- data/lib/liquid2/expressions/relational.rb +119 -0
- data/lib/liquid2/expressions/template_string.rb +20 -0
- data/lib/liquid2/filter.rb +95 -0
- data/lib/liquid2/filters/array.rb +202 -0
- data/lib/liquid2/filters/date.rb +20 -0
- data/lib/liquid2/filters/default.rb +16 -0
- data/lib/liquid2/filters/json.rb +15 -0
- data/lib/liquid2/filters/math.rb +87 -0
- data/lib/liquid2/filters/size.rb +11 -0
- data/lib/liquid2/filters/slice.rb +17 -0
- data/lib/liquid2/filters/sort.rb +96 -0
- data/lib/liquid2/filters/string.rb +204 -0
- data/lib/liquid2/loader.rb +59 -0
- data/lib/liquid2/loaders/file_system_loader.rb +76 -0
- data/lib/liquid2/loaders/mixins.rb +52 -0
- data/lib/liquid2/node.rb +113 -0
- data/lib/liquid2/nodes/comment.rb +18 -0
- data/lib/liquid2/nodes/output.rb +24 -0
- data/lib/liquid2/nodes/tags/assign.rb +35 -0
- data/lib/liquid2/nodes/tags/block_comment.rb +26 -0
- data/lib/liquid2/nodes/tags/capture.rb +40 -0
- data/lib/liquid2/nodes/tags/case.rb +111 -0
- data/lib/liquid2/nodes/tags/cycle.rb +63 -0
- data/lib/liquid2/nodes/tags/decrement.rb +29 -0
- data/lib/liquid2/nodes/tags/doc.rb +24 -0
- data/lib/liquid2/nodes/tags/echo.rb +31 -0
- data/lib/liquid2/nodes/tags/extends.rb +3 -0
- data/lib/liquid2/nodes/tags/for.rb +155 -0
- data/lib/liquid2/nodes/tags/if.rb +84 -0
- data/lib/liquid2/nodes/tags/include.rb +123 -0
- data/lib/liquid2/nodes/tags/increment.rb +29 -0
- data/lib/liquid2/nodes/tags/inline_comment.rb +28 -0
- data/lib/liquid2/nodes/tags/liquid.rb +29 -0
- data/lib/liquid2/nodes/tags/macro.rb +3 -0
- data/lib/liquid2/nodes/tags/raw.rb +30 -0
- data/lib/liquid2/nodes/tags/render.rb +137 -0
- data/lib/liquid2/nodes/tags/tablerow.rb +143 -0
- data/lib/liquid2/nodes/tags/translate.rb +3 -0
- data/lib/liquid2/nodes/tags/unless.rb +23 -0
- data/lib/liquid2/nodes/tags/with.rb +3 -0
- data/lib/liquid2/parser.rb +917 -0
- data/lib/liquid2/scanner.rb +595 -0
- data/lib/liquid2/static_analysis.rb +301 -0
- data/lib/liquid2/tag.rb +22 -0
- data/lib/liquid2/template.rb +182 -0
- data/lib/liquid2/undefined.rb +131 -0
- data/lib/liquid2/utils/cache.rb +80 -0
- data/lib/liquid2/utils/chain_hash.rb +40 -0
- data/lib/liquid2/utils/unescape.rb +119 -0
- data/lib/liquid2/version.rb +5 -0
- data/lib/liquid2.rb +90 -0
- data/performance/benchmark.rb +73 -0
- data/performance/memory_profile.rb +62 -0
- data/performance/profile.rb +71 -0
- data/sig/liquid2.rbs +2348 -0
- data.tar.gz.sig +0 -0
- metadata +164 -0
- metadata.gz.sig +0 -0
@@ -0,0 +1,917 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
require "set"
|
4
|
+
require "strscan"
|
5
|
+
require_relative "expression"
|
6
|
+
require_relative "node"
|
7
|
+
require_relative "nodes/comment"
|
8
|
+
require_relative "nodes/output"
|
9
|
+
require_relative "expressions/arguments"
|
10
|
+
require_relative "expressions/array"
|
11
|
+
require_relative "expressions/blank"
|
12
|
+
require_relative "expressions/boolean"
|
13
|
+
require_relative "expressions/filtered"
|
14
|
+
require_relative "expressions/identifier"
|
15
|
+
require_relative "expressions/lambda"
|
16
|
+
require_relative "expressions/logical"
|
17
|
+
require_relative "expressions/loop"
|
18
|
+
require_relative "expressions/path"
|
19
|
+
require_relative "expressions/range"
|
20
|
+
require_relative "expressions/relational"
|
21
|
+
require_relative "expressions/template_string"
|
22
|
+
|
23
|
+
module Liquid2
|
24
|
+
# Liquid template parser.
|
25
|
+
class Parser
|
26
|
+
# Parse Liquid template text into a syntax tree.
# @param env [Environment] The environment supplying tags, filters and trim behavior.
# @param source [String] Liquid template source text.
# @param scanner [StringScanner?] An optional scanner to reuse; a fresh one is
#   created when not given.
# @return [Array[Node | String]]
def self.parse(env, source, scanner: nil)
  new(env,
      Liquid2::Scanner.tokenize(source, scanner || StringScanner.new("")),
      source.length).parse
end

# @param env [Environment]
# @param tokens [Array[[Symbol, String?, Integer]]]
# @param length [Integer] Length of the source string.
def initialize(env, tokens, length)
  @env = env
  @tokens = tokens
  # Index of the current token in @tokens.
  @pos = 0
  # Synthetic token returned once all real tokens have been consumed.
  @eof = [:token_eof, nil, length - 1]
  # Whitespace control value carried over from the previous tag/output/comment
  # end, applied when trimming the next text node.
  @whitespace_carry = nil
end
|
45
|
+
|
46
|
+
# Return the token at the pointer without consuming it, or the EOF token
# when the stream is exhausted.
def current
  @tokens[@pos] || @eof
end

# Kind (first element) of the current token; does not advance the pointer.
def current_kind
  current.first
end

# Consume and return the token at the pointer, or the EOF token when no
# tokens remain (in which case the pointer is not advanced).
def next
  token = @tokens[@pos]
  return @eof unless token

  @pos += 1
  token
end

# Consume the next token and return its kind.
def next_kind
  self.next.first
end

# Return the token _offset_ positions past the current one, or EOF.
def peek(offset = 1)
  @tokens[@pos + offset] || @eof
end

# Kind of the token _offset_ positions past the current one.
def peek_kind(offset = 1)
  peek(offset).first
end

# The token just before the pointer. Raises when the lookup yields nil.
def previous
  @tokens[@pos - 1] || raise
end
|
71
|
+
|
72
|
+
# Consume the next token if its kind matches _kind_, raise an error if it does not.
# @param kind [Symbol]
# @param message [String?] An error message to use if the next token kind does not match _kind_.
# @return [Token] The consumed token.
# @raise [LiquidSyntaxError] If the next token's kind does not match _kind_.
def eat(kind, message = nil)
  token = self.next
  unless token.first == kind
    raise LiquidSyntaxError.new(message || "unexpected #{token.first}", token)
  end

  token
end

# Consume the next token if its kind is in _kinds_, raise an error if it is not.
# @param kinds [Array<Symbol>]
# @return [Token] The consumed token.
# @raise [LiquidSyntaxError] If the next token's kind is not in _kinds_.
def eat_one_of(*kinds)
  token = self.next
  unless kinds.include? token.first
    raise LiquidSyntaxError.new("unexpected #{token.first}", token)
  end

  token
end

# Consume a complete tag that takes no expression, including its start/end
# markers and any whitespace control.
# @param name [String] The expected tag name.
# @return The :token_tag_name token.
# @raise [LiquidSyntaxError] If the next tokens don't form a tag named _name_.
def eat_empty_tag(name)
  eat(:token_tag_start, "expected tag #{name}")
  @pos += 1 if current_kind == :token_whitespace_control
  name_token = eat(:token_tag_name, "expected tag #{name}")

  unless name == name_token[1]
    raise LiquidSyntaxError.new(
      "unexpected tag #{name_token[1]}", name_token
    )
  end

  carry_whitespace_control
  eat(:token_tag_end, "expected tag #{name}")
  name_token
end
|
114
|
+
|
115
|
+
# Return `true` if we're at the start of a tag named _name_.
# Assumes the current token is :token_tag_start.
# @param name [String]
# @return [bool]
def tag?(name)
  token = peek # Whitespace control or tag name
  token = peek(2) if token.first == :token_whitespace_control
  token.first == :token_tag_name && token[1] == name
end

# Return `true` if the current token is a word matching _text_.
# @param text [String]
# @return [bool]
def word?(text)
  token = current
  token.first == :token_word && token[1] == text
end

# Return the next tag name without advancing the pointer.
# Assumes the current token is :token_tag_start.
# @return [String]
# @raise [LiquidSyntaxError] If no tag name follows.
def peek_tag_name
  token = current # Whitespace control or tag name
  token = peek if token.first == :token_whitespace_control
  unless token.first == :token_tag_name
    raise LiquidSyntaxError.new("missing tag name #{token}",
                                token)
  end

  token[1] || raise
end
|
145
|
+
|
146
|
+
# Advance the pointer if the current token is a whitespace control token.
def skip_whitespace_control
  @pos += 1 if current_kind == :token_whitespace_control
end

# Advance the pointer if the current token is a whitespace control token, and
# remember the token's value for the next text node. Resets the carry to nil
# when there is no whitespace control token.
def carry_whitespace_control
  @whitespace_carry = current_kind == :token_whitespace_control ? self.next[1] : nil
end

# Raise a syntax error if the current token would terminate an expression,
# i.e. a required expression is missing.
# @raise [LiquidSyntaxError]
def expect_expression
  return unless TERMINATE_EXPRESSION.include?(current_kind)

  raise LiquidSyntaxError.new("missing expression",
                              current)
end
|
163
|
+
|
164
|
+
# Parse template tokens into a sequence of nodes until end of input.
# @return [Array[Node | String]]
def parse
  nodes = [] # : Array[Node | String]

  loop do
    kind, value = self.next
    # Skip a whitespace control token following the consumed token.
    @pos += 1 if current_kind == :token_whitespace_control

    case kind
    when :token_other
      # Template text. Trim it according to the carried and trailing
      # whitespace control, then emit it as a plain String.
      rstrip = peek[1] if peek_kind == :token_whitespace_control
      # NOTE(review): @env.trim's return value is discarded — presumably it
      # mutates _value_ in place; confirm against Environment#trim.
      @env.trim(value || raise, @whitespace_carry, rstrip)
      nodes << (value || raise)
    when :token_output_start
      nodes << parse_output
    when :token_tag_start
      nodes << parse_tag
    when :token_comment_start
      nodes << parse_comment
    when :token_eof
      return nodes
    else
      raise LiquidSyntaxError.new("unexpected #{kind}", previous)
    end
  end
end
|
190
|
+
|
191
|
+
# Parse Liquid markup until we find a tag token in _end_block_.
# @param end_block [responds to include?] An array or set of tag names that will
#   indicate the end of the block.
# @return [Block]
def parse_block(end_block)
  token = current
  nodes = [] # : Array[Node | String]

  loop do
    kind, value = self.next

    case kind
    when :token_other
      # Template text, trimmed according to surrounding whitespace control.
      rstrip = peek[1] if peek_kind == :token_whitespace_control
      @env.trim(value || raise, @whitespace_carry, rstrip)
      nodes << (value || raise)
    when :token_output_start
      @pos += 1 if current_kind == :token_whitespace_control
      nodes << parse_output
    when :token_tag_start
      if end_block.include?(peek_tag_name)
        # Step back so the caller can consume the end tag itself.
        @pos -= 1
        break
      end

      @pos += 1 if current_kind == :token_whitespace_control
      nodes << parse_tag
    when :token_comment_start
      nodes << parse_comment
    when :token_eof
      break
    else
      # NOTE(review): the message interpolates _token_ (the block's first
      # token), not the unexpected token — confirm this is intended.
      raise LiquidSyntaxError.new("unexpected token: #{token.inspect}", previous)
    end
  end

  Block.new(token, nodes)
end
|
229
|
+
|
230
|
+
# Parse a primary expression, optionally followed by filters and/or an
# inline conditional (ternary) expression.
# @return [FilteredExpression|TernaryExpression]
def parse_filtered_expression
  token = current
  left = parse_primary
  # A comma after the first expression starts an array literal.
  left = parse_array_literal(left) if current_kind == :token_comma
  filters = parse_filters if current_kind == :token_pipe
  filters ||= [] # : Array[Filter]
  expr = FilteredExpression.new(token, left, filters)

  if current_kind == :token_if
    parse_ternary_expression(expr)
  else
    expr
  end
end
|
245
|
+
|
246
|
+
# Parse a loop expression, like `x in (1..5) reversed limit: 2`, as used by
# the `for` and `tablerow` tags.
# @return [LoopExpression]
def parse_loop_expression
  identifier = parse_identifier
  eat(:token_in, "missing 'in'")
  expect_expression
  enum = parse_primary

  reversed = false
  offset = nil # : (Expression | nil)
  limit = nil # : (Expression | nil)
  cols = nil # : (Expression | nil)

  if current_kind == :token_comma
    unless LOOP_KEYWORDS.member?(peek[1] || raise)
      # A comma followed by a non-keyword word means the iterable is an
      # array literal, like `x in 1, 2, 3`.
      enum = parse_array_literal(enum)
      return LoopExpression.new(identifier.token, identifier, enum,
                                limit: limit, offset: offset, reversed: reversed, cols: cols)
    end

    # A comma between the iterable and the first argument is OK.
    @pos += 1 if current_kind == :token_comma
  end

  # Consume `reversed`, `limit:`, `cols:` and `offset:` arguments in any
  # order, separated by optional commas.
  loop do
    token = current
    case token.first
    when :token_word
      case token[1]
      when "reversed"
        @pos += 1
        reversed = true
      when "limit"
        @pos += 1
        eat_one_of(:token_colon, :token_assign)
        limit = parse_primary
      when "cols"
        @pos += 1
        eat_one_of(:token_colon, :token_assign)
        cols = parse_primary
      when "offset"
        @pos += 1
        eat_one_of(:token_colon, :token_assign)
        offset_token = current
        # `offset: continue` is kept as an Identifier rather than parsed as a path.
        offset = if offset_token.first == :token_word && offset_token[1] == "continue"
                   Identifier.new(self.next)
                 else
                   parse_primary
                 end
      else
        raise LiquidSyntaxError.new("expected 'reversed', 'offset' or 'limit'", token)
      end
    when :token_comma
      @pos += 1
    else
      break
    end
  end

  LoopExpression.new(identifier.token, identifier, enum,
                     limit: limit, offset: offset, reversed: reversed, cols: cols)
end
|
307
|
+
|
308
|
+
# Parse a sequence of tags terminated by a tag-end or whitespace control
# token (used for line-delimited statements, e.g. inside a `liquid` tag —
# see nodes/tags/liquid.rb; TODO confirm).
# @return [Block]
def parse_line_statements
  token = previous
  nodes = [] # : Array[Node]

  loop do
    case current_kind
    when :token_tag_start
      @pos += 1
      nodes << parse_tag
    when :token_whitespace_control, :token_tag_end
      break
    else
      raise LiquidSyntaxError.new("unexpected #{current_kind}", current)
    end
  end

  Block.new(token, nodes)
end
|
326
|
+
|
327
|
+
# Parse a _primary_ expression.
# A primary expression is a literal, a path (to a variable), or a logical
# expression composed of other primary expressions.
# @param precedence [Integer] Minimum operator precedence to bind infix operators.
# @param infix [bool] When false, do not consume trailing infix operators.
# @return [Node]
def parse_primary(precedence: Precedence::LOWEST, infix: true)
  # Keywords followed by a dot or square bracket are parsed as paths.
  looks_like_a_path = PATH_PUNCTUATION.include?(peek_kind)

  # @type var kind: Symbol
  kind = current_kind

  left = case kind
         when :token_true
           # NOTE(review): the keyword token is consumed before parse_path,
           # unlike the :token_blank/:token_empty branches below — confirm
           # parse_path still sees the full path here.
           self.next
           looks_like_a_path ? parse_path : true
         when :token_false
           self.next
           looks_like_a_path ? parse_path : false
         when :token_nil
           self.next
           looks_like_a_path ? parse_path : nil
         when :token_int
           Liquid2.to_liquid_int(self.next[1])
         when :token_float
           Float(self.next[1])
         when :token_blank
           looks_like_a_path ? parse_path : Blank.new(self.next)
         when :token_empty
           looks_like_a_path ? parse_path : Empty.new(self.next)
         when :token_single_quote_string, :token_double_quote_string
           parse_string_literal
         when :token_word, :token_lbracket
           parse_path
         when :token_lparen
           parse_range_lambda_or_grouped_expression
         when :token_not
           parse_prefix_expression
         else
           # Reserved words followed by path punctuation are valid paths.
           unless looks_like_a_path && RESERVED_WORDS.include?(kind)
             raise LiquidSyntaxError.new("unexpected #{current_kind}", current)
           end

           parse_path
         end

  return left unless infix

  # Bind infix operators while their precedence is at least _precedence_.
  loop do
    kind = current_kind

    if kind == :token_unknown
      raise LiquidSyntaxError.new("unexpected #{current[1]&.inspect || kind}",
                                  current)
    end

    if kind == :token_eof ||
       (PRECEDENCES[kind] || Precedence::LOWEST) < precedence ||
       !BINARY_OPERATORS.member?(kind)
      break
    end

    left = parse_infix_expression(left)
  end

  left
end
|
393
|
+
|
394
|
+
# Parse a string literal without interpolation.
# @return [String]
# @raise [LiquidTypeError] If the next expression is not a plain string.
def parse_string
  node = parse_primary
  raise LiquidTypeError, "expected a string" unless node.is_a?(String)

  node
end

# Parse a single word as an Identifier, rejecting paths.
# @param trailing_question [bool] NOTE(review): currently unused in this
#   method body — confirm whether trailing `?` handling belongs here.
# @return [Identifier]
# @raise [LiquidSyntaxError] If the word is followed by path punctuation.
def parse_identifier(trailing_question: true)
  token = eat(:token_word)

  if PATH_PUNCTUATION.include?(current_kind)
    raise LiquidSyntaxError.new("expected an identifier, found a path", current)
  end

  Identifier.new(token)
end
|
413
|
+
|
414
|
+
# Parse comma separated expressions.
# Leading commas should be consumed by the caller.
# @return [Array<Expression>]
def parse_positional_arguments
  args = [] # : Array[untyped]

  loop do
    args << parse_primary
    break unless current_kind == :token_comma

    @pos += 1
  end

  args
end

# Parse comma separated name/value pairs.
# Leading commas should be consumed by the caller, if allowed.
# @return [Array<KeywordArgument>]
def parse_keyword_arguments
  args = [] # : Array[KeywordArgument]

  loop do
    # Stop at the end of the expression (tag end, output end, etc.).
    break if TERMINATE_EXPRESSION.member?(current_kind)

    word = eat(:token_word)
    # Both `name: value` and `name = value` styles are accepted.
    eat_one_of(:token_assign, :token_colon)
    val = parse_primary
    args << KeywordArgument.new(word, word[1] || raise, val)

    break unless current_kind == :token_comma

    @pos += 1
  end

  args
end
|
451
|
+
|
452
|
+
protected

# Operator precedence levels, lowest binding to highest.
class Precedence
  LOWEST = 1
  LOGICAL_RIGHT = 2
  LOGICAL_OR = 3
  LOGICAL_AND = 4
  RELATIONAL = 5
  MEMBERSHIP = 6
  PREFIX = 7
end

# Map of operator token kind to its precedence level.
PRECEDENCES = {
  token_and: Precedence::LOGICAL_AND,
  token_or: Precedence::LOGICAL_OR,
  token_not: Precedence::PREFIX,
  token_rparen: Precedence::LOWEST,
  token_contains: Precedence::MEMBERSHIP,
  token_in: Precedence::MEMBERSHIP,
  token_eq: Precedence::RELATIONAL,
  token_lt: Precedence::RELATIONAL,
  token_gt: Precedence::RELATIONAL,
  token_ne: Precedence::RELATIONAL,
  token_lg: Precedence::RELATIONAL,
  token_le: Precedence::RELATIONAL,
  token_ge: Precedence::RELATIONAL
}.freeze

# Token kinds handled by #parse_infix_expression.
BINARY_OPERATORS = Set[
  :token_eq,
  :token_lt,
  :token_gt,
  :token_lg,
  :token_ne,
  :token_le,
  :token_ge,
  :token_contains,
  :token_in,
  :token_and,
  :token_or
]

# Token kinds that terminate an expression.
TERMINATE_EXPRESSION = Set[
  :token_whitespace_control,
  :token_output_end,
  :token_tag_end,
  :token_other
]

# Token kinds that terminate a filter and its arguments.
TERMINATE_FILTER = Set[
  :token_whitespace_control,
  :token_output_end,
  :token_tag_end,
  :token_pipe,
  :token_double_pipe,
  :token_if,
  :token_else,
  :token_other,
  :token_eof,
  :token_line_term,
  :token_string_interpol_end
]

# Token kinds that terminate a parenthesized expression.
TERMINATE_GROUPED_EXPRESSION = Set[
  :token_eof,
  :token_other,
  :token_rparen
]

# Token kinds expected within an arrow function's parameter list.
TERMINATE_LAMBDA_PARAM = Set[
  :token_rparen,
  :token_word,
  :token_comma,
  :token_arrow
]

# Tokens separating a keyword argument's name from its value.
KEYWORD_ARGUMENT_DELIMITERS = Set[
  :token_assign,
  :token_colon
]

# Token kinds that can start a primitive expression.
# NOTE(review): :token_single_quote/:token_double_quote differ from the
# string token kinds used elsewhere in this file
# (:token_single_quote_string/:token_double_quote_string) — confirm against
# the scanner's token names.
PRIMITIVE_TOKENS = Set[
  :token_true,
  :token_false,
  :token_nil,
  :token_int,
  :token_float,
  :token_single_quote,
  :token_double_quote,
  :token_word,
  :token_lparen
]

# Keyword token kinds that may still appear as path segments.
RESERVED_WORDS = Set[
  :token_true,
  :token_false,
  :token_nil,
  :token_and,
  :token_or,
  :token_not,
  :token_in,
  :token_contains,
  :token_if,
  :token_else,
  :token_with,
  :token_required,
  :token_as,
  :token_for,
  :token_blank,
  :token_empty
]

# Argument names accepted by loop expressions (see #parse_loop_expression).
LOOP_KEYWORDS = Set[
  "limit",
  "reversed",
  "cols",
  "offset"
]

# Markup start tokens that may be followed by whitespace control.
# NOTE(review): :token_tag_tags looks like a typo for :token_tag_start —
# confirm against the scanner; this set appears unused within this file.
WC_TOKENS = Set[
  :token_output_start,
  :token_comment_start,
  :token_tag_tags
]

# Tokens that continue a path after its first segment.
PATH_PUNCTUATION = Set[
  :token_dot,
  :token_lbracket
]
|
581
|
+
|
582
|
+
# Parse an output statement. Assumes the output-start token (and any leading
# whitespace control) has already been consumed.
# @return [Output]
def parse_output
  expr = parse_filtered_expression
  carry_whitespace_control
  eat(:token_output_end)
  Output.new(expr.token, expr)
end

# Parse a tag by dispatching to the implementation registered with the
# environment. Assumes the tag-start token has already been consumed.
# @return [Node]
# @raise [LiquidSyntaxError] If the tag name is missing or unregistered.
def parse_tag
  token = eat(:token_tag_name, "missing tag name")

  if (tag = @env.tags[token[1] || raise])
    tag.parse(token, self)
  else
    raise LiquidSyntaxError.new("unexpected tag #{token[1].inspect}", token)
  end
end

# Parse a comment. Assumes the comment-start token has already been consumed.
# @return [Node]
def parse_comment
  skip_whitespace_control
  token = eat(:token_comment)
  carry_whitespace_control
  eat(:token_comment_end)
  Comment.new(token, token[1] || raise)
end
|
609
|
+
|
610
|
+
# Parse a path to a variable, like `a.b[0].c` or `a["b"].c`.
# @return [Node]
def parse_path
  token = current
  segments = [] # : Array[String | Integer | Path]

  # The first segment is a bare word unless the path starts with a bracket.
  segments << (self.next[1] || raise) unless current_kind == :token_lbracket

  loop do
    case self.next.first
    when :token_lbracket
      segments << parse_bracketed_path_selector
    when :token_dot
      segments << parse_shorthand_path_selector
    else
      # Step back over the token that ended the path.
      @pos -= 1
      return Path.new(token, segments)
    end
  end
end

# Parse one `[...]` path segment. Assumes the opening bracket has been consumed.
# @return [Node]
# @raise [LiquidSyntaxError] On an empty or malformed bracketed segment.
def parse_bracketed_path_selector
  kind, value = self.next

  segment = case kind
            when :token_int
              value.to_i
            when :token_word
              # A nested path, like `a[b.c]`.
              @pos -= 1
              parse_path
            when :token_double_quote_string, :token_single_quote_string
              value || raise
            when :token_rbracket
              raise LiquidSyntaxError.new(
                "empty bracketed segment", previous
              )
            else
              raise LiquidSyntaxError.new(
                "unexpected #{kind}", previous
              )
            end

  eat(:token_rbracket)
  segment
end

# Parse one `.name` path segment. Assumes the dot has been consumed.
# @return [Node]
# @raise [LiquidSyntaxError] On a shorthand index when disabled, or a non-word.
def parse_shorthand_path_selector
  kind, value = self.next
  case kind
  when :token_int
    # Shorthand integer indexes (`a.0`) are an opt-in environment feature.
    unless @env.shorthand_indexes
      raise LiquidSyntaxError.new("indexes must be surrounded by square brackets",
                                  previous)
    end

    value.to_i
  when :token_word
    value || raise
  else
    # Reserved words are allowed as shorthand selectors.
    unless RESERVED_WORDS.member?(kind)
      raise LiquidSyntaxError.new("unexpected #{kind}", previous)
    end

    value || raise
  end
end
|
677
|
+
|
678
|
+
# Parse a comma separated list of expressions. Assumes the next token is a comma.
# @param left [Expression] The first item in the array.
# @return [ArrayLiteral]
def parse_array_literal(left)
  token = current
  items = [left] # : Array[untyped]

  loop do
    break unless current_kind == :token_comma

    @pos += 1

    # Allow a trailing comma before a terminator.
    break if TERMINATE_FILTER.member?(current_kind)

    items << parse_primary
  end

  # Prefer the first item's own token for error reporting when it has one.
  ArrayLiteral.new(left.respond_to?(:token) ? left.token : token, items)
end
|
697
|
+
|
698
|
+
# Parse a parenthesized construct: a range (`(a..b)`), an arrow function's
# parameter list, or a grouped expression.
# @return [Node]
def parse_range_lambda_or_grouped_expression
  token = eat(:token_lparen)
  expr = parse_primary

  if current_kind == :token_double_dot
    @pos += 1
    stop = parse_primary
    eat(:token_rparen)
    return RangeExpression.new(token, expr, stop)
  end

  kind = current_kind

  # An arrow function, but we've already consumed lparen and the first parameter.
  return parse_partial_arrow_function(expr) if kind == :token_comma

  # An arrow function with a single parameter surrounded by parens.
  if kind == :token_rparen && peek_kind == :token_arrow
    return parse_partial_arrow_function(expr)
  end

  unless TERMINATE_GROUPED_EXPRESSION.member?(kind)
    unless BINARY_OPERATORS.member?(kind)
      raise LiquidSyntaxError.new("expected an infix operator, found #{kind}", current)
    end

    expr = parse_infix_expression(expr)
  end

  eat(:token_rparen)
  GroupedExpression.new(token, expr)
end

# Parse a logical `not` prefix expression.
# @return [Node]
def parse_prefix_expression
  token = eat(:token_not)
  expr = parse_primary
  LogicalNot.new(token, expr)
end
|
738
|
+
|
739
|
+
# Parse an infix (binary) expression. Assumes _left_ has been parsed and the
# current token is a binary operator.
# @param left [Expression]
# @return [Node]
# @raise [LiquidSyntaxError] If the operator token is not a known operator.
def parse_infix_expression(left)
  op_token = self.next
  # The right-hand side binds operators of higher precedence than this one.
  precedence = PRECEDENCES[op_token.first] || Precedence::LOWEST
  right = parse_primary(precedence: precedence)

  case op_token.first
  when :token_eq
    Eq.new(op_token, left, right)
  when :token_lt
    Lt.new(op_token, left, right)
  when :token_gt
    Gt.new(op_token, left, right)
  when :token_ne, :token_lg
    # `!=` and `<>` are equivalent.
    Ne.new(op_token, left, right)
  when :token_le
    Le.new(op_token, left, right)
  when :token_ge
    Ge.new(op_token, left, right)
  when :token_contains
    Contains.new(op_token, left, right)
  when :token_in
    In.new(op_token, left, right)
  when :token_and
    LogicalAnd.new(op_token, left, right)
  when :token_or
    LogicalOr.new(op_token, left, right)
  else
    raise LiquidSyntaxError.new("unexpected infix operator, #{op_token[1]}", op_token)
  end
end
|
771
|
+
|
772
|
+
# Parse a sequence of pipe-separated filters.
# @return [Array<Filter>]
def parse_filters
  filters = [parse_filter] # first filter could start with a double pipe
  filters << parse_filter while current_kind == :token_pipe
  filters
end

# Parse one filter name and its arguments. Assumes the current token is a
# pipe or double pipe.
# @return [Filter]
def parse_filter
  @pos += 1 # pipe or double pipe
  name = eat(:token_word)

  # A filter with no colon followed by a terminator takes no arguments.
  unless current_kind == :token_colon || !TERMINATE_FILTER.member?(current_kind)
    # No arguments
    return Filter.new(name, name[1] || raise, []) # TODO: optimize
  end

  # NOTE(review): this increment also runs when the current token is not a
  # colon (a non-terminator after the name) — confirm that case is intended.
  @pos += 1 # token_colon
  args = [] # : Array[untyped]

  loop do
    token = current
    case token.first
    when :token_word
      if KEYWORD_ARGUMENT_DELIMITERS.member?(peek_kind)
        # A keyword argument
        word = self.next
        @pos += 1 # sep
        val = parse_primary
        args << KeywordArgument.new(word, word[1] || raise, val)
      elsif peek_kind == :token_arrow
        # A positional argument that is an arrow function with a single parameter.
        args << parse_arrow_function
      else
        # A positional argument that is a path.
        args << parse_path
      end
    when :token_lparen
      # A grouped expression or range or arrow function
      args << parse_primary
    else
      break if TERMINATE_FILTER.member?(current_kind)

      args << parse_primary
    end

    break if TERMINATE_FILTER.member?(current_kind)

    eat(:token_comma)
  end

  Filter.new(name, name[1] || raise, args)
end
|
825
|
+
|
826
|
+
# Parse an arrow function (lambda), like `x => x.title` or `(a, b) => a.b`.
# @return [Node]
def parse_arrow_function
  token = current
  params = [] # : Array[Identifier]

  case token.first
  when :token_word
    # A single parameter without parens
    params << parse_identifier
  when :token_lparen
    # One or more parameters separated by commas and surrounded by parentheses.
    self.next
    while current_kind != :token_rparen
      params << parse_identifier

      self.next if current_kind == :token_comma
    end

    eat(:token_rparen)
  end

  eat(:token_arrow)
  Lambda.new(token, params, parse_primary)
end

# Parse the remainder of an arrow function after the caller has already
# consumed the opening paren and the first parameter.
# @param expr [Expression] The first parameter already parsed by the caller.
# @return [Expression]
def parse_partial_arrow_function(expr)
  token = previous
  params = [] # : Array[Identifier]

  # expr should be a single segment path, we need an Identifier.
  params << Identifier.from(expr)
  self.next if current_kind == :token_comma

  while current_kind != :token_rparen
    params << parse_identifier
    self.next if current_kind == :token_comma
  end

  eat(:token_rparen)
  eat(:token_arrow)
  Lambda.new(token, params, parse_primary)
end
|
871
|
+
|
872
|
+
# Parse an inline conditional (ternary) expression, like `a if b else c`.
# Assumes _left_ has been parsed and the current token is :token_if.
# @param left [Expression]
# @return [Node]
def parse_ternary_expression(left)
  eat(:token_if)
  condition = BooleanExpression.new(current, parse_primary)
  alternative = nil # : Expression?
  filters = [] # : Array[Filter]
  tail_filters = [] # : Array[Filter]

  if current_kind == :token_else
    @pos += 1
    alternative = parse_primary
    # Filters following the alternative, introduced with a single pipe.
    filters = parse_filters if current_kind == :token_pipe
  end

  # Filters introduced with `||` — presumably applied to the whole ternary
  # result; confirm against TernaryExpression.
  tail_filters = parse_filters if current_kind == :token_double_pipe

  TernaryExpression.new(left.token, left, condition, alternative, filters, tail_filters)
end
|
891
|
+
|
892
|
+
# Parse a string literal, which may contain interpolated expressions.
# Plain strings are returned as Ruby Strings; interpolated strings become
# TemplateString nodes.
# @return [String | TemplateString]
def parse_string_literal
  token = self.next # double or single quote string
  return token[1] || raise unless current_kind == :token_string_interpol_start

  segments = [] # : Array[untyped]
  # Drop an empty leading literal segment.
  segments << token[1] unless (token[1] || raise).empty?

  # TODO: Does this mean consecutive literal strings are implicitly combined into one?
  # If there is at least one :token_string_interpol_start following the first string.
  loop do
    case current_kind
    when :token_string_interpol_start
      @pos += 1
      segments << parse_filtered_expression
      eat(:token_string_interpol_end)
    when :token_double_quote_string, :token_single_quote_string
      segments << self.next[1]
    else
      break
    end
  end

  TemplateString.new(token, segments)
end
|
916
|
+
end
|
917
|
+
end
|