@valbuild/core 0.12.0 → 0.13.0

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. package/jest.config.js +4 -0
  2. package/package.json +1 -1
  3. package/src/Json.ts +4 -0
  4. package/src/expr/README.md +193 -0
  5. package/src/expr/eval.test.ts +202 -0
  6. package/src/expr/eval.ts +248 -0
  7. package/src/expr/expr.ts +91 -0
  8. package/src/expr/index.ts +3 -0
  9. package/src/expr/parser.test.ts +158 -0
  10. package/src/expr/parser.ts +229 -0
  11. package/src/expr/repl.ts +93 -0
  12. package/src/expr/tokenizer.test.ts +539 -0
  13. package/src/expr/tokenizer.ts +117 -0
  14. package/src/fetchVal.test.ts +164 -0
  15. package/src/fetchVal.ts +211 -0
  16. package/src/fp/array.ts +30 -0
  17. package/src/fp/index.ts +3 -0
  18. package/src/fp/result.ts +214 -0
  19. package/src/fp/util.ts +52 -0
  20. package/src/index.ts +55 -0
  21. package/src/initSchema.ts +45 -0
  22. package/src/initVal.ts +96 -0
  23. package/src/module.test.ts +170 -0
  24. package/src/module.ts +333 -0
  25. package/src/patch/deref.test.ts +300 -0
  26. package/src/patch/deref.ts +128 -0
  27. package/src/patch/index.ts +11 -0
  28. package/src/patch/json.test.ts +583 -0
  29. package/src/patch/json.ts +304 -0
  30. package/src/patch/operation.ts +74 -0
  31. package/src/patch/ops.ts +83 -0
  32. package/src/patch/parse.test.ts +202 -0
  33. package/src/patch/parse.ts +187 -0
  34. package/src/patch/patch.ts +46 -0
  35. package/src/patch/util.ts +67 -0
  36. package/src/schema/array.ts +52 -0
  37. package/src/schema/boolean.ts +38 -0
  38. package/src/schema/i18n.ts +65 -0
  39. package/src/schema/image.ts +70 -0
  40. package/src/schema/index.ts +46 -0
  41. package/src/schema/literal.ts +42 -0
  42. package/src/schema/number.ts +45 -0
  43. package/src/schema/object.ts +67 -0
  44. package/src/schema/oneOf.ts +60 -0
  45. package/src/schema/richtext.ts +417 -0
  46. package/src/schema/string.ts +49 -0
  47. package/src/schema/union.ts +62 -0
  48. package/src/selector/ExprProxy.test.ts +203 -0
  49. package/src/selector/ExprProxy.ts +209 -0
  50. package/src/selector/SelectorProxy.test.ts +172 -0
  51. package/src/selector/SelectorProxy.ts +237 -0
  52. package/src/selector/array.ts +37 -0
  53. package/src/selector/boolean.ts +4 -0
  54. package/src/selector/file.ts +14 -0
  55. package/src/selector/i18n.ts +13 -0
  56. package/src/selector/index.ts +159 -0
  57. package/src/selector/number.ts +4 -0
  58. package/src/selector/object.ts +22 -0
  59. package/src/selector/primitive.ts +17 -0
  60. package/src/selector/remote.ts +9 -0
  61. package/src/selector/selector.test.ts +453 -0
  62. package/src/selector/selectorOf.ts +7 -0
  63. package/src/selector/string.ts +4 -0
  64. package/src/source/file.ts +45 -0
  65. package/src/source/i18n.ts +60 -0
  66. package/src/source/index.ts +50 -0
  67. package/src/source/remote.ts +54 -0
  68. package/src/val/array.ts +10 -0
  69. package/src/val/index.ts +90 -0
  70. package/src/val/object.ts +13 -0
  71. package/src/val/primitive.ts +8 -0
package/src/expr/tokenizer.test.ts
@@ -0,0 +1,539 @@
+ import { type Token, tokenize } from "./tokenizer";
+
+ const TokenizerTestCases: {
+   input: string;
+   expected: Token[];
+   endCursor?: number;
+ }[] = [
+   {
+     input: "!(",
+     expected: [
+       {
+         type: "!(",
+         span: [0, 1],
+       },
+     ],
+   },
+   {
+     input: "')'",
+     expected: [
+       {
+         type: "'",
+         span: [0, 0],
+       },
+       {
+         type: "string",
+         span: [1, 1],
+         value: ")",
+       },
+       {
+         type: "'",
+         span: [2, 2],
+       },
+     ],
+   },
+   {
+     input: "!(",
+     expected: [
+       {
+         type: "!(",
+         span: [0, 1],
+       },
+     ],
+   },
+   {
+     input: "(",
+     expected: [
+       {
+         type: "(",
+         span: [0, 0],
+       },
+     ],
+   },
+   {
+     input: "(b ab)", // single character tokens
+     expected: [
+       { type: "(", span: [0, 0] },
+       { type: "token", span: [1, 1], value: "b" },
+       { type: "ws", span: [2, 2] },
+       { type: "token", span: [3, 4], value: "ab" },
+       { type: ")", span: [5, 5] },
+     ],
+   },
+   {
+     input: "(map fn value)",
+     expected: [
+       { type: "(", span: [0, 0] },
+       { type: "token", span: [1, 3], value: "map" },
+       { type: "ws", span: [4, 4] },
+       { type: "token", span: [5, 6], value: "fn" },
+       { type: "ws", span: [7, 7] },
+       { type: "token", span: [8, 12], value: "value" },
+       { type: ")", span: [13, 13] },
+     ],
+   },
+   {
+     input: "(map (map foo bar) value)",
+     expected: [
+       { type: "(", span: [0, 0] },
+       { type: "token", span: [1, 3], value: "map" },
+       { type: "ws", span: [4, 4] },
+       { type: "(", span: [5, 5] },
+       { type: "token", span: [6, 8], value: "map" },
+       { type: "ws", span: [9, 9] },
+       { type: "token", span: [10, 12], value: "foo" },
+       { type: "ws", span: [13, 13] },
+       { type: "token", span: [14, 16], value: "bar" },
+       { type: ")", span: [17, 17] },
+       { type: "ws", span: [18, 18] },
+       { type: "token", span: [19, 23], value: "value" },
+       { type: ")", span: [24, 24] },
+     ],
+   },
+   {
+     input: "'foo'",
+     expected: [
+       {
+         type: "'",
+         span: [0, 0],
+       },
+       {
+         type: "string",
+         span: [1, 3],
+         value: "foo",
+       },
+       {
+         type: "'",
+         span: [4, 4],
+       },
+     ],
+   },
+   // this is the JS equivalent: 'f\'oo'
+   {
+     input: "'f\\'oo'",
+     expected: [
+       {
+         type: "'",
+         span: [0, 0],
+       },
+       {
+         type: "string",
+         span: [1, 5],
+         value: "f'oo",
+         unescapedValue: "f\\'oo",
+       },
+       {
+         type: "'",
+         span: [6, 6],
+       },
+     ],
+   },
+   // lenient tokenization (errors must be handled by the parser)
+   // this is the JS equivalent: 'f\\'oofail'
+   {
+     input: "'f\\\\'oo'fail",
+     expected: [
+       { type: "'", span: [0, 0] },
+       { type: "string", span: [1, 3], value: "f\\", unescapedValue: "f\\\\" },
+       { type: "'", span: [4, 4] },
+       { type: "token", span: [5, 6], value: "oo" },
+       { type: "'", span: [7, 7] },
+       { type: "string", span: [8, 11], value: "fail" },
+     ],
+     endCursor: 12,
+   },
+   {
+     input: "(fo() (fail zoo))",
+     expected: [
+       { type: "(", span: [0, 0] },
+       { type: "token", span: [1, 3], value: "fo(" }, // the tokenizer tries its best, even though ( is not allowed in tokens
+       { type: ")", span: [4, 4] },
+       { type: "ws", span: [5, 5] },
+       { type: "(", span: [6, 6] },
+       { type: "token", span: [7, 10], value: "fail" },
+       { type: "ws", span: [11, 11] },
+       { type: "token", span: [12, 14], value: "zoo" },
+       { type: ")", span: [15, 15] },
+       { type: ")", span: [16, 16] },
+     ],
+   },
+   // this is the JS equivalent: 'f\\\'oo'
+   {
+     input: "'f\\\\\\'oo'",
+     expected: [
+       {
+         type: "'",
+         span: [0, 0],
+       },
+       {
+         type: "string",
+         span: [1, 7],
+         value: "f\\'oo",
+         unescapedValue: "f\\\\\\'oo",
+       },
+       {
+         type: "'",
+         span: [8, 8],
+       },
+     ],
+   },
+   {
+     input: "(foo (bar zoo))",
+     expected: [
+       {
+         type: "(",
+         span: [0, 0],
+       },
+       {
+         type: "token",
+         span: [1, 3],
+         value: "foo",
+       },
+       {
+         type: "ws",
+         span: [4, 4],
+       },
+       {
+         type: "(",
+         span: [5, 5],
+       },
+       {
+         type: "token",
+         span: [6, 8],
+         value: "bar",
+       },
+       {
+         type: "ws",
+         span: [9, 9],
+       },
+       {
+         type: "token",
+         span: [10, 12],
+         value: "zoo",
+       },
+       {
+         type: ")",
+         span: [13, 13],
+       },
+       {
+         type: ")",
+         span: [14, 14],
+       },
+     ],
+   },
+   {
+     input: "  (foo (bar 'he\\'pp'   zoo))  ",
+     expected: [
+       {
+         type: "ws",
+         span: [0, 1],
+       },
+       {
+         type: "(",
+         span: [2, 2],
+       },
+       {
+         type: "token",
+         span: [3, 5],
+         value: "foo",
+       },
+       {
+         type: "ws",
+         span: [6, 6],
+       },
+       {
+         type: "(",
+         span: [7, 7],
+       },
+       {
+         type: "token",
+         span: [8, 10],
+         value: "bar",
+       },
+       {
+         type: "ws",
+         span: [11, 11],
+       },
+       {
+         type: "'",
+         span: [12, 12],
+       },
+       {
+         type: "string",
+         span: [13, 18],
+         value: "he'pp",
+         unescapedValue: "he\\'pp",
+       },
+       {
+         type: "'",
+         span: [19, 19],
+       },
+       {
+         type: "ws",
+         span: [20, 22],
+       },
+       {
+         type: "token",
+         span: [23, 25],
+         value: "zoo",
+       },
+       {
+         type: ")",
+         span: [26, 26],
+       },
+       {
+         type: ")",
+         span: [27, 27],
+       },
+       {
+         type: "ws",
+         span: [28, 29],
+       },
+     ],
+   },
+   {
+     input: "''",
+     expected: [
+       {
+         type: "'",
+         span: [0, 0],
+       },
+       {
+         type: "'",
+         span: [1, 1],
+       },
+     ],
+   },
+   // interpolation:
+   {
+     input: "'${'str'}'",
+     expected: [
+       {
+         type: "'",
+         span: [0, 0],
+       },
+       {
+         type: "${",
+         span: [1, 2],
+       },
+       {
+         type: "'",
+         span: [3, 3],
+       },
+       {
+         type: "string",
+         span: [4, 6],
+         value: "str",
+       },
+       {
+         type: "'",
+         span: [7, 7],
+       },
+       {
+         type: "}",
+         span: [8, 8],
+       },
+       {
+         type: "'",
+         span: [9, 9],
+       },
+     ],
+   },
+   {
+     input: "'${'${(foo bar)}'}'",
+     expected: [
+       { type: "'", span: [0, 0] },
+       { type: "${", span: [1, 2] },
+       { type: "'", span: [3, 3] },
+       { type: "${", span: [4, 5] },
+       { type: "(", span: [6, 6] },
+       { type: "token", span: [7, 9], value: "foo" },
+       { type: "ws", span: [10, 10] },
+       { type: "token", span: [11, 13], value: "bar" },
+       { type: ")", span: [14, 14] },
+       { type: "}", span: [15, 15] },
+       { type: "'", span: [16, 16] },
+       { type: "}", span: [17, 17] },
+       { type: "'", span: [18, 18] },
+     ],
+   },
+   {
+     input: "'foo${(bar baz)}'",
+     expected: [
+       { type: "'", span: [0, 0] },
+       { type: "string", span: [1, 3], value: "foo" },
+       { type: "${", span: [4, 5] },
+       { type: "(", span: [6, 6] },
+       { type: "token", span: [7, 9], value: "bar" },
+       { type: "ws", span: [10, 10] },
+       { type: "token", span: [11, 13], value: "baz" },
+       { type: ")", span: [14, 14] },
+       { type: "}", span: [15, 15] },
+       { type: "'", span: [16, 16] },
+     ],
+   },
+   {
+     input: "'${'foo ${(foo bar)}'}'",
+     expected: [
+       { type: "'", span: [0, 0] },
+       { type: "${", span: [1, 2] },
+       { type: "'", span: [3, 3] },
+       { type: "string", span: [4, 7], value: "foo " },
+       { type: "${", span: [8, 9] },
+       { type: "(", span: [10, 10] },
+       { type: "token", span: [11, 13], value: "foo" },
+       { type: "ws", span: [14, 14] },
+       { type: "token", span: [15, 17], value: "bar" },
+       { type: ")", span: [18, 18] },
+       { type: "}", span: [19, 19] },
+       { type: "'", span: [20, 20] },
+       { type: "}", span: [21, 21] },
+       { type: "'", span: [22, 22] },
+     ],
+   },
+   {
+     input: "'${'${'${(foo bar)}'}'}'",
+     expected: [
+       { type: "'", span: [0, 0] },
+       { type: "${", span: [1, 2] },
+       { type: "'", span: [3, 3] },
+       { type: "${", span: [4, 5] },
+       { type: "'", span: [6, 6] },
+       { type: "${", span: [7, 8] },
+       { type: "(", span: [9, 9] },
+       { type: "token", span: [10, 12], value: "foo" },
+       { type: "ws", span: [13, 13] },
+       { type: "token", span: [14, 16], value: "bar" },
+       { type: ")", span: [17, 17] },
+       { type: "}", span: [18, 18] },
+       { type: "'", span: [19, 19] },
+       { type: "}", span: [20, 20] },
+       { type: "'", span: [21, 21] },
+       { type: "}", span: [22, 22] },
+       { type: "'", span: [23, 23] },
+     ],
+   },
+   {
+     input: `(json '{"foo": "bar"}')`,
+     expected: [
+       { type: "(", span: [0, 0] },
+       { type: "token", span: [1, 4], value: "json" },
+       { type: "ws", span: [5, 5] },
+       { type: "'", span: [6, 6] },
+       { type: "string", span: [7, 20], value: '{"foo": "bar"}' },
+       { type: "'", span: [21, 21] },
+       { type: ")", span: [22, 22] },
+     ],
+   },
+   {
+     input: `(json '{"foo": \${(foo bar)}}')`,
+     expected: [
+       { type: "(", span: [0, 0] },
+       { type: "token", span: [1, 4], value: "json" },
+       { type: "ws", span: [5, 5] },
+       { type: "'", span: [6, 6] },
+       { type: "string", span: [7, 14], value: '{"foo": ' },
+       { type: "${", span: [15, 16] },
+       { type: "(", span: [17, 17] },
+       { type: "token", span: [18, 20], value: "foo" },
+       { type: "ws", span: [21, 21] },
+       { type: "token", span: [22, 24], value: "bar" },
+       { type: ")", span: [25, 25] },
+       { type: "}", span: [26, 26] },
+       { type: "string", span: [27, 27], value: "}" },
+       { type: "'", span: [28, 28] },
+       { type: ")", span: [29, 29] },
+     ],
+   },
+   {
+     input: `(json '{"foo": \${(foo bar)}, "baz": "baz"}')`,
+     expected: [
+       { type: "(", span: [0, 0] },
+       { type: "token", span: [1, 4], value: "json" },
+       { type: "ws", span: [5, 5] },
+       { type: "'", span: [6, 6] },
+       { type: "string", span: [7, 14], value: '{"foo": ' },
+       { type: "${", span: [15, 16] },
+       { type: "(", span: [17, 17] },
+       { type: "token", span: [18, 20], value: "foo" },
+       { type: "ws", span: [21, 21] },
+       { type: "token", span: [22, 24], value: "bar" },
+       { type: ")", span: [25, 25] },
+       { type: "}", span: [26, 26] },
+       { type: "string", span: [27, 41], value: ', "baz": "baz"}' },
+       { type: "'", span: [42, 42] },
+       { type: ")", span: [43, 43] },
+     ],
+   },
+   {
+     input: `(map
+   (ref '/foo/bar')
+   ('title' @0)
+ )`,
+     expected: [
+       { type: "(", span: [0, 0] },
+       { type: "token", span: [1, 3], value: "map" },
+       { type: "ws", span: [4, 6] },
+       { type: "(", span: [7, 7] },
+       { type: "token", span: [8, 10], value: "ref" },
+       { type: "ws", span: [11, 11] },
+       { type: "'", span: [12, 12] },
+       { type: "string", span: [13, 20], value: "/foo/bar" },
+       { type: "'", span: [21, 21] },
+       { type: ")", span: [22, 22] },
+       { type: "ws", span: [23, 25] },
+       { type: "(", span: [26, 26] },
+       { type: "'", span: [27, 27] },
+       { type: "string", span: [28, 32], value: "title" },
+       { type: "'", span: [33, 33] },
+       { type: "ws", span: [34, 34] },
+       { type: "token", span: [35, 36], value: "@0" },
+       { type: ")", span: [37, 37] },
+       { type: "ws", span: [38, 38] },
+       { type: ")", span: [39, 39] },
+     ],
+   },
+ ];
+
+ describe("tokenizer", () => {
+   test.each(TokenizerTestCases)('tokens: "$input"', ({ input, expected }) => {
+     const [tokens] = tokenize(input);
+     expect(tokens).toStrictEqual(expected);
+   });
+
+   test.each(TokenizerTestCases)(
+     'end cursor: "$input"',
+     ({ input, endCursor }) => {
+       const [, cursor] = tokenize(input);
+       expect(cursor).toStrictEqual(endCursor || input.length);
+     }
+   );
+
+   test.each(TokenizerTestCases)(
+     'expected spans overlap: "$input"', // checks the EXPECTED spans in the test cases, not the code, to avoid PEBKAC test cases
+     ({ input, expected, endCursor }) => {
+       let [, stop] = expected[0].span;
+       for (let i = 1; i < expected.length; i++) {
+         const [nextStart, nextStop] = expected[i].span;
+         expect(nextStop).toBeGreaterThanOrEqual(nextStart);
+         expect(stop + 1).toBe(nextStart);
+         stop = nextStop;
+       }
+       if (endCursor === undefined) {
+         expect(stop + 1).toBe(input.length);
+       }
+     }
+   );
+
+   test.each(TokenizerTestCases)(
+     'expected span equals input at span positions: "$input"',
+     ({ input, expected }) => {
+       for (const token of expected) {
+         if (token.type === "ws") continue;
+         expect(input.slice(token.span[0], token.span[1] + 1)).toBe(
+           token.unescapedValue || token.value || token.type
+         );
+       }
+     }
+   );
+ });
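
Taken together, the cases above pin down the tokenizer's contract: tokenize returns a token list plus an end cursor, spans are inclusive on both ends and contiguous, and escaped strings carry both the unescaped value and the raw unescapedValue. A minimal sketch of consuming that output (the printTokens helper is illustrative, not part of the package):

import { tokenize } from "./tokenizer";

// Hypothetical debugging helper: prints each token with its inclusive span.
function printTokens(input: string): void {
  const [tokens, endCursor] = tokenize(input);
  for (const { type, span, value } of tokens) {
    const suffix = value !== undefined ? ` ${JSON.stringify(value)}` : "";
    console.log(`${type} [${span[0]}, ${span[1]}]${suffix}`);
  }
  // The end cursor equals input.length unless tokenization stopped early.
  console.log(`end cursor: ${endCursor} of ${input.length}`);
}

printTokens("(map fn value)");
// ( [0, 0]
// token [1, 3] "map"
// ws [4, 4]
// token [5, 6] "fn"
// ws [7, 7]
// token [8, 12] "value"
// ) [13, 13]
// end cursor: 14 of 14
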
package/src/expr/tokenizer.ts
@@ -0,0 +1,117 @@
+ export type Token = {
+   readonly type:
+     | "!("
+     | "("
+     | ")"
+     | "string"
+     | "token"
+     | "ws"
+     | "${"
+     | "}"
+     | "'";
+   readonly span: [start: number, stop: number]; // inclusive start, inclusive stop
+   readonly value?: string;
+   readonly unescapedValue?: string;
+ };
+
+ const WHITE_SPACE = ["\n", "\r", "\t", " "];
+ export function tokenize(input: string): [tokens: Token[], endCursor: number] {
+   const tokens: Token[] = [];
+   let cursor = 0;
+   while (cursor < input.length) {
+     let char = input[cursor];
+     let peek = input[cursor + 1];
+     // TODO: remove this branch; it is not used any more
+     if (char === "!" && peek === "(") {
+       tokens.push({ type: "!(", span: [cursor, cursor + 1] });
+       cursor += 2;
+     } else if (char === "(") {
+       tokens.push({ type: "(", span: [cursor, cursor] });
+       cursor++;
+     } else if (char === ")") {
+       tokens.push({ type: ")", span: [cursor, cursor] });
+       cursor++;
+     } else if (char === "'" || char === "}") {
+       const start = cursor;
+       let value = "";
+       let unescapedValue = "";
+       let escaped = false;
+       if (char === "}") {
+         tokens.push({ type: "}", span: [cursor, cursor] });
+       } else if (char === "'") {
+         tokens.push({ type: "'", span: [cursor, cursor] });
+       }
+       while (cursor < input.length) {
+         if (char === "\\") {
+           escaped = !escaped;
+         } else {
+           escaped = false;
+         }
+         if (peek === "'" && !escaped) {
+           cursor += 2;
+           break;
+         } else if (char === "$" && peek === "{") {
+           cursor += 2;
+           break;
+         }
+         cursor++;
+         char = input[cursor];
+         peek = input[cursor + 1];
+         if (!(char === "$" && peek === "{") && cursor < input.length) {
+           if (
+             !(
+               (char === "\\" && !escaped) // counter-intuitive, but escaped just became false if this was a backslash we want to escape
+             )
+           ) {
+             value += char;
+           }
+           unescapedValue += char;
+         }
+       }
+       const cursorOffset =
+         peek === "'" && !escaped ? 2 : char === "$" && peek === "{" ? 3 : 1;
+       if (value) {
+         tokens.push({
+           type: "string",
+           span: [start + 1, cursor - cursorOffset],
+           value,
+           ...(unescapedValue !== value && { unescapedValue }),
+         });
+       }
+       if (peek === "'" && !escaped) {
+         tokens.push({ type: "'", span: [cursor - 1, cursor - 1] });
+       } else if (char === "$" && peek === "{") {
+         tokens.push({
+           type: "${",
+           span: [cursor - cursorOffset + 1, cursor - 1],
+         });
+       }
+     } else if (WHITE_SPACE.includes(char)) {
+       const start = cursor;
+       while (WHITE_SPACE.includes(input[cursor]) && cursor < input.length) {
+         cursor++;
+       }
+       tokens.push({ type: "ws", span: [start, cursor - 1] });
+     } else {
+       let value = "";
+       const start = cursor;
+       do {
+         char = input[cursor];
+         peek = input[cursor + 1];
+         value += char;
+         cursor++;
+       } while (
+         !WHITE_SPACE.includes(peek) &&
+         peek !== ")" &&
+         peek !== "'" &&
+         cursor < input.length
+       );
+       tokens.push({
+         type: "token",
+         span: [start, cursor - 1],
+         value,
+       });
+     }
+   }
+   return [tokens, cursor];
+ }
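
Since spans are inclusive on both ends, the exact source text behind any non-whitespace token can be recovered with input.slice(span[0], span[1] + 1). For escaped strings that slice equals unescapedValue (which, despite its name, holds the still-escaped source text, while value holds the unescaped result), which is what the last test above asserts. A small sketch under those assumptions (rawText is a hypothetical helper):

import { type Token, tokenize } from "./tokenizer";

// Hypothetical helper: recovers the exact source slice behind a token.
function rawText(input: string, token: Token): string {
  return input.slice(token.span[0], token.span[1] + 1);
}

const input = "'f\\'oo'"; // the source text 'f\'oo'
const [tokens] = tokenize(input);
const str = tokens.find((t) => t.type === "string");
console.log(str?.value); // f'oo
console.log(str?.unescapedValue); // f\'oo
console.log(str && rawText(input, str) === str.unescapedValue); // true
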