@surrealdb/lezer 1.0.0-beta.9 → 1.0.0

This diff shows the changes between publicly released versions of the package as they appear in the public registry, and is provided for informational purposes only.
package/src/tokens.js CHANGED
@@ -1,4 +1,4 @@
- import {ExternalTokenizer} from "@lezer/lr";
+ import { ExternalTokenizer } from "@lezer/lr";

  import {
  _break,
@@ -15,31 +15,40 @@ import {
  _with,
  access,
  algorithm,
+ all,
  alter,
+ always,
  analyzer,
  any,
+ api,
  as,
  asc,
  assert,
  at,
  authenticate,
  auto,
+ backend,
  begin,
  bm25,
+ bucket,
  by,
  cancel,
  capacity,
+ cascade,
  changefeed,
  changes,
+ collate,
  columns,
  comment,
  commit,
+ computed,
  concurrently,
  config,
  content,
  create,
  database,
  db,
+ defer,
  define,
  desc,
  dimension,
@@ -53,10 +62,12 @@ import {
  duration,
  efc,
  end,
+ enforced,
  event,
  exclude,
  exists,
  explain,
+ expunge,
  extend_candidates,
  fetch,
  field,
@@ -65,6 +76,7 @@ import {
  flexible,
  from,
  functions,
+ get,
  graphql,
  group,
  highlights,
@@ -86,6 +98,7 @@ import {
  m,
  m0,
  merge,
+ middleware,
  mtree_cache,
  mtree,
  namespace,
@@ -93,6 +106,8 @@ import {
  normal,
  not,
  ns,
+ numeric,
+ omit,
  on,
  only,
  option,
@@ -105,14 +120,19 @@ import {
  password,
  patch,
  permissions,
+ post,
  postings_cache,
  postings_order,
+ put,
  readonly,
  rebuild,
  record,
+ reference,
+ reject,
  relate,
  relation,
  remove,
+ replace,
  roles,
  root,
  sc,
@@ -126,7 +146,7 @@ import {
  show,
  signin,
  signup,
- since,
+ since as _since,
  sleep,
  split,
  start,
@@ -142,6 +162,7 @@ import {
  to,
  token,
  tokenizers,
+ trace,
  transaction,
  typeKeyword,
  unique,
@@ -153,6 +174,7 @@ import {
  user,
  valueKeyword,
  values,
+ version,
  when,
  where,

@@ -165,98 +187,58 @@ import {
  diff,
  full,
  none,
-
- f32,
- f64,
- i16,
- i32,
- i64,
-
- createPermissions,
- deletePermissions,
- selectPermissions,
- updatePermissions,
-
- jwks,
- eddsa,
- es256,
- es384,
- es512,
- ps256,
- ps384,
- ps512,
- rs256,
- rs384,
- rs512,
-
- allinside,
- and,
- anyinside,
- contains,
- containsall,
- containsany,
- containsnone,
- containsnot,
- inside,
- intersects,
+ IndexTypeClause,
+ TokenType,
  is,
- noneinside,
- notinside,
+ binaryOperatorKeyword,
  opIn,
  opNot,
- or,
- outside,
-
- chebyshev,
- cosine,
- euclidean,
- hamming,
- jaccard,
- manhattan,
+ Distance,
  minkowski,
- pearson,
-
- ascii,
- edgengram,
- ngram,
- snowball,
- uppercase,
-
- _class,
- blank,
- camel,
- punct,
-
+ Filter,
+ AnalyzerTokenizer,
  _function,
  rand,
  count,
-
  objectOpen,
+
+ rangeOp,
+ rangeOpOpenLeft,
+ rangeOpOpenRight,
+ rangeOpOpenBoth
  } from "./parser.terms";

  const tokenMap = {
  access,
  algorithm,
+ all,
  alter,
+ always,
  analyzer,
  any,
+ api,
  as,
  asc,
  assert,
  at,
  authenticate,
  auto,
+ backend,
  begin,
  bm25,
  break: _break,
+ bucket,
  by,
  cancel,
  capacity,
+ cascade,
  changefeed,
  changes,
+ collate,
  columns,
  comment,
  commit,
+ computed,
  concurrently,
  config,
  content,
@@ -265,6 +247,7 @@ const tokenMap = {
  database,
  db,
  default: _default,
+ defer,
  define,
  delete: _delete,
  desc,
@@ -280,10 +263,12 @@ const tokenMap = {
  efc,
  else: _else,
  end,
+ enforced,
  event,
  exclude,
  exists,
  explain,
+ expunge,
  extend_candidates,
  fetch,
  field,
@@ -293,6 +278,7 @@ const tokenMap = {
  for: _for,
  from,
  functions,
+ get,
  graphql,
  group,
  highlights,
@@ -317,6 +303,7 @@ const tokenMap = {
  m,
  m0,
  merge,
+ middleware,
  mtree_cache,
  mtree,
  namespace,
@@ -324,6 +311,8 @@ const tokenMap = {
  normal,
  not,
  ns,
+ numeric,
+ omit,
  on,
  only,
  option,
@@ -336,14 +325,19 @@ const tokenMap = {
  password,
  patch,
  permissions,
+ post,
  postings_cache,
  postings_order,
+ put,
  readonly,
  rebuild,
  record,
+ reference,
+ reject,
  relate,
  relation,
  remove,
+ replace,
  return: _return,
  roles,
  root,
@@ -358,7 +352,7 @@ const tokenMap = {
  show,
  signin,
  signup,
- since,
+ since: _since,
  sleep,
  split,
  start,
@@ -375,6 +369,7 @@ const tokenMap = {
  to,
  token,
  tokenizers,
+ trace,
  transaction,
  type: typeKeyword,
  unique,
@@ -386,6 +381,7 @@ const tokenMap = {
  user,
  value: valueKeyword,
  values,
+ version,
  when,
  where,
  with: _with,
@@ -400,59 +396,58 @@ const tokenMap = {
  null: _null,
  true: _true,

- f32,
- f64,
- i16,
- i32,
- i64,
+ f32: IndexTypeClause,
+ f64: IndexTypeClause,
+ i16: IndexTypeClause,
+ i32: IndexTypeClause,
+ i64: IndexTypeClause,

- jwks,
- eddsa,
- es256,
- es384,
- es512,
- ps256,
- ps384,
- ps512,
- rs256,
- rs384,
- rs512,
+ jwks: TokenType,
+ eddsa: TokenType,
+ es256: TokenType,
+ es384: TokenType,
+ es512: TokenType,
+ ps256: TokenType,
+ ps384: TokenType,
+ ps512: TokenType,
+ rs256: TokenType,
+ rs384: TokenType,
+ rs512: TokenType,

- allinside,
- and,
- anyinside,
- contains,
- containsall,
- containsany,
- containsnone,
- containsnot,
- inside,
- intersects,
+ and: binaryOperatorKeyword,
+ or: binaryOperatorKeyword,
+ contains: binaryOperatorKeyword,
+ containsnot: binaryOperatorKeyword,
+ containsall: binaryOperatorKeyword,
+ containsany: binaryOperatorKeyword,
+ containsnone: binaryOperatorKeyword,
+ inside: binaryOperatorKeyword, notinside: binaryOperatorKeyword,
+ allinside: binaryOperatorKeyword,
+ anyinside: binaryOperatorKeyword,
+ noneinside: binaryOperatorKeyword,
+ outside: binaryOperatorKeyword,
+ intersects: binaryOperatorKeyword,
  is,
- noneinside,
- notinside,
- or,
- outside,

- chebyshev,
- cosine,
- euclidean,
- hamming,
- jaccard,
- manhattan,
+ chebyshev: Distance,
+ cosine: Distance,
+ euclidean: Distance,
+ hamming: Distance,
+ jaccard: Distance,
+ manhattan: Distance,
  minkowski,
- pearson,
+ pearson: Distance,

- ascii,
- edgengram,
- ngram,
- snowball,
- uppercase,
+ ascii: Filter,
+ edgengram: Filter,
+ ngram: Filter,
+ snowball: Filter,
+ uppercase: Filter,

- blank,
- camel,
- class: _class,
- punct,
+ blank: AnalyzerTokenizer,
+ camel: AnalyzerTokenizer,
+ class: AnalyzerTokenizer,
+ punct: AnalyzerTokenizer,

  // Function names
  function: _function,
@@ -460,32 +455,26 @@ const tokenMap = {
  count,
  };

- const tryMapped = new Map([
- ["select", [selectPermissions]],
- ["create", [createPermissions]],
- ["update", [updatePermissions]],
- ["delete", [deletePermissions]],
- ["not", [opNot]],
- ["in", [opIn]],
- ]);
-
- export const tokens = function(t, stack) {
- for (const tk of tryMapped.get(t.toLowerCase()) ?? []) {
- if (stack.canShift(tk)) return tk;
- }
-
+ export const tokens = function (t, stack) {
  return tokenMap[t.toLowerCase()] ?? -1;
+ };
+
+ function isSpace(ch) {
+ return ch === 32 || ch === 9 || ch === 10 || ch === 13
  }

  function skipSpace(input, off) {
- for (;;) {
+ for (; ;) {
  let next = input.peek(off);
- if (next === 32 || next === 9 || next === 10 || next === 13) {
+ if (isSpace(next)) {
  off++;
- } else if (next === 35 /* '#' */ ||
- (next === 47 /* '/' */ || next === 45 /* '-' */) && input.peek(off + 1) === next) {
+ } else if (
+ next === 35 /* '#' */ ||
+ ((next === 47 /* '/' */ || next === 45) /* '-' */ &&
+ input.peek(off + 1) === next)
+ ) {
  off++;
- for (;;) {
+ for (; ;) {
  let next = input.peek(off);
  if (next < 0 || next === 10 || next === 13) break;
  off++;
@@ -497,7 +486,12 @@ function skipSpace(input, off) {
  }

  function isIdentifierChar(ch) {
- return ch === 95 || ch >= 65 && ch <= 90 || ch >= 97 && ch <= 122 || ch >= 48 && ch <= 57;
+ return (
+ ch === 95 ||
+ (ch >= 65 && ch <= 90) ||
+ (ch >= 97 && ch <= 122) ||
+ (ch >= 48 && ch <= 57)
+ );
  }

  function skipObjKey(input, off) {
@@ -507,17 +501,19 @@ function skipObjKey(input, off) {
  off++;
  } while (isIdentifierChar(input.peek(off)));
  return off;
- } else if (first === 38 /* "'" */ || first === 34 /* '"' */) {
- for (let escaped = false;;) {
+ } else if (first === 39 /* "'" */ || first === 34 /* '"' */) {
+ for (let escaped = false; ;) {
  let next = input.peek(++off);
  if (next < 0) return off;
  if (next === first && !escaped) return off + 1;
- escaped = next === 92 /* '\\' */
+ escaped = next === 92; /* '\\' */
  }
  }
+
+ return null;
  }

- export const objectToken = new ExternalTokenizer((input, _stack) => {
+ export const objectToken = new ExternalTokenizer(input => {
  if (input.next === 123 /* '{' */) {
  let off = skipSpace(input, 1);

@@ -546,3 +542,30 @@ export const objectToken = new ExternalTokenizer((input, _stack) => {
  }
  }
  });
+
+ function closedRangeBefore(ch) {
+ return isSpace(ch) || ch < 0 || ch === 91 /* '[' */ || ch === 44 /* ',' */ ||
+ ch === 123 /* '{' */ || ch === 40 /* '(' */ || ch === 59 /* ';' */ || ch === 58 /* ':' */
+ }
+
+ function closedRangeAfter(ch) {
+ return isSpace(ch) || ch < 0 || ch === 93 /* ']' */ || ch === 44 /* ',' */ ||
+ ch === 125 /* '}' */ || ch === 41 /* ')' */ || ch === 59 /* ';' */ || ch === 58 /* ':' */
+ }
+
+ export const rangeOperator = new ExternalTokenizer(input => {
+ if (input.next === 46 /* '.' */ && input.peek(1) === 46 ||
+ input.next === 62 /* '>' */ && input.peek(1) === 46 && input.peek(1) === 46) {
+ let inclStart = input.next !== 62
+ let closedBefore = closedRangeBefore(input.peek(-1))
+ if (!inclStart && closedBefore) return
+ input.advance(inclStart ? 2 : 3)
+ let inclEnd = input.next === 61 /* '=' */
+ if (inclEnd) input.advance()
+ let closedAfter = closedRangeAfter(input.next) || input.next < 0
+ if (inclEnd && closedAfter) return
+ input.acceptToken(closedBefore && closedAfter ? rangeOpOpenBoth
+ : closedBefore ? rangeOpOpenLeft
+ : closedAfter ? rangeOpOpenRight : rangeOp)
+ }
+ })
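
For context (not part of the published diff): the new rangeOperator tokenizer picks one of four range tokens purely from what surrounds the dots. A standalone JavaScript sketch of that decision logic, with names invented here, behaves as follows:

// Sketch only: restates the closedRangeBefore/closedRangeAfter logic above for a
// source string and the index of the first '.' or '>' of the operator.
function classifyRange(src, i) {
  const isDelim = ch => ch === undefined || /[\s[\]{}(),;:]/.test(ch);
  const exclusiveStart = src[i] === ">";     // '>..' form
  const openLeft = isDelim(src[i - 1]);      // no start operand before the operator
  let j = i + (exclusiveStart ? 3 : 2);      // skip '..' or '>..'
  if (src[j] === "=") j++;                   // '..=' inclusive end
  const openRight = isDelim(src[j]);         // no end operand after the operator
  if (openLeft && openRight) return "rangeOpOpenBoth";  // ..
  if (openLeft) return "rangeOpOpenLeft";                // ..5
  if (openRight) return "rangeOpOpenRight";              // 1..
  return "rangeOp";                                      // 1..5
}

The real tokenizer additionally refuses to emit a token when an exclusive start ('>..') has nothing before it, or when an inclusive end ('..=') has nothing after it.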
@@ -0,0 +1,6 @@
+ import type { NodeProp } from "@lezer/common";
+ import type { LRParser } from "@lezer/lr";
+
+ export const parser: LRParser;
+ export const sinceProp: NodeProp<string>;
+ export const untilProp: NodeProp<string>;
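
For context (not part of the diff): a minimal usage sketch of the typings added above, assuming the package entry point exports them as declared; the node names follow the test cases later in this diff.

import { parser, sinceProp } from "@surrealdb/lezer";

const tree = parser.parse("SELECT * FROM person");
tree.iterate({
  enter(node) {
    // sinceProp/untilProp are NodeProps attached to node types by the grammar, so
    // node.type.prop(...) yields the version string when the node carries one.
    const since = node.type.prop(sinceProp);
    if (since) console.log(node.name, "available since", since);
  },
});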
package/src/version.js ADDED
@@ -0,0 +1,15 @@
+ import { NodeProp } from '@lezer/common';
+
+ export const since = new NodeProp({
+ perNode: false,
+ deserialize(str) {
+ return str.replace(/_/g, '.').toLowerCase();
+ },
+ });
+
+ export const until = new NodeProp({
+ perNode: false,
+ deserialize(str) {
+ return str.replace(/_/g, '.').toLowerCase();
+ },
+ });
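
Not part of the diff: the deserialize hook converts the underscore-encoded value stored as a node prop in the grammar into a dotted, lower-cased version string. A quick sketch (the input literals are assumed examples):

import { since } from "./version.js";

console.log(since.deserialize("2_0_0")); // "2.0.0"
console.log(since.deserialize("1_5_0")); // "1.5.0"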
@@ -45,3 +45,88 @@ SurrealQL(
  LiveSelectStatement(Keyword,Keyword,Any,Keyword,Ident),
  LiveSelectStatement(Keyword,Keyword,Any,Keyword,RecordId(RecordTbIdent,Colon,RecordIdIdent)),
  )
+
+ # Let Substatements
+
+ LET $bla = (SELECT * FROM 123);
+ LET $bla = SELECT * FROM 123;
+
+ ==>
+
+ SurrealQL(
+ LetStatement(Keyword,VariableName,SubQuery(SelectStatement(Keyword,Any,Keyword,Int))),
+ LetStatement(Keyword,VariableName,SelectStatement(Keyword,Any,Keyword,Int)))
+
+ # Block Comments
+
+ 1 /* this is
+ a block
+ comment */ + 2;
+
+ ==>
+
+ SurrealQL(BinaryExpression(Int,BlockComment,Operator,Int))
+
+ # Version Clause
+
+ SELECT * FROM person VERSION d'2024-10-10T14:37:08.444Z'
+
+ ==>
+
+ SurrealQL(SelectStatement(Keyword,Any,Keyword,Ident,VersionClause(Keyword,String)))
+
+
+ # Recurse part
+
+ person:tobie.{..}->knows->person;
+ person:tobie.{..}(->knows->person).name;
+ person:tobie.{..}.{ id, name, knows: ->knows->person.@ };
+ a:1.{1};
+ a:1.{1..};
+ a:1.{1..2};
+ a:1.{..2};
+ @.name;
+ @name;
+
+ ==>
+
+ SurrealQL(
+ Path(
+ RecordId(RecordTbIdent,Colon,RecordIdIdent),
+ Subscript(Recurse(BraceOpen,RecurseRange(RangeOp),BraceClose)),
+ GraphPath(ArrowRight,Ident), GraphPath(ArrowRight,Ident)
+ ),
+ Path(
+ RecordId(RecordTbIdent,Colon,RecordIdIdent),
+ Subscript(
+ Recurse(
+ BraceOpen, RecurseRange(RangeOp), BraceClose,
+ Path(GraphPath(ArrowRight,Ident), GraphPath(ArrowRight,Ident))
+ )
+ ),
+ Subscript(Ident)
+ ),
+ Path(
+ RecordId(RecordTbIdent,Colon,RecordIdIdent),
+ Subscript(Recurse(BraceOpen,RecurseRange(RangeOp),BraceClose)),
+ Subscript(Destructure(BraceOpen,Ident,Ident,Ident,Colon,GraphPath(ArrowRight,Ident),GraphPath(ArrowRight,Ident),Subscript(At),BraceClose))
+ ),
+ Path(
+ RecordId(RecordTbIdent,Colon,Int),
+ Subscript(Recurse(BraceOpen,RecurseRange(Int),BraceClose))
+ ),
+ Path(
+ RecordId(RecordTbIdent,Colon,Int),
+ Subscript(Recurse(BraceOpen,RecurseRange(Int,RangeOp),BraceClose))
+ ),
+ Path(
+ RecordId(RecordTbIdent,Colon,Int),
+ Subscript(Recurse(BraceOpen,RecurseRange(Int,RangeOp,Int),BraceClose))
+ ),
+ Path(
+ RecordId(RecordTbIdent,Colon,Int),
+ Subscript(Recurse(BraceOpen,RecurseRange(RangeOp,Int),BraceClose))
+ ),
+ Path(At,Subscript(Ident)),
+ Path(At,Ident)
+ )
@@ -1,9 +1,9 @@
- import {parser} from "../dist/index.js";
- import {fileTests} from "@lezer/generator/dist/test";
+ import { parser } from "../dist/index.js";
+ import { fileTests } from "@lezer/generator/dist/test";

  import * as fs from "fs";
  import * as path from "path";
- import {fileURLToPath} from "url";
+ import { fileURLToPath } from "url";
  let caseDir = path.dirname(fileURLToPath(import.meta.url));

  let filter = process.argv[2];
@@ -11,12 +11,12 @@ let filter = process.argv[2];
  for (let file of fs.readdirSync(caseDir)) {
  if (!/\.txt$/.test(file)) continue;
  console.log("File " + file + ":");
- for (let {name, run} of fileTests(fs.readFileSync(path.join(caseDir, file), "utf8"), file)) {
+ for (let { name, run } of fileTests(fs.readFileSync(path.join(caseDir, file), "utf8"), file)) {
  if (!filter || name.indexOf(filter) > -1) {
  try {
  run(parser);
  console.log(" ✔ " + name);
- } catch(e) {
+ } catch (e) {
  console.log(" ✘ " + name + "\n " + String(e.message || e).replace(/\n/g, "\n "))
  }
  }
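
For context (not part of the diff): fileTests parses case files in the "# Name / input / ==> / expected tree" format used above and returns { name, run } pairs, where run(parser) throws when the actual parse tree differs from the expected one. A minimal sketch reusing one of the new cases; importing the package by name is an assumption, the runner above uses ../dist/index.js.

import { parser } from "@surrealdb/lezer";
import { fileTests } from "@lezer/generator/dist/test";

const cases = `# Version Clause

SELECT * FROM person VERSION d'2024-10-10T14:37:08.444Z'

==>

SurrealQL(SelectStatement(Keyword,Any,Keyword,Ident,VersionClause(Keyword,String)))
`;

for (const { name, run } of fileTests(cases, "inline.txt")) {
  run(parser);                 // throws with a descriptive message on mismatch
  console.log("ok: " + name);
}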