bobe 0.0.41 → 0.0.42

This diff shows the changes between two publicly released versions of this package, as published to a supported public registry. It is provided for informational purposes only and reflects the package contents as they appear in that registry.
@@ -235,7 +235,7 @@ class Tokenizer {
235
235
  if (!this.token) return false;
236
236
  return this.token.type & TokenType.Identifier && this.token.value === Tokenizer.EofId;
237
237
  }
238
- setToken(type, value) {
238
+ setToken(type, value, dt = 1) {
239
239
  this.token = {
240
240
  type,
241
241
  typeName: TokenType[type],
@@ -247,11 +247,11 @@ class Tokenizer {
247
247
  column: this.preCol
248
248
  },
249
249
  end: {
250
- offset: this.i + 1,
250
+ offset: this.i + dt,
251
251
  line: this.line,
252
- column: this.column + 1
252
+ column: this.column + dt
253
253
  },
254
- source: this.code.slice(this.preI, this.i + 1)
254
+ source: this.code.slice(this.preI, this.i + dt)
255
255
  } : null
256
256
  };
257
257
  this.isFirstToken = false;
@@ -361,26 +361,22 @@ class Tokenizer {
361
361
  }
362
362
  let value = '';
363
363
  this.token = null;
364
- try {
365
- if (this.code[this.i] === '\n') {
366
- this.setToken(TokenType.Identifier, true);
367
- return this.token;
368
- }
369
- while (this.code[this.i + 1] !== '\n') {
370
- value += this.code[this.i];
371
- this.next();
364
+ let char = this.code[this.i];
365
+ while (char !== '\n') {
366
+ if (char === '"' || char === "'") {
367
+ value += char + this.getStr(char);
372
368
  }
373
369
  value += this.code[this.i];
374
- const trimmed = value.replace(/\/\/[\s\S]+/, '').trim();
375
- this.setToken(TokenType.Identifier, trimmed ? trimmed : true);
376
- return this.token;
377
- } finally {
378
370
  this.next();
379
- this.handledTokens.push(this.token);
380
- {
381
- this.needLoc = false;
382
- }
371
+ char = this.code[this.i];
372
+ }
373
+ const trimmed = value.replace(/\/\/[\s\S]+/, '').trim();
374
+ this.setToken(TokenType.Identifier, trimmed ? trimmed : true, 0);
375
+ this.handledTokens.push(this.token);
376
+ {
377
+ this.needLoc = false;
383
378
  }
379
+ return this.token;
384
380
  }
385
381
  isEol(i) {
386
382
  return this.code[i] === '\n' || this.code[i] === '/';
@@ -393,28 +389,21 @@ class Tokenizer {
393
389
  }
394
390
  this.token = null;
395
391
  let value = '';
396
- try {
397
- const char = this.code[this.i];
398
- if (char === ';' || char === '\n') {
399
- this.setToken(TokenType.Identifier, value);
400
- return this.token;
401
- }
402
- let nextC = this.code[this.i + 1];
403
- while (nextC !== ';' && nextC !== '\n') {
404
- value += this.code[this.i];
405
- this.next();
406
- nextC = this.code[this.i + 1];
392
+ let char = this.code[this.i];
393
+ while (char !== ';' && char !== '\n') {
394
+ if (char === '"' || char === "'") {
395
+ value += char + this.getStr(char);
407
396
  }
408
397
  value += this.code[this.i];
409
- this.setToken(TokenType.Identifier, value);
410
- return this.token;
411
- } finally {
412
398
  this.next();
413
- this.handledTokens.push(this.token);
414
- {
415
- this.needLoc = false;
416
- }
399
+ char = this.code[this.i];
400
+ }
401
+ this.setToken(TokenType.Identifier, value, 0);
402
+ this.handledTokens.push(this.token);
403
+ {
404
+ this.needLoc = false;
417
405
  }
406
+ return this.token;
418
407
  }
419
408
  peekChar() {
420
409
  let i = this.i;
@@ -472,10 +461,8 @@ class Tokenizer {
472
461
  startLine = this.line,
473
462
  startCol = this.preCol;
474
463
  let inComment,
475
- inString,
476
464
  count = 0,
477
- value = '',
478
- backslashCount = 0;
465
+ value = '';
479
466
  while (1) {
480
467
  const char = this.code[this.i];
481
468
  if (char === undefined) {
@@ -488,11 +475,6 @@ class Tokenizer {
488
475
  inComment = null;
489
476
  value += this.code[this.i];
490
477
  this.next();
491
- } else if (inString) {
492
- if (char === inString && backslashCount % 2 === 0) {
493
- inString = null;
494
- }
495
- backslashCount = char === '\\' ? backslashCount + 1 : 0;
496
478
  } else {
497
479
  if (char === '/' && nextChar === '/') {
498
480
  inComment = 'single';
@@ -502,15 +484,15 @@ class Tokenizer {
502
484
  inComment = 'multi';
503
485
  value += this.code[this.i];
504
486
  this.next();
505
- } else if (char === "'" || char === '"' || char === '`') {
506
- inString = char;
487
+ } else if (char === "'" || char === '"') {
488
+ value += char + this.getStr(char);
507
489
  } else if (char === '{') {
508
490
  count++;
509
491
  } else if (char === '}') {
510
492
  count--;
511
493
  }
512
494
  }
513
- if (count === 0 && inString == null && inComment == null) {
495
+ if (count === 0 && inComment == null) {
514
496
  return value.slice(1);
515
497
  }
516
498
  value += this.code[this.i];
@@ -706,7 +688,7 @@ class Tokenizer {
706
688
  }
707
689
  this.setToken(tokenType, realValue);
708
690
  }
709
- str(char) {
691
+ getStr(head, parseEscape = true) {
710
692
  const startOffset = this.preI,
711
693
  startLine = this.line,
712
694
  startCol = this.preCol;
@@ -725,11 +707,18 @@ class Tokenizer {
725
707
  continuousBackslashCount = 0;
726
708
  }
727
709
  this.next();
728
- if (nextC === char && memoCount % 2 === 0) {
710
+ if (nextC === head && memoCount % 2 === 0) {
729
711
  break;
730
712
  }
731
- value += nextC;
713
+ bobeShared.escapeMap[nextC];
714
+ {
715
+ value += nextC;
716
+ }
732
717
  }
718
+ return value;
719
+ }
720
+ str(char) {
721
+ const value = this.getStr(char, false);
733
722
  this.setToken(TokenType.String, value);
734
723
  }
735
724
  number(char) {
@@ -1169,7 +1158,7 @@ class Compiler {
1169
1158
  }
1170
1159
  parseLoopNode(node) {
1171
1160
  const forLoc = this.tokenizer.token.loc ?? this.tokenizer.emptyLoc();
1172
- this.tokenizer.nextToken();
1161
+ this.tokenizer.jsExp();
1173
1162
  const collection = this.parseJsExp();
1174
1163
  if (!collection.value && collection.value !== 0) {
1175
1164
  this.addError(ParseErrorCode.MISSING_FOR_COLLECTION, '"for" 缺少集合表达式', forLoc, node);
@@ -1630,7 +1619,7 @@ class Interpreter {
1630
1619
  return _node;
1631
1620
  }
1632
1621
  forDeclaration() {
1633
- const arrExp = this.tokenizer.nextToken().value;
1622
+ const arrExp = this.tokenizer.jsExp().value;
1634
1623
  this.tokenizer.nextToken();
1635
1624
  const itemToken = this.tokenizer.nextToken();
1636
1625
  const isDestruct = itemToken.type === TokenType.InsertionExp;