@xano/xanoscript-language-server 11.0.4 → 11.0.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -26,7 +26,9 @@
26
26
  "Bash(npm whoami:*)",
27
27
  "Bash(npm view:*)",
28
28
  "Bash(npm version:*)",
29
- "Bash(npm publish:*)"
29
+ "Bash(npm publish:*)",
30
+ "Bash(wc:*)",
31
+ "Bash(git push:*)"
30
32
  ],
31
33
  "deny": [],
32
34
  "ask": []
@@ -0,0 +1,91 @@
1
+ import { lexDocument } from "../lexer/lexer.js";
2
+ import { xanoscriptParser } from "../parser/parser.js";
3
+ import { getSchemeFromContent } from "../utils.js";
4
+
5
+ /**
6
+ * Cache for parsed documents to avoid redundant parsing.
7
+ * Stores parse results keyed by document URI + version.
8
+ * Note: We cache a snapshot of parser state, not the parser instance itself,
9
+ * since the parser is a singleton that gets mutated on each parse.
10
+ */
11
+ class DocumentCache {
12
+ constructor() {
13
+ // Map<uri, { version: number, lexResult: Object, parserState: Object, scheme: string }>
14
+ this.cache = new Map();
15
+ }
16
+
17
+ /**
18
+ * Get cached parse result or parse and cache if not available.
19
+ * @param {string} uri - Document URI
20
+ * @param {number} version - Document version
21
+ * @param {string} text - Document text
22
+ * @returns {{ lexResult: Object, parser: Object, scheme: string }}
23
+ */
24
+ getOrParse(uri, version, text) {
25
+ const cached = this.cache.get(uri);
26
+ const textLength = text.length;
27
+
28
+ // Check version and text length to detect same URI with different content (e.g., in tests)
29
+ if (
30
+ cached &&
31
+ cached.version === version &&
32
+ cached.textLength === textLength
33
+ ) {
34
+ // Return cached result with a proxy parser object containing cached state
35
+ return {
36
+ lexResult: cached.lexResult,
37
+ parser: cached.parserState,
38
+ scheme: cached.scheme,
39
+ };
40
+ }
41
+
42
+ // Parse and cache a snapshot of the parser state
43
+ const scheme = getSchemeFromContent(text);
44
+ const lexResult = lexDocument(text);
45
+ const parser = xanoscriptParser(text, scheme, lexResult);
46
+
47
+ // Create a snapshot of the parser's state including symbol table
48
+ const parserState = {
49
+ errors: [...parser.errors],
50
+ warnings: [...parser.warnings],
51
+ informations: [...parser.informations],
52
+ hints: [...parser.hints],
53
+ __symbolTable: parser.__symbolTable
54
+ ? JSON.parse(JSON.stringify(parser.__symbolTable))
55
+ : null,
56
+ };
57
+
58
+ const cacheEntry = {
59
+ version,
60
+ textLength,
61
+ lexResult,
62
+ parserState,
63
+ scheme,
64
+ };
65
+
66
+ this.cache.set(uri, cacheEntry);
67
+
68
+ return {
69
+ lexResult,
70
+ parser: parserState,
71
+ scheme,
72
+ };
73
+ }
74
+
75
+ /**
76
+ * Invalidate cache for a document.
77
+ * @param {string} uri - Document URI
78
+ */
79
+ invalidate(uri) {
80
+ this.cache.delete(uri);
81
+ }
82
+
83
+ /**
84
+ * Clear all cached documents.
85
+ */
86
+ clear() {
87
+ this.cache.clear();
88
+ }
89
+ }
90
+
91
+ export const documentCache = new DocumentCache();
package/debug.js ADDED
@@ -0,0 +1,18 @@
1
+ /* global process */
2
+ /**
3
+ * Debug logging utility.
4
+ * Set XS_DEBUG=1 environment variable to enable debug logging.
5
+ */
6
+ const isDebug = process.env.XS_DEBUG === "1";
7
+
8
+ export function debugLog(...args) {
9
+ if (isDebug) {
10
+ console.log(...args);
11
+ }
12
+ }
13
+
14
+ export function debugError(...args) {
15
+ if (isDebug) {
16
+ console.error(...args);
17
+ }
18
+ }
package/lexer/tokens.js CHANGED
@@ -447,80 +447,91 @@ const tokenMappers = [
447
447
  mapZipTokenToType,
448
448
  ];
449
449
 
450
- /**
451
- * Map a token to a type (e.g., keyword, variable, etc.)
452
- * @param {import('chevrotain').TokenType} token
453
- * @returns string | null | undefined The type of the token
454
- */
455
- export function mapTokenToType(token) {
456
- // Check if the token is a keyword
457
- for (const mapper of tokenMappers) {
458
- const type = mapper(token);
459
- if (type) {
460
- return type;
461
- }
462
- }
463
-
464
- switch (token) {
465
- // Structural and control keywords (e.g., query blocks, conditionals)
466
- case Cachetoken.name:
467
- case HistoryToken.name:
468
- case IndexToken.name:
469
- case InputToken.name:
470
- case MiddlewareToken.name:
471
- case MockToken.name:
472
- case ResponseToken.name:
473
- case ViewToken.name:
474
- case SchemaToken.name:
475
- case SecurityToken.name:
476
- case StackToken.name:
477
- case TestToken.name:
478
- case FiltersToken.name:
479
- return "keyword";
480
-
481
- case DbLinkToken.name:
482
- return "function";
450
+ // Pre-built token type map for O(1) lookups
451
+ const tokenTypeMap = new Map();
452
+
453
+ // Build the map at module initialization time
454
+ function buildTokenTypeMap() {
455
+ // Add local token mappings first
456
+ const localMappings = {
457
+ // Structural and control keywords
458
+ [Cachetoken.name]: "keyword",
459
+ [HistoryToken.name]: "keyword",
460
+ [IndexToken.name]: "keyword",
461
+ [InputToken.name]: "keyword",
462
+ [MiddlewareToken.name]: "keyword",
463
+ [MockToken.name]: "keyword",
464
+ [ResponseToken.name]: "keyword",
465
+ [ViewToken.name]: "keyword",
466
+ [SchemaToken.name]: "keyword",
467
+ [SecurityToken.name]: "keyword",
468
+ [StackToken.name]: "keyword",
469
+ [TestToken.name]: "keyword",
470
+ [FiltersToken.name]: "keyword",
471
+
472
+ [DbLinkToken.name]: "function",
483
473
 
484
474
  // Variable-related tokens
485
- case AuthToken.name:
486
- case DbIdentifier.name:
487
- case DbReturnAggregateToken.name:
488
- case DescriptionToken.name:
489
- case DisabledToken.name:
490
- case DocsToken.name:
491
- case FieldToken.name: // field is also used as variable name in index definition
492
- case GuidToken.name:
493
- case SensitiveToken.name:
494
- case TagsToken.name:
495
- case TypeToken.name: // type is used as a variable name in index definition
496
- case ValueToken.name:
497
- case ValuesToken.name:
498
- return "variable";
499
-
500
- case Identifier.name:
501
- return "property";
502
-
503
- case FalseToken.name:
504
- case NowToken.name:
505
- case NullToken.name:
506
- case TrueToken.name:
507
- return "enumMember";
508
-
509
- case DotToken.name:
510
- return "punctuation";
511
-
512
- case RegExpToken.name:
513
- return "regexp";
514
-
515
- case JsonInToken.name:
516
- return "operator";
475
+ [AuthToken.name]: "variable",
476
+ [DbIdentifier.name]: "variable",
477
+ [DbReturnAggregateToken.name]: "variable",
478
+ [DescriptionToken.name]: "variable",
479
+ [DisabledToken.name]: "variable",
480
+ [DocsToken.name]: "variable",
481
+ [FieldToken.name]: "variable",
482
+ [GuidToken.name]: "variable",
483
+ [SensitiveToken.name]: "variable",
484
+ [TagsToken.name]: "variable",
485
+ [TypeToken.name]: "variable",
486
+ [ValueToken.name]: "variable",
487
+ [ValuesToken.name]: "variable",
488
+
489
+ [Identifier.name]: "property",
490
+
491
+ [FalseToken.name]: "enumMember",
492
+ [NowToken.name]: "enumMember",
493
+ [NullToken.name]: "enumMember",
494
+ [TrueToken.name]: "enumMember",
495
+
496
+ [DotToken.name]: "punctuation",
497
+ [RegExpToken.name]: "regexp",
498
+ [JsonInToken.name]: "operator",
517
499
 
518
500
  // Skip whitespace and newlines
519
- case NewlineToken.name:
520
- case WhiteSpace.name:
521
- return null;
501
+ [NewlineToken.name]: null,
502
+ [WhiteSpace.name]: null,
503
+ };
504
+
505
+ for (const [tokenName, type] of Object.entries(localMappings)) {
506
+ tokenTypeMap.set(tokenName, type);
507
+ }
508
+
509
+ // Iterate through all tokens and populate the map using the mappers
510
+ for (const token of allTokens) {
511
+ if (tokenTypeMap.has(token.name)) continue;
522
512
 
523
- default:
524
- return undefined; // Skip unmapped or unknown tokens
513
+ for (const mapper of tokenMappers) {
514
+ const type = mapper(token.name);
515
+ if (type) {
516
+ tokenTypeMap.set(token.name, type);
517
+ break;
518
+ }
519
+ }
520
+ }
521
+ }
522
+
523
+ // Initialize the map at module load
524
+ buildTokenTypeMap();
525
+
526
+ /**
527
+ * Map a token to a type (e.g., keyword, variable, etc.)
528
+ * Uses pre-built map for O(1) lookup instead of O(n) iteration.
529
+ * @param {string} token - Token name
530
+ * @returns {string | null | undefined} The type of the token
531
+ */
532
+ export function mapTokenToType(token) {
533
+ if (tokenTypeMap.has(token)) {
534
+ return tokenTypeMap.get(token);
525
535
  }
536
+ return undefined; // Skip unmapped or unknown tokens
526
537
  }
@@ -109,22 +109,23 @@ function isAfterPipeToken(tokens) {
109
109
  return lastToken.tokenType === PipeToken;
110
110
  }
111
111
 
112
- function createFilterSuggestions() {
113
- return filterNames.map((filterName) => {
112
+ // Pre-computed filter suggestions - frozen to prevent accidental mutations
113
+ const filterSuggestions = Object.freeze(
114
+ filterNames.map((filterName) => {
114
115
  const documentation = filterMessageProvider.__filterDoc[filterName];
115
116
 
116
- return {
117
+ return Object.freeze({
117
118
  label: filterName,
118
119
  kind: encodeTokenType("function"), // Filters are function-like
119
120
  documentation: documentation
120
- ? {
121
+ ? Object.freeze({
121
122
  kind: "markdown",
122
123
  value: documentation,
123
- }
124
+ })
124
125
  : undefined,
125
- };
126
- });
127
- }
126
+ });
127
+ })
128
+ );
128
129
 
129
130
  export function getContentAssistSuggestions(text, scheme) {
130
131
  try {
@@ -134,7 +135,7 @@ export function getContentAssistSuggestions(text, scheme) {
134
135
 
135
136
  // Check if we're after a pipe token - if so, suggest filters
136
137
  if (isAfterPipeToken(partialTokenVector)) {
137
- return createFilterSuggestions();
138
+ return filterSuggestions;
138
139
  }
139
140
 
140
141
  let syntacticSuggestions;
@@ -1,5 +1,55 @@
1
- import { xanoscriptParser } from "../parser/parser";
2
- import { getSchemeFromContent } from "../utils";
1
+ import { documentCache } from "../cache/documentCache.js";
2
+ import { debugError, debugLog } from "../debug.js";
3
+
4
+ // Diagnostic severity constants
5
+ const SEVERITY = {
6
+ ERROR: 1,
7
+ WARNING: 2,
8
+ INFORMATION: 3,
9
+ HINT: 4,
10
+ };
11
+
12
+ /**
13
+ * Creates diagnostics from parser results in a single pass.
14
+ * @param {Object} parser - The parser with errors, warnings, informations, hints
15
+ * @param {Object} document - The text document for position conversion
16
+ * @returns {Array} Array of diagnostic objects
17
+ */
18
+ function createDiagnostics(parser, document) {
19
+ const diagnostics = [];
20
+ const defaultRange = {
21
+ start: { line: 0, character: 0 },
22
+ end: { line: 0, character: 1 },
23
+ };
24
+
25
+ const addDiagnostic = (item, severity) => {
26
+ diagnostics.push({
27
+ severity,
28
+ range: item.token
29
+ ? {
30
+ start: document.positionAt(item.token.startOffset),
31
+ end: document.positionAt(item.token.endOffset + 1),
32
+ }
33
+ : defaultRange,
34
+ message: item.message,
35
+ });
36
+ };
37
+
38
+ for (const error of parser.errors) {
39
+ addDiagnostic(error, SEVERITY.ERROR);
40
+ }
41
+ for (const warning of parser.warnings) {
42
+ addDiagnostic(warning, SEVERITY.WARNING);
43
+ }
44
+ for (const info of parser.informations) {
45
+ addDiagnostic(info, SEVERITY.INFORMATION);
46
+ }
47
+ for (const hint of parser.hints) {
48
+ addDiagnostic(hint, SEVERITY.HINT);
49
+ }
50
+
51
+ return diagnostics;
52
+ }
3
53
 
4
54
  /**
5
55
  *
@@ -11,7 +61,7 @@ export function onDidChangeContent(params, connection) {
11
61
  const document = params.document;
12
62
 
13
63
  if (!document) {
14
- console.error(
64
+ debugError(
15
65
  "onDidChangeContent(): Document not found for URI:",
16
66
  params.textDocument.uri
17
67
  );
@@ -20,96 +70,36 @@ export function onDidChangeContent(params, connection) {
20
70
 
21
71
  const text = document.getText();
22
72
 
23
- const scheme = getSchemeFromContent(text);
24
-
25
73
  try {
26
- // Parse the XanoScript file
27
- const parser = xanoscriptParser(text, scheme);
74
+ // Parse the XanoScript file using cache
75
+ const { parser, scheme } = documentCache.getOrParse(
76
+ document.uri,
77
+ document.version,
78
+ text
79
+ );
28
80
 
29
- if (parser.errors.length > 0) {
30
- // If parsing succeeds, send an empty diagnostics array (no errors)
81
+ if (parser.errors.length === 0) {
82
+ // If parsing succeeds with no errors, send an empty diagnostics array
31
83
  connection.sendDiagnostics({ uri: document.uri, diagnostics: [] });
32
84
  }
33
85
 
34
86
  for (const error of parser.errors) {
35
- console.error(
87
+ debugError(
36
88
  `onDidChangeContent(): Error parsing document: ${error.name}`
37
89
  );
38
90
  }
39
91
 
40
- // If parsing fails, create a diagnostic (error message) to display in VS Code
41
- const errors = parser.errors.map((error) => {
42
- return {
43
- severity: 1,
44
- range: error.token
45
- ? {
46
- start: document.positionAt(error.token.startOffset),
47
- end: document.positionAt(error.token.endOffset + 1),
48
- }
49
- : {
50
- start: { line: 0, character: 0 },
51
- end: { line: 0, character: 1 },
52
- },
53
- message: error.message,
54
- };
55
- });
56
-
57
- const warnings = parser.warnings.map((warning) => {
58
- return {
59
- severity: 2,
60
- range: warning.token
61
- ? {
62
- start: document.positionAt(warning.token.startOffset),
63
- end: document.positionAt(warning.token.endOffset + 1),
64
- }
65
- : {
66
- start: { line: 0, character: 0 },
67
- end: { line: 0, character: 1 },
68
- },
69
- message: warning.message,
70
- };
71
- });
72
-
73
- const informations = parser.informations.map((info) => {
74
- return {
75
- severity: 3,
76
- range: info.token
77
- ? {
78
- start: document.positionAt(info.token.startOffset),
79
- end: document.positionAt(info.token.endOffset + 1),
80
- }
81
- : {
82
- start: { line: 0, character: 0 },
83
- end: { line: 0, character: 1 },
84
- },
85
- message: info.message,
86
- };
87
- });
88
-
89
- const hints = parser.hints.map((hint) => {
90
- return {
91
- severity: 4,
92
- range: hint.token
93
- ? {
94
- start: document.positionAt(hint.token.startOffset),
95
- end: document.positionAt(hint.token.endOffset + 1),
96
- }
97
- : {
98
- start: { line: 0, character: 0 },
99
- end: { line: 0, character: 1 },
100
- },
101
- message: hint.message,
102
- };
103
- });
92
+ // Create diagnostics in a single pass
93
+ const diagnostics = createDiagnostics(parser, document);
104
94
 
105
- console.log(
95
+ debugLog(
106
96
  `onDidChangeContent(): sending diagnostic (${parser.errors.length} errors) for scheme:`,
107
97
  scheme
108
98
  );
109
99
 
110
100
  connection.sendDiagnostics({
111
101
  uri: document.uri,
112
- diagnostics: [...errors, ...warnings, ...informations, ...hints],
102
+ diagnostics,
113
103
  });
114
104
  } catch (error) {
115
105
  // If parsing fails, create a diagnostic (error message) to display in VS Code
@@ -1,6 +1,32 @@
1
- import { lexDocument } from "../lexer/lexer.js";
2
- import { xanoscriptParser } from "../parser/parser.js";
3
- import { getSchemeFromContent } from "../utils.js";
1
+ import { documentCache } from "../cache/documentCache.js";
2
+ import { debugError } from "../debug.js";
3
+
4
+ /**
5
+ * Binary search to find token index at the given offset.
6
+ * Reduces complexity from O(n) to O(log n).
7
+ * @param {Array} tokens - Array of tokens sorted by startOffset
8
+ * @param {number} offset - Cursor offset position
9
+ * @returns {number} Token index or -1 if not found
10
+ */
11
+ function findTokenAtOffset(tokens, offset) {
12
+ let left = 0;
13
+ let right = tokens.length - 1;
14
+
15
+ while (left <= right) {
16
+ const mid = Math.floor((left + right) / 2);
17
+ const token = tokens[mid];
18
+
19
+ if (token.startOffset <= offset && token.endOffset >= offset) {
20
+ return mid;
21
+ } else if (token.endOffset < offset) {
22
+ left = mid + 1;
23
+ } else {
24
+ right = mid - 1;
25
+ }
26
+ }
27
+
28
+ return -1;
29
+ }
4
30
 
5
31
  /**
6
32
  *
@@ -12,7 +38,7 @@ export function onHoverDocument(params, documents, hoverProviders = []) {
12
38
  const document = documents.get(params.textDocument.uri);
13
39
 
14
40
  if (!document) {
15
- console.error(
41
+ debugError(
16
42
  "onHover(): Document not found for URI:",
17
43
  params.textDocument.uri
18
44
  );
@@ -22,21 +48,19 @@ export function onHoverDocument(params, documents, hoverProviders = []) {
22
48
  const text = document.getText();
23
49
  const offset = document.offsetAt(params.position);
24
50
 
25
- // Tokenize the document
26
- const lexResult = lexDocument(text);
27
- if (lexResult.errors.length > 0) return null;
51
+ // Get cached parse result or parse and cache
52
+ const { lexResult, parser } = documentCache.getOrParse(
53
+ params.textDocument.uri,
54
+ document.version,
55
+ text
56
+ );
28
57
 
29
- // attempt to get the scheme from the document uri
30
- const scheme = getSchemeFromContent(text);
58
+ if (lexResult.errors.length > 0) return null;
31
59
 
32
- // Parse the XanoScript file
33
- const parser = xanoscriptParser(text, scheme);
34
60
  const tokens = lexResult.tokens;
35
61
 
36
- // Find the token under the cursor
37
- const tokenIdx = tokens.findIndex(
38
- (token) => token.startOffset <= offset && token.endOffset >= offset
39
- );
62
+ // Find the token under the cursor using binary search (O(log n))
63
+ const tokenIdx = findTokenAtOffset(tokens, offset);
40
64
 
41
65
  if (tokenIdx === -1) {
42
66
  return null;
@@ -1,3 +1,4 @@
1
+ import { debugLog } from "../debug.js";
1
2
  import { lexDocument } from "../lexer/lexer.js";
2
3
  import { mapTokenToType } from "../lexer/tokens.js";
3
4
  import { encodeTokenType } from "./tokens.js";
@@ -33,7 +34,7 @@ function higlightDefault(text, SemanticTokensBuilder) {
33
34
  0 // No modifiers for now
34
35
  );
35
36
  } else if (tokenType === undefined) {
36
- console.log(
37
+ debugLog(
37
38
  `token type not mapped to a type: ${JSON.stringify(
38
39
  token.tokenType.name
39
40
  )}`
@@ -1,3 +1,4 @@
1
+ import { debugError } from "../debug.js";
1
2
  import { getSchemeFromContent } from "../utils";
2
3
  import { higlightText } from "./highlight";
3
4
 
@@ -11,7 +12,7 @@ export function onSemanticCheck(params, documents, SemanticTokensBuilder) {
11
12
  const document = documents.get(params.textDocument.uri);
12
13
 
13
14
  if (!document) {
14
- console.error(
15
+ debugError(
15
16
  "onSemanticCheck(): Document not found for URI:",
16
17
  params.textDocument.uri
17
18
  );
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@xano/xanoscript-language-server",
3
- "version": "11.0.4",
3
+ "version": "11.0.6",
4
4
  "description": "Language Server Protocol implementation for XanoScript",
5
5
  "type": "module",
6
6
  "main": "server.js",
@@ -24,10 +24,8 @@
24
24
  "prepare": "husky"
25
25
  },
26
26
  "dependencies": {
27
- "chai": "^5.2.0",
28
27
  "chevrotain": "^11.0.3",
29
28
  "lodash-es": "^4.17.21",
30
- "mocha": "^11.1.0",
31
29
  "vscode-languageserver": "^9.0.1",
32
30
  "vscode-languageserver-textdocument": "^1.0.12"
33
31
  },
@@ -38,11 +36,13 @@
38
36
  },
39
37
  "devDependencies": {
40
38
  "@eslint/js": "^9.22.0",
39
+ "chai": "^5.2.0",
41
40
  "eslint": "^9.22.0",
42
41
  "eslint-plugin-simple-import-sort": "^12.1.1",
43
42
  "eslint-plugin-unused-imports": "^4.2.0",
44
43
  "globals": "^16.0.0",
45
44
  "husky": "^9.1.7",
46
- "lint-staged": "^16.2.4"
45
+ "lint-staged": "^16.2.4",
46
+ "mocha": "^11.1.0"
47
47
  }
48
48
  }
package/parser/parser.js CHANGED
@@ -5,15 +5,16 @@ import { XanoBaseParser } from "./base_parser.js";
5
5
 
6
6
  /**
7
7
  * Will parse the content of the file based on its scheme (db:/, task:/, api:/...)
8
- * @param {string} scheme
9
8
  * @param {string} text
9
+ * @param {string} scheme
10
+ * @param {Object} [preTokenized] - Optional pre-tokenized result from lexDocument to avoid re-lexing
10
11
  * @returns
11
12
  */
12
- export function xanoscriptParser(text, scheme) {
13
+ export function xanoscriptParser(text, scheme, preTokenized = null) {
13
14
  if (!scheme) {
14
15
  scheme = getSchemeFromContent(text);
15
16
  }
16
- const lexResult = lexDocument(text);
17
+ const lexResult = preTokenized || lexDocument(text);
17
18
  parser.input = lexResult.tokens;
18
19
  switch (scheme.toLowerCase()) {
19
20
  case "addon":
package/server.js CHANGED
@@ -5,6 +5,7 @@ import {
5
5
  TextDocuments,
6
6
  } from "vscode-languageserver/node.js";
7
7
  import { TextDocument } from "vscode-languageserver-textdocument";
8
+ import { debugLog } from "./debug.js";
8
9
  import { onCompletion } from "./onCompletion/onCompletion.js";
9
10
  import { onDidChangeContent } from "./onDidChangeContent/onDidChangeContent.js";
10
11
  import { onHover } from "./onHover/onHover.js";
@@ -50,7 +51,7 @@ documents.onDidChangeContent((params) =>
50
51
  onDidChangeContent(params, connection)
51
52
  );
52
53
  connection.onDidOpenTextDocument((params) => {
53
- console.log("Document opened:", params.textDocument.uri);
54
+ debugLog("Document opened:", params.textDocument.uri);
54
55
  // Existing handler logic
55
56
  });
56
57