@xano/xanoscript-language-server 11.8.4 → 11.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/settings.local.json +2 -1
- package/cache/documentCache.js +58 -10
- package/lexer/comment.js +14 -24
- package/lexer/db.js +1 -2
- package/lexer/security.js +16 -0
- package/onCompletion/onCompletion.js +61 -1
- package/onDefinition/onDefinition.js +150 -0
- package/onDefinition/onDefinition.spec.js +313 -0
- package/onDidChangeContent/onDidChangeContent.js +53 -6
- package/onHover/functions.md +28 -0
- package/package.json +1 -1
- package/parser/base_parser.js +61 -3
- package/parser/clauses/middlewareClause.js +16 -0
- package/parser/definitions/columnDefinition.js +5 -0
- package/parser/functions/api/apiCallFn.js +5 -3
- package/parser/functions/controls/functionCallFn.js +5 -3
- package/parser/functions/controls/functionRunFn.js +61 -5
- package/parser/functions/controls/taskCallFn.js +5 -3
- package/parser/functions/db/captureFieldName.js +63 -0
- package/parser/functions/db/dbAddFn.js +5 -3
- package/parser/functions/db/dbAddOrEditFn.js +13 -3
- package/parser/functions/db/dbBulkAddFn.js +5 -3
- package/parser/functions/db/dbBulkDeleteFn.js +5 -3
- package/parser/functions/db/dbBulkPatchFn.js +5 -3
- package/parser/functions/db/dbBulkUpdateFn.js +5 -3
- package/parser/functions/db/dbDelFn.js +10 -3
- package/parser/functions/db/dbEditFn.js +13 -3
- package/parser/functions/db/dbGetFn.js +10 -3
- package/parser/functions/db/dbHasFn.js +9 -3
- package/parser/functions/db/dbPatchFn.js +10 -3
- package/parser/functions/db/dbQueryFn.js +29 -3
- package/parser/functions/db/dbSchemaFn.js +5 -3
- package/parser/functions/db/dbTruncateFn.js +5 -3
- package/parser/functions/middlewareCallFn.js +3 -1
- package/parser/functions/security/register.js +19 -9
- package/parser/functions/security/securityCreateAuthTokenFn.js +22 -0
- package/parser/functions/security/securityJweDecodeLegacyFn.js +24 -0
- package/parser/functions/security/securityJweDecodeLegacyFn.spec.js +26 -0
- package/parser/functions/security/securityJweEncodeLegacyFn.js +24 -0
- package/parser/functions/security/securityJweEncodeLegacyFn.spec.js +25 -0
- package/parser/functions/securityFn.js +2 -0
- package/parser/functions/varFn.js +1 -1
- package/parser/generic/asVariable.js +2 -0
- package/parser/generic/assignableVariableAs.js +1 -0
- package/parser/generic/assignableVariableProperty.js +5 -2
- package/parser/task_parser.js +2 -1
- package/parser/tests/task/valid_sources/create_leak.xs +165 -0
- package/parser/tests/variable_test/coverage_check.xs +293 -0
- package/parser/variableScanner.js +64 -0
- package/parser/variableValidator.js +44 -0
- package/parser/variableValidator.spec.js +179 -0
- package/server.js +206 -18
- package/utils.js +32 -0
- package/utils.spec.js +93 -1
- package/workspace/crossFileValidator.js +166 -0
- package/workspace/crossFileValidator.spec.js +654 -0
- package/workspace/referenceTracking.spec.js +420 -0
- package/workspace/workspaceIndex.js +149 -0
- package/workspace/workspaceIndex.spec.js +189 -0
package/server.js
CHANGED
|
@@ -1,3 +1,7 @@
|
|
|
1
|
+
import { readdirSync, readFileSync } from "fs";
|
|
2
|
+
import { extname, join } from "path";
|
|
3
|
+
import { setImmediate } from "timers";
|
|
4
|
+
import { fileURLToPath, pathToFileURL } from "url";
|
|
1
5
|
import {
|
|
2
6
|
createConnection,
|
|
3
7
|
ProposedFeatures,
|
|
@@ -5,11 +9,30 @@ import {
|
|
|
5
9
|
TextDocuments,
|
|
6
10
|
} from "vscode-languageserver/node.js";
|
|
7
11
|
import { TextDocument } from "vscode-languageserver-textdocument";
|
|
12
|
+
import { documentCache } from "./cache/documentCache.js";
|
|
8
13
|
import { onCompletion } from "./onCompletion/onCompletion.js";
|
|
14
|
+
import { findDefinition } from "./onDefinition/onDefinition.js";
|
|
9
15
|
import { onDidChangeContent } from "./onDidChangeContent/onDidChangeContent.js";
|
|
10
16
|
import { onHover } from "./onHover/onHover.js";
|
|
11
17
|
import { onSemanticCheck } from "./onSemanticCheck/onSemanticCheck.js";
|
|
12
18
|
import { TOKEN_TYPES } from "./onSemanticCheck/tokens.js";
|
|
19
|
+
import { xanoscriptParser } from "./parser/parser.js";
|
|
20
|
+
import { getSchemeFromContent } from "./utils.js";
|
|
21
|
+
import { workspaceIndex } from "./workspace/workspaceIndex.js";
|
|
22
|
+
|
|
23
|
+
// Feature toggles for debugging memory issues.
|
|
24
|
+
// Disable one at a time to isolate the culprit.
|
|
25
|
+
const FEATURES = {
|
|
26
|
+
workspaceScan: true, // initial .xs file scan on startup
|
|
27
|
+
fileWatcher: true, // onDidChangeWatchedFiles
|
|
28
|
+
completion: true, // onCompletion (content assist)
|
|
29
|
+
semanticTokens: true, // semantic highlighting
|
|
30
|
+
hover: true, // onHover
|
|
31
|
+
definition: true, // onDefinition (go-to-definition)
|
|
32
|
+
diagnostics: true, // onDidChangeContent (parse + diagnostics)
|
|
33
|
+
crossFileValidation: true, // cross-file reference validation
|
|
34
|
+
variableValidation: true, // variable usage validation
|
|
35
|
+
};
|
|
13
36
|
|
|
14
37
|
// Create a connection to the VS Code client
|
|
15
38
|
const connection = createConnection(ProposedFeatures.all);
|
|
@@ -18,40 +41,205 @@ const connection = createConnection(ProposedFeatures.all);
|
|
|
18
41
|
export const documents = new TextDocuments(TextDocument);
|
|
19
42
|
|
|
20
43
|
// Initialize the server
connection.onInitialize((params) => {
  connection.console.log("XanoScript Language Server initialized");

  // Scan workspace for .xs files to populate the index
  if (FEATURES.workspaceScan) {
    const folders = params.workspaceFolders;
    if (folders) {
      for (const { uri } of folders) {
        scanWorkspaceFolder(uri);
      }
    } else if (params.rootUri) {
      // Older clients send a single rootUri instead of workspaceFolders.
      scanWorkspaceFolder(params.rootUri);
    }
  }

  return {
    capabilities: {
      completionProvider: {
        resolveProvider: false,
        triggerCharacters: [".", ":", "$", "|"],
      },
      textDocumentSync: 1,
      semanticTokensProvider: {
        documentSelector: [{ language: "xanoscript" }],
        legend: {
          tokenTypes: TOKEN_TYPES,
          tokenModifiers: [],
        },
        full: true,
      },
      hoverProvider: true,
      definitionProvider: true,
      workspace: {
        workspaceFolders: {
          supported: true,
        },
      },
    },
  };
});
|
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
84
|
+
/**
|
|
85
|
+
* Recursively scan a folder for .xs files and add them to the workspace index.
|
|
86
|
+
* Uses lightweight regex-only indexing (no parsing) and yields to the event loop
|
|
87
|
+
* in batches to avoid blocking and allow GC to run.
|
|
88
|
+
* @param {string} folderUri - file:// URI of the folder
|
|
89
|
+
*/
|
|
90
|
+
function scanWorkspaceFolder(folderUri) {
|
|
91
|
+
try {
|
|
92
|
+
const folderPath = fileURLToPath(folderUri);
|
|
93
|
+
const filePaths = collectXsFiles(folderPath);
|
|
94
|
+
indexFilesInBatches(filePaths);
|
|
95
|
+
} catch (err) {
|
|
96
|
+
connection.console.error(`Error scanning workspace: ${err.message}`);
|
|
97
|
+
}
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
/**
 * Collect all .xs file paths recursively (fast, no parsing).
 * Walks the tree iteratively with an explicit stack; hidden directories and
 * node_modules are skipped, and unreadable directories are silently ignored.
 * @param {string} dirPath - root directory to walk
 * @returns {string[]} paths of every .xs file found
 */
function collectXsFiles(dirPath) {
  const xsFiles = [];
  const pending = [dirPath];
  while (pending.length > 0) {
    const current = pending.pop();
    let entries;
    try {
      entries = readdirSync(current, { withFileTypes: true });
    } catch {
      continue; // unreadable directory — skip it
    }
    for (const entry of entries) {
      const entryPath = join(current, entry.name);
      if (entry.isFile()) {
        if (extname(entry.name) === ".xs") xsFiles.push(entryPath);
        continue;
      }
      if (!entry.isDirectory()) continue;
      if (entry.name.startsWith(".") || entry.name === "node_modules") continue;
      pending.push(entryPath);
    }
  }
  return xsFiles;
}
|
|
129
|
+
|
|
130
|
+
/** Number of files to index per event-loop tick */
const SCAN_BATCH_SIZE = 50;

/**
 * Index files in batches, yielding to the event loop between batches
 * so GC can run and the server stays responsive.
 * @param {string[]} filePaths - absolute paths of files to index
 */
function indexFilesInBatches(filePaths) {
  if (filePaths.length === 0) return;
  let cursor = 0;
  const processBatch = () => {
    const batchEnd = Math.min(cursor + SCAN_BATCH_SIZE, filePaths.length);
    while (cursor < batchEnd) {
      const path = filePaths[cursor];
      cursor += 1;
      try {
        const content = readFileSync(path, "utf-8");
        const uri = pathToFileURL(path).toString();
        workspaceIndex.addFile(uri, content);
      } catch {
        // Skip unreadable files
      }
    }
    // More files remain: schedule the next batch after the event loop turns.
    if (cursor < filePaths.length) {
      setImmediate(processBatch);
    }
  };
  processBatch();
}
|
|
159
|
+
|
|
160
|
+
// Keep the workspace index in sync with on-disk changes to .xs files.
connection.onDidChangeWatchedFiles((params) => {
  if (!FEATURES.fileWatcher) return;
  connection.console.log(`[FILEWATCHER] ${params.changes.length} changes`);
  for (const change of params.changes) {
    if (!change.uri.endsWith(".xs")) continue;
    if (change.type === 3) {
      // Deleted
      workspaceIndex.removeFile(change.uri);
    } else if (change.type === 1 || change.type === 2) {
      // Created / Changed (from external editor): re-read, re-parse, re-index.
      try {
        const content = readFileSync(fileURLToPath(change.uri), "utf-8");
        const parser = xanoscriptParser(content, getSchemeFromContent(content));
        workspaceIndex.addParsed(change.uri, content, parser.__symbolTable);
      } catch {
        // Skip unreadable
      }
    }
  }
});
|
|
184
|
+
|
|
185
|
+
// Hover: delegate to the hover provider when the feature toggle is on.
connection.onHover((params) => {
  if (!FEATURES.hover) return null;
  const { uri } = params.textDocument;
  connection.console.log(`[HOVER] ${uri} pos=${JSON.stringify(params.position)}`);
  return onHover(params, documents);
});

// Completion (content assist): delegate when the feature toggle is on.
connection.onCompletion((params) => {
  if (!FEATURES.completion) return null;
  const { uri } = params.textDocument;
  connection.console.log(`[COMPLETION] ${uri} pos=${JSON.stringify(params.position)}`);
  return onCompletion(params, documents);
});
|
|
199
|
+
// Go-to-definition: resolve either a same-file variable or a cross-file object.
connection.onDefinition((params) => {
  if (!FEATURES.definition) return null;
  connection.console.log(`[DEFINITION] ${params.textDocument.uri}`);
  const document = documents.get(params.textDocument.uri);
  if (!document) return null;

  const text = document.getText();
  const cursorOffset = document.offsetAt(params.position);
  const cached = documentCache.getOrParse(document.uri, document.version, text);
  const definition = findDefinition(
    text,
    cursorOffset,
    workspaceIndex,
    cached.parser?.__symbolTable,
  );
  if (!definition) return null;

  // Same-file variable definition (has offset)
  if (definition.offset != null) {
    const target = document.positionAt(definition.offset);
    return {
      uri: document.uri,
      range: { start: target, end: target },
    };
  }

  // Cross-file definition (has uri): point at the top of the target file.
  return {
    uri: definition.uri,
    range: { start: { line: 0, character: 0 }, end: { line: 0, character: 0 } },
  };
});
|
|
228
|
+
// Semantic highlighting for the whole document.
connection.onRequest("textDocument/semanticTokens/full", (params) => {
  if (!FEATURES.semanticTokens) return null;
  connection.console.log(`[SEMANTIC] ${params.textDocument.uri}`);
  return onSemanticCheck(params, documents, SemanticTokensBuilder);
});

// Re-parse and publish diagnostics on every content change.
documents.onDidChangeContent((params) => {
  if (!FEATURES.diagnostics) return;
  const { document } = params;
  connection.console.log(`[DIAGNOSTICS] ${document.uri} v${document.version}`);
  onDidChangeContent(params, connection, FEATURES);
});

// Drop the cached parse when a document is closed.
documents.onDidClose((params) => {
  documentCache.invalidate(params.document.uri);
});
|
|
56
244
|
|
|
57
245
|
// Bind the document manager to the connection and start listening
|
package/utils.js
CHANGED
|
@@ -54,3 +54,35 @@ export function getSchemeFromContent(source) {
|
|
|
54
54
|
const firstWord = source.match(firstWordRegex)?.[1];
|
|
55
55
|
return schemeByFirstWord[firstWord] || "cfn";
|
|
56
56
|
}
|
|
57
|
+
|
|
58
|
+
// Reverse map: scheme codes back to user-facing object type keywords
const typeByScheme = {
  cfn: "function",
  api: "query",
  db: "table",
  // All other schemes map 1:1 (addon, agent, task, etc.)
};

// Extends firstWordRegex to also capture the name (quoted or bare identifier)
const objectInfoRegex = /^(?:\s|\/\/[^\n]*\n)*(\w+)\s+(?:"([^"]+)"|(\w+))/;

/**
 * Extract the object type and name from XanoScript file content.
 * Reuses schemeByFirstWord for type detection, extends the regex to capture the name.
 * @param {string} source - File content
 * @returns {{ type: string, name: string } | null}
 */
export function getObjectInfoFromContent(source) {
  const match = objectInfoRegex.exec(source);
  if (!match) return null;

  const [, firstWord, quotedName, bareName] = match;
  const scheme = schemeByFirstWord[firstWord];
  if (!scheme) return null;

  const name = quotedName ?? bareName;
  if (!name) return null;

  return { type: typeByScheme[scheme] ?? scheme, name };
}
|
package/utils.spec.js
CHANGED
|
@@ -1,6 +1,10 @@
|
|
|
1
1
|
import { expect } from "chai";
|
|
2
2
|
import { describe, it } from "mocha";
|
|
3
|
-
import {
|
|
3
|
+
import {
|
|
4
|
+
getObjectInfoFromContent,
|
|
5
|
+
getSchemeFromContent,
|
|
6
|
+
Iterator,
|
|
7
|
+
} from "./utils.js";
|
|
4
8
|
|
|
5
9
|
describe("getSchemeFromContent", () => {
|
|
6
10
|
describe("basic keyword detection", () => {
|
|
@@ -242,6 +246,94 @@ table products {
|
|
|
242
246
|
});
|
|
243
247
|
});
|
|
244
248
|
|
|
249
|
+
describe("getObjectInfoFromContent", () => {
|
|
250
|
+
it("should extract function with identifier name", () => {
|
|
251
|
+
const result = getObjectInfoFromContent(
|
|
252
|
+
"function my_func {\n stack {\n }\n}"
|
|
253
|
+
);
|
|
254
|
+
expect(result).to.deep.equal({ type: "function", name: "my_func" });
|
|
255
|
+
});
|
|
256
|
+
|
|
257
|
+
it("should extract function with string literal name", () => {
|
|
258
|
+
const result = getObjectInfoFromContent('function "my func" {\n}');
|
|
259
|
+
expect(result).to.deep.equal({ type: "function", name: "my func" });
|
|
260
|
+
});
|
|
261
|
+
|
|
262
|
+
it("should extract table", () => {
|
|
263
|
+
const result = getObjectInfoFromContent(
|
|
264
|
+
"table users {\n schema {\n }\n}"
|
|
265
|
+
);
|
|
266
|
+
expect(result).to.deep.equal({ type: "table", name: "users" });
|
|
267
|
+
});
|
|
268
|
+
|
|
269
|
+
it("should extract query with verb", () => {
|
|
270
|
+
const result = getObjectInfoFromContent('query "/users" GET {\n}');
|
|
271
|
+
expect(result).to.deep.equal({ type: "query", name: "/users" });
|
|
272
|
+
});
|
|
273
|
+
|
|
274
|
+
it("should extract task", () => {
|
|
275
|
+
const result = getObjectInfoFromContent('task "daily_cleanup" {\n}');
|
|
276
|
+
expect(result).to.deep.equal({ type: "task", name: "daily_cleanup" });
|
|
277
|
+
});
|
|
278
|
+
|
|
279
|
+
it("should extract api_group", () => {
|
|
280
|
+
const result = getObjectInfoFromContent('api_group "users" {\n}');
|
|
281
|
+
expect(result).to.deep.equal({ type: "api_group", name: "users" });
|
|
282
|
+
});
|
|
283
|
+
|
|
284
|
+
it("should handle leading comments", () => {
|
|
285
|
+
const result = getObjectInfoFromContent(
|
|
286
|
+
"// my function\nfunction my_func {\n}"
|
|
287
|
+
);
|
|
288
|
+
expect(result).to.deep.equal({ type: "function", name: "my_func" });
|
|
289
|
+
});
|
|
290
|
+
|
|
291
|
+
it("should handle leading whitespace and multiple comments", () => {
|
|
292
|
+
const result = getObjectInfoFromContent(
|
|
293
|
+
"\n// comment 1\n// comment 2\ntable users {\n}"
|
|
294
|
+
);
|
|
295
|
+
expect(result).to.deep.equal({ type: "table", name: "users" });
|
|
296
|
+
});
|
|
297
|
+
|
|
298
|
+
it("should return null for empty content", () => {
|
|
299
|
+
expect(getObjectInfoFromContent("")).to.be.null;
|
|
300
|
+
});
|
|
301
|
+
|
|
302
|
+
it("should return null for content with no name token", () => {
|
|
303
|
+
expect(getObjectInfoFromContent("function")).to.be.null;
|
|
304
|
+
});
|
|
305
|
+
|
|
306
|
+
it("should extract workflow_test", () => {
|
|
307
|
+
const result = getObjectInfoFromContent(
|
|
308
|
+
'workflow_test "login flow" {\n}'
|
|
309
|
+
);
|
|
310
|
+
expect(result).to.deep.equal({
|
|
311
|
+
type: "workflow_test",
|
|
312
|
+
name: "login flow",
|
|
313
|
+
});
|
|
314
|
+
});
|
|
315
|
+
|
|
316
|
+
it("should extract table_trigger", () => {
|
|
317
|
+
const result = getObjectInfoFromContent(
|
|
318
|
+
'table_trigger "on_user_create" {\n}'
|
|
319
|
+
);
|
|
320
|
+
expect(result).to.deep.equal({
|
|
321
|
+
type: "table_trigger",
|
|
322
|
+
name: "on_user_create",
|
|
323
|
+
});
|
|
324
|
+
});
|
|
325
|
+
|
|
326
|
+
it("should extract middleware", () => {
|
|
327
|
+
const result = getObjectInfoFromContent('middleware "auth_check" {\n}');
|
|
328
|
+
expect(result).to.deep.equal({ type: "middleware", name: "auth_check" });
|
|
329
|
+
});
|
|
330
|
+
|
|
331
|
+
it("should extract addon", () => {
|
|
332
|
+
const result = getObjectInfoFromContent('addon "my_addon" {\n}');
|
|
333
|
+
expect(result).to.deep.equal({ type: "addon", name: "my_addon" });
|
|
334
|
+
});
|
|
335
|
+
});
|
|
336
|
+
|
|
245
337
|
describe("Iterator", () => {
|
|
246
338
|
describe("basic functionality", () => {
|
|
247
339
|
it("should iterate through items", () => {
|
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Levenshtein distance between two strings.
|
|
3
|
+
*/
|
|
4
|
+
function levenshtein(a, b) {
|
|
5
|
+
const m = a.length;
|
|
6
|
+
const n = b.length;
|
|
7
|
+
const dp = Array.from({ length: m + 1 }, () => new Array(n + 1));
|
|
8
|
+
for (let i = 0; i <= m; i++) dp[i][0] = i;
|
|
9
|
+
for (let j = 0; j <= n; j++) dp[0][j] = j;
|
|
10
|
+
for (let i = 1; i <= m; i++) {
|
|
11
|
+
for (let j = 1; j <= n; j++) {
|
|
12
|
+
dp[i][j] =
|
|
13
|
+
a[i - 1] === b[j - 1]
|
|
14
|
+
? dp[i - 1][j - 1]
|
|
15
|
+
: 1 + Math.min(dp[i - 1][j], dp[i][j - 1], dp[i - 1][j - 1]);
|
|
16
|
+
}
|
|
17
|
+
}
|
|
18
|
+
return dp[m][n];
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
/**
|
|
22
|
+
* Find the closest match from candidates using prefix check + edit distance.
|
|
23
|
+
*/
|
|
24
|
+
function findClosestMatch(input, candidates) {
|
|
25
|
+
const inputLower = input.toLowerCase();
|
|
26
|
+
|
|
27
|
+
// Prefix match: if input is a prefix of exactly one candidate, suggest it
|
|
28
|
+
const prefixMatches = candidates.filter((c) =>
|
|
29
|
+
c.toLowerCase().startsWith(inputLower)
|
|
30
|
+
);
|
|
31
|
+
if (prefixMatches.length === 1) return prefixMatches[0];
|
|
32
|
+
|
|
33
|
+
// Levenshtein fallback
|
|
34
|
+
const maxDistance = Math.max(3, Math.floor(input.length / 2));
|
|
35
|
+
let best = null;
|
|
36
|
+
let bestDist = Infinity;
|
|
37
|
+
for (const candidate of candidates) {
|
|
38
|
+
const dist = levenshtein(inputLower, candidate.toLowerCase());
|
|
39
|
+
if (dist < bestDist) {
|
|
40
|
+
bestDist = dist;
|
|
41
|
+
best = candidate;
|
|
42
|
+
}
|
|
43
|
+
}
|
|
44
|
+
return bestDist <= maxDistance ? best : null;
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
/**
|
|
48
|
+
* Validate cross-file references against the workspace index.
|
|
49
|
+
* @param {Array<{refType: string, name: string, startOffset: number, endOffset: number}>} references
|
|
50
|
+
* @param {import('./workspaceIndex.js').WorkspaceIndex} index
|
|
51
|
+
* @returns {Array<{message: string, startOffset: number, endOffset: number}>} warnings
|
|
52
|
+
*/
|
|
53
|
+
export function crossFileValidate(references, index) {
|
|
54
|
+
if (!references || references.length === 0) return [];
|
|
55
|
+
|
|
56
|
+
const warnings = [];
|
|
57
|
+
for (const ref of references) {
|
|
58
|
+
if (!ref.name) {
|
|
59
|
+
warnings.push({
|
|
60
|
+
message: `Empty ${ref.refType} reference`,
|
|
61
|
+
startOffset: ref.startOffset,
|
|
62
|
+
endOffset: ref.endOffset,
|
|
63
|
+
});
|
|
64
|
+
continue;
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
if (!index.has(ref.refType, ref.name)) {
|
|
68
|
+
let message = `Unknown ${ref.refType} "${ref.name}"`;
|
|
69
|
+
const allNames = index.getAllNames(ref.refType);
|
|
70
|
+
const suggestion = findClosestMatch(ref.name, allNames);
|
|
71
|
+
if (suggestion) {
|
|
72
|
+
message += `. Did you mean "${suggestion}"?`;
|
|
73
|
+
}
|
|
74
|
+
warnings.push({
|
|
75
|
+
message,
|
|
76
|
+
startOffset: ref.startOffset,
|
|
77
|
+
endOffset: ref.endOffset,
|
|
78
|
+
});
|
|
79
|
+
continue;
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
// Validate field_name against table columns
|
|
83
|
+
if (ref.refType === "table" && ref.fieldName) {
|
|
84
|
+
const entry = index.get(ref.refType, ref.name);
|
|
85
|
+
const columns = entry?.inputs;
|
|
86
|
+
if (columns && Object.keys(columns).length > 0) {
|
|
87
|
+
const columnNames = Object.keys(columns);
|
|
88
|
+
if (!columnNames.includes(ref.fieldName)) {
|
|
89
|
+
let message = `Unknown column "${ref.fieldName}" in table "${ref.name}"`;
|
|
90
|
+
const suggestion = findClosestMatch(ref.fieldName, columnNames);
|
|
91
|
+
if (suggestion) {
|
|
92
|
+
message += `. Did you mean "${suggestion}"?`;
|
|
93
|
+
}
|
|
94
|
+
warnings.push({
|
|
95
|
+
message,
|
|
96
|
+
startOffset: ref.fieldNameStartOffset ?? ref.startOffset,
|
|
97
|
+
endOffset: ref.fieldNameEndOffset ?? ref.endOffset,
|
|
98
|
+
});
|
|
99
|
+
}
|
|
100
|
+
}
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
// Validate data keys against table columns
|
|
104
|
+
if (ref.refType === "table" && ref.dataKeys) {
|
|
105
|
+
const entry = index.get(ref.refType, ref.name);
|
|
106
|
+
const columns = entry?.inputs;
|
|
107
|
+
if (columns && Object.keys(columns).length > 0) {
|
|
108
|
+
const columnNames = Object.keys(columns);
|
|
109
|
+
for (const dk of ref.dataKeys) {
|
|
110
|
+
if (!columnNames.includes(dk.name)) {
|
|
111
|
+
let message = `Unknown column "${dk.name}" in table "${ref.name}"`;
|
|
112
|
+
const suggestion = findClosestMatch(dk.name, columnNames);
|
|
113
|
+
if (suggestion) {
|
|
114
|
+
message += `. Did you mean "${suggestion}"?`;
|
|
115
|
+
}
|
|
116
|
+
warnings.push({
|
|
117
|
+
message,
|
|
118
|
+
startOffset: dk.startOffset ?? ref.startOffset,
|
|
119
|
+
endOffset: dk.endOffset ?? ref.endOffset,
|
|
120
|
+
});
|
|
121
|
+
}
|
|
122
|
+
}
|
|
123
|
+
}
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
// Validate input keys if args are present
|
|
127
|
+
if (ref.args) {
|
|
128
|
+
const entry = index.get(ref.refType, ref.name);
|
|
129
|
+
const declaredInputs = entry?.inputs;
|
|
130
|
+
if (!declaredInputs || Object.keys(declaredInputs).length === 0) continue;
|
|
131
|
+
|
|
132
|
+
const declaredKeys = Object.keys(declaredInputs);
|
|
133
|
+
for (const [argName, argInfo] of Object.entries(ref.args)) {
|
|
134
|
+
if (!(argName in declaredInputs)) {
|
|
135
|
+
let message = `Unknown input "${argName}" for ${ref.refType} "${ref.name}"`;
|
|
136
|
+
const suggestion = findClosestMatch(argName, declaredKeys);
|
|
137
|
+
if (suggestion) {
|
|
138
|
+
message += `. Did you mean "${suggestion}"?`;
|
|
139
|
+
}
|
|
140
|
+
warnings.push({
|
|
141
|
+
message,
|
|
142
|
+
startOffset: argInfo?.startOffset ?? ref.startOffset,
|
|
143
|
+
endOffset: argInfo?.endOffset ?? ref.endOffset,
|
|
144
|
+
});
|
|
145
|
+
} else if (argInfo?.type) {
|
|
146
|
+
// Type check: compare literal type against declared type
|
|
147
|
+
const declared = declaredInputs[argName];
|
|
148
|
+
const isEnumWithText =
|
|
149
|
+
declared?.type === "enum" && argInfo.type === "text";
|
|
150
|
+
if (
|
|
151
|
+
declared?.type &&
|
|
152
|
+
declared.type !== argInfo.type &&
|
|
153
|
+
!isEnumWithText
|
|
154
|
+
) {
|
|
155
|
+
warnings.push({
|
|
156
|
+
message: `Type mismatch for input "${argName}": expected ${declared.type}, got ${argInfo.type}`,
|
|
157
|
+
startOffset: argInfo?.startOffset ?? ref.startOffset,
|
|
158
|
+
endOffset: argInfo?.endOffset ?? ref.endOffset,
|
|
159
|
+
});
|
|
160
|
+
}
|
|
161
|
+
}
|
|
162
|
+
}
|
|
163
|
+
}
|
|
164
|
+
}
|
|
165
|
+
return warnings;
|
|
166
|
+
}
|