@jsleekr/graft 5.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +235 -0
- package/dist/analyzer/estimator.d.ts +33 -0
- package/dist/analyzer/estimator.js +273 -0
- package/dist/analyzer/graph-checker.d.ts +13 -0
- package/dist/analyzer/graph-checker.js +153 -0
- package/dist/analyzer/scope.d.ts +21 -0
- package/dist/analyzer/scope.js +324 -0
- package/dist/analyzer/types.d.ts +17 -0
- package/dist/analyzer/types.js +323 -0
- package/dist/codegen/agents.d.ts +2 -0
- package/dist/codegen/agents.js +109 -0
- package/dist/codegen/backend.d.ts +16 -0
- package/dist/codegen/backend.js +1 -0
- package/dist/codegen/claude-backend.d.ts +9 -0
- package/dist/codegen/claude-backend.js +47 -0
- package/dist/codegen/codegen.d.ts +10 -0
- package/dist/codegen/codegen.js +57 -0
- package/dist/codegen/hooks.d.ts +2 -0
- package/dist/codegen/hooks.js +165 -0
- package/dist/codegen/orchestration.d.ts +3 -0
- package/dist/codegen/orchestration.js +250 -0
- package/dist/codegen/settings.d.ts +36 -0
- package/dist/codegen/settings.js +87 -0
- package/dist/compiler.d.ts +21 -0
- package/dist/compiler.js +101 -0
- package/dist/constants.d.ts +9 -0
- package/dist/constants.js +13 -0
- package/dist/errors/diagnostics.d.ts +21 -0
- package/dist/errors/diagnostics.js +25 -0
- package/dist/format.d.ts +12 -0
- package/dist/format.js +46 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +181 -0
- package/dist/lexer/lexer.d.ts +23 -0
- package/dist/lexer/lexer.js +268 -0
- package/dist/lexer/tokens.d.ts +96 -0
- package/dist/lexer/tokens.js +150 -0
- package/dist/lsp/features/code-actions.d.ts +7 -0
- package/dist/lsp/features/code-actions.js +58 -0
- package/dist/lsp/features/completions.d.ts +7 -0
- package/dist/lsp/features/completions.js +271 -0
- package/dist/lsp/features/definition.d.ts +3 -0
- package/dist/lsp/features/definition.js +32 -0
- package/dist/lsp/features/diagnostics.d.ts +4 -0
- package/dist/lsp/features/diagnostics.js +33 -0
- package/dist/lsp/features/hover.d.ts +7 -0
- package/dist/lsp/features/hover.js +88 -0
- package/dist/lsp/features/index.d.ts +9 -0
- package/dist/lsp/features/index.js +9 -0
- package/dist/lsp/features/references.d.ts +7 -0
- package/dist/lsp/features/references.js +53 -0
- package/dist/lsp/features/rename.d.ts +17 -0
- package/dist/lsp/features/rename.js +198 -0
- package/dist/lsp/features/symbols.d.ts +7 -0
- package/dist/lsp/features/symbols.js +74 -0
- package/dist/lsp/features/utils.d.ts +3 -0
- package/dist/lsp/features/utils.js +65 -0
- package/dist/lsp/features.d.ts +20 -0
- package/dist/lsp/features.js +513 -0
- package/dist/lsp/server.d.ts +2 -0
- package/dist/lsp/server.js +327 -0
- package/dist/parser/ast.d.ts +244 -0
- package/dist/parser/ast.js +10 -0
- package/dist/parser/parser.d.ts +95 -0
- package/dist/parser/parser.js +1175 -0
- package/dist/program-index.d.ts +21 -0
- package/dist/program-index.js +74 -0
- package/dist/resolver/resolver.d.ts +9 -0
- package/dist/resolver/resolver.js +136 -0
- package/dist/runner.d.ts +13 -0
- package/dist/runner.js +41 -0
- package/dist/runtime/executor.d.ts +56 -0
- package/dist/runtime/executor.js +285 -0
- package/dist/runtime/expr-eval.d.ts +3 -0
- package/dist/runtime/expr-eval.js +138 -0
- package/dist/runtime/flow-runner.d.ts +21 -0
- package/dist/runtime/flow-runner.js +230 -0
- package/dist/runtime/memory.d.ts +5 -0
- package/dist/runtime/memory.js +41 -0
- package/dist/runtime/prompt-builder.d.ts +12 -0
- package/dist/runtime/prompt-builder.js +66 -0
- package/dist/runtime/subprocess.d.ts +20 -0
- package/dist/runtime/subprocess.js +99 -0
- package/dist/runtime/token-tracker.d.ts +36 -0
- package/dist/runtime/token-tracker.js +56 -0
- package/dist/runtime/transforms.d.ts +2 -0
- package/dist/runtime/transforms.js +104 -0
- package/dist/types.d.ts +10 -0
- package/dist/types.js +1 -0
- package/dist/utils.d.ts +3 -0
- package/dist/utils.js +35 -0
- package/dist/version.d.ts +1 -0
- package/dist/version.js +11 -0
- package/package.json +70 -0
|
@@ -0,0 +1,327 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { createConnection, TextDocuments, ProposedFeatures, TextDocumentSyncKind, CodeActionKind, } from 'vscode-languageserver/node';
|
|
3
|
+
import { TextDocument } from 'vscode-languageserver-textdocument';
|
|
4
|
+
import { fileURLToPath, pathToFileURL } from 'node:url';
|
|
5
|
+
import * as fs from 'node:fs';
|
|
6
|
+
import * as path from 'node:path';
|
|
7
|
+
import { compileToProgram } from '../compiler.js';
|
|
8
|
+
import { Lexer } from '../lexer/lexer.js';
|
|
9
|
+
import { Parser } from '../parser/parser.js';
|
|
10
|
+
import { toDiagnostics, getHoverInfo, getDefinitionLocation, getWordAtPosition, getCompletions, buildAutoImportActions, getDocumentSymbols, isRenameable, buildRenameEdits, isReferable, findReferences } from './features/index.js';
|
|
11
|
+
// LSP connection over stdio with all proposed protocol features enabled.
const connection = createConnection(ProposedFeatures.all);
// Open-document manager; keeps the full text of every open file in sync.
const documents = new TextDocuments(TextDocument);
// Upper bound on cached compiled programs (LRU-evicted in evictIfNeeded).
const MAX_CACHE_SIZE = 50;
// uri -> { program, index, lastAccess } for successfully compiled documents.
const cache = new Map();
|
|
15
|
+
/**
 * Removes the least-recently-used cache entry once the cache has grown
 * past MAX_CACHE_SIZE; no-op while the cache is within budget.
 */
function evictIfNeeded() {
    if (cache.size <= MAX_CACHE_SIZE) {
        return;
    }
    let lruKey = '';
    let lruTime = Infinity;
    cache.forEach((entry, key) => {
        if (entry.lastAccess < lruTime) {
            lruTime = entry.lastAccess;
            lruKey = key;
        }
    });
    if (lruKey) {
        cache.delete(lruKey);
    }
}
|
|
29
|
+
/**
 * Looks up the cache entry for `uri`, refreshing its LRU timestamp on a hit.
 * Returns the entry, or undefined when the document is not cached.
 */
function touchCache(uri) {
    const hit = cache.get(uri);
    if (!hit) {
        return undefined;
    }
    hit.lastAccess = Date.now();
    return hit;
}
|
|
37
|
+
// uri -> pending validation timer (per-document debounce; see onDidChangeContent).
const debounceTimers = new Map();
// Track import dependencies: importedFile → Set of URIs that import it
const importDeps = new Map();
// Workspace export cache for auto-import code actions
// (maps file path -> list of exported context/node names).
const workspaceExports = new Map();
// Filesystem path of the first workspace folder; null until onInitialize runs.
let workspaceRoot = null;
// Guards the one-time lazy workspace scan performed by ensureWorkspaceScan().
let workspaceScanDone = false;
|
|
44
|
+
// Initialize handshake: remember the workspace root (used later for the
// export scan) and advertise this server's capabilities to the client.
connection.onInitialize((params) => {
    const firstFolder = params.workspaceFolders?.[0];
    if (firstFolder) {
        workspaceRoot = fileURLToPath(firstFolder.uri);
    }
    else if (params.rootUri) {
        workspaceRoot = fileURLToPath(params.rootUri);
    }
    const capabilities = {
        textDocumentSync: TextDocumentSyncKind.Full,
        hoverProvider: true,
        definitionProvider: true,
        completionProvider: {
            triggerCharacters: ['.', '[', '{'],
        },
        documentSymbolProvider: true,
        codeActionProvider: {
            codeActionKinds: [CodeActionKind.QuickFix],
        },
        referencesProvider: true,
        renameProvider: {
            prepareProvider: true,
        },
    };
    return { capabilities };
});
|
|
71
|
+
/**
 * Compiles `doc`, publishes diagnostics, and refreshes both the program
 * cache and the import-dependency graph for this document.
 *
 * Fix: the edges this URI contributed to `importDeps` were never removed
 * on re-validation, so a document kept being re-validated as a "dependent"
 * of modules it no longer imports, and the dependent sets grew without
 * bound. Stale edges are now pruned before the current imports are recorded.
 */
function validateDocument(doc) {
    const uri = doc.uri;
    const filePath = fileURLToPath(uri);
    try {
        const result = compileToProgram(doc.getText(), filePath);
        const diagnostics = toDiagnostics(result.errors, result.warnings);
        connection.sendDiagnostics({ uri, diagnostics });
        if (result.program && result.index) {
            cache.set(uri, { program: result.program, index: result.index, lastAccess: Date.now() });
            evictIfNeeded();
            // Drop stale dependency edges recorded for this document...
            for (const dependents of importDeps.values()) {
                dependents.delete(uri);
            }
            // ...then track the imports as they are now.
            for (const imp of result.program.imports) {
                if (imp.resolvedPath) {
                    const depUri = pathToFileURL(imp.resolvedPath).toString();
                    let dependents = importDeps.get(depUri);
                    if (!dependents) {
                        dependents = new Set();
                        importDeps.set(depUri, dependents);
                    }
                    dependents.add(uri);
                }
            }
        }
    }
    catch {
        // Compiler crashed (not an ordinary parse error): clear diagnostics
        // rather than leave stale squiggles behind.
        connection.sendDiagnostics({ uri, diagnostics: [] });
    }
}
|
|
99
|
+
// Debounced re-validation (200 ms per document): validate the changed file,
// refresh its workspace-export entry, then re-validate every open document
// that imports it.
documents.onDidChangeContent((change) => {
    const uri = change.document.uri;
    const pending = debounceTimers.get(uri);
    if (pending) {
        clearTimeout(pending);
    }
    const timer = setTimeout(() => {
        debounceTimers.delete(uri);
        const doc = documents.get(uri);
        if (!doc) {
            return;
        }
        validateDocument(doc);
        // Keep the auto-import export cache in sync with the edited file.
        if (uri.startsWith('file:')) {
            const changedPath = fileURLToPath(uri);
            if (changedPath.endsWith('.gft')) {
                parseAndCacheExports(changedPath);
            }
        }
        // Re-check every open document that imports this one.
        const dependents = importDeps.get(uri);
        if (!dependents) {
            return;
        }
        for (const depUri of dependents) {
            const depDoc = documents.get(depUri);
            if (depDoc) {
                validateDocument(depDoc);
            }
        }
    }, 200);
    debounceTimers.set(uri, timer);
});
|
|
127
|
+
// Document closed: release everything held for it and clear its diagnostics.
// Fix: a pending debounce timer scheduled by onDidChangeContent was never
// cancelled here, leaving a live timer that fired after close and did wasted
// lookup work; it is now cleared alongside the cache entry.
documents.onDidClose((e) => {
    const uri = e.document.uri;
    const pending = debounceTimers.get(uri);
    if (pending) {
        clearTimeout(pending);
        debounceTimers.delete(uri);
    }
    cache.delete(uri);
    connection.sendDiagnostics({ uri, diagnostics: [] });
});
|
|
131
|
+
// Hover: resolve the identifier under the cursor against the cached program
// index; null when the document is unknown, uncompiled, or not on a word.
connection.onHover((params) => {
    const { uri } = params.textDocument;
    const doc = documents.get(uri);
    const state = touchCache(uri);
    if (!doc || !state) {
        return null;
    }
    const { line, character } = params.position;
    const word = getWordAtPosition(doc.getText(), line, character);
    return word ? getHoverInfo(word, state.index) : null;
});
|
|
141
|
+
// Go-to-definition for the identifier under the cursor; mirrors onHover's
// guards and delegates the lookup to the features module.
connection.onDefinition((params) => {
    const { uri } = params.textDocument;
    const doc = documents.get(uri);
    const state = touchCache(uri);
    if (!doc || !state) {
        return null;
    }
    const { line, character } = params.position;
    const word = getWordAtPosition(doc.getText(), line, character);
    return word ? getDefinitionLocation(word, state.index, uri) : null;
});
|
|
151
|
+
// Completions: delegates to getCompletions, supplying a callback that can
// resolve the importable names of another .gft module on demand.
connection.onCompletion((params) => {
    const { uri } = params.textDocument;
    const doc = documents.get(uri);
    if (!doc) {
        return [];
    }
    const state = touchCache(uri);
    // Parses the module at `importPath` (relative to the current file) and
    // returns its context/node names; [] when missing or unparseable.
    const resolveImportNames = (importPath) => {
        try {
            const currentFilePath = fileURLToPath(uri);
            const base = path.resolve(path.dirname(currentFilePath), importPath);
            const resolved = base.endsWith('.gft') ? base : `${base}.gft`;
            const source = fs.readFileSync(resolved, 'utf-8');
            const { program } = new Parser(new Lexer(source).tokenize()).parse();
            return [...program.contexts, ...program.nodes].map((decl) => decl.name);
        }
        catch {
            return [];
        }
    };
    const { line, character } = params.position;
    return getCompletions(doc.getText(), line, character, state ?? null, resolveImportNames);
});
|
|
176
|
+
// Outline view: document symbols from the cached program, or [] if the
// document has not compiled successfully yet.
connection.onDocumentSymbol((params) => {
    const state = touchCache(params.textDocument.uri);
    return state ? getDocumentSymbols(state.program, state.index) : [];
});
|
|
182
|
+
// --- Workspace Export Scanning ---
|
|
183
|
+
/**
 * Walks the workspace rooted at `rootDir` and caches the exports of every
 * .gft file except `excludeFile`. Errors are swallowed: scanning is a
 * best-effort enhancement for auto-import, never a hard failure.
 */
function scanWorkspaceExports(rootDir, excludeFile) {
    try {
        scanDir(rootDir, excludeFile);
    }
    catch { /* best-effort */ }
}
|
|
189
|
+
/**
 * Recursive directory walk feeding parseAndCacheExports with every .gft file
 * found, skipping dotfiles, node_modules, dist, and `excludeFile`.
 * Unreadable directories and entries are silently skipped.
 */
function scanDir(dir, excludeFile) {
    let names;
    try {
        names = fs.readdirSync(dir);
    }
    catch {
        return;
    }
    for (const name of names) {
        const skipped = name.startsWith('.') || name === 'node_modules' || name === 'dist';
        if (skipped) {
            continue;
        }
        const fullPath = path.join(dir, name);
        let info;
        try {
            info = fs.statSync(fullPath);
        }
        catch {
            continue;
        }
        if (info.isDirectory()) {
            scanDir(fullPath, excludeFile);
        }
        else if (name.endsWith('.gft') && fullPath !== excludeFile) {
            parseAndCacheExports(fullPath);
        }
    }
}
|
|
214
|
+
/**
 * Parses a single .gft file and records its exportable names (contexts and
 * nodes) in workspaceExports. An open editor buffer takes precedence over
 * the on-disk contents; unparseable files are skipped silently.
 */
function parseAndCacheExports(filePath) {
    try {
        const uri = pathToFileURL(filePath).toString();
        const openDoc = documents.get(uri);
        const source = openDoc ? openDoc.getText() : fs.readFileSync(filePath, 'utf-8');
        const { program } = new Parser(new Lexer(source).tokenize()).parse();
        const names = [
            ...program.contexts.map((c) => c.name),
            ...program.nodes.map((n) => n.name),
        ];
        workspaceExports.set(filePath, names);
    }
    catch { /* skip unparseable files */ }
}
|
|
229
|
+
// --- Workspace Helpers ---
|
|
230
|
+
/**
 * Runs the workspace export scan exactly once, lazily, the first time a
 * feature needs it; no-op when no workspace root was negotiated.
 */
function ensureWorkspaceScan(currentFilePath) {
    if (!workspaceRoot || workspaceScanDone) {
        return;
    }
    scanWorkspaceExports(workspaceRoot, currentFilePath);
    workspaceScanDone = true;
}
|
|
236
|
+
/**
 * Gathers the text of every known workspace .gft file other than the current
 * document, preferring open editor buffers over disk. An optional
 * `filter(text, filePath)` predicate can skip files cheaply.
 * Returns Map<filePath, { text, uri }>.
 */
function collectWorkspaceFileTexts(currentUri, currentFilePath, filter) {
    ensureWorkspaceScan(currentFilePath);
    const collected = new Map();
    if (!workspaceRoot) {
        return collected;
    }
    for (const filePath of workspaceExports.keys()) {
        const fileUri = pathToFileURL(filePath).toString();
        if (fileUri === currentUri) {
            continue;
        }
        const openDoc = documents.get(fileUri);
        let text;
        try {
            text = openDoc ? openDoc.getText() : fs.readFileSync(filePath, 'utf-8');
        }
        catch {
            continue;
        }
        if (filter && !filter(text, filePath)) {
            continue;
        }
        collected.set(filePath, { text, uri: fileUri });
    }
    return collected;
}
|
|
259
|
+
// --- Code Actions ---
|
|
260
|
+
// Quick fixes: auto-import actions built from the workspace export cache.
// Only meaningful for on-disk documents inside a known workspace root.
connection.onCodeAction((params) => {
    const { uri } = params.textDocument;
    const doc = documents.get(uri);
    if (!doc || !uri.startsWith('file:')) {
        return [];
    }
    const currentFilePath = fileURLToPath(uri);
    ensureWorkspaceScan(currentFilePath);
    if (!workspaceRoot) {
        return [];
    }
    return buildAutoImportActions(doc.getText(), uri, currentFilePath, params.context.diagnostics, workspaceExports);
});
|
|
270
|
+
// --- Rename ---
|
|
271
|
+
// Rename validation: answers with the exact range of the identifier under
// the cursor, or null to refuse the rename (unknown symbol, stale cache).
connection.onPrepareRename((params) => {
    const { uri } = params.textDocument;
    const doc = documents.get(uri);
    const state = touchCache(uri);
    if (!doc || !state) {
        return null;
    }
    const { line, character } = params.position;
    const word = getWordAtPosition(doc.getText(), line, character);
    if (!word || !isRenameable(word, state.index)) {
        return null;
    }
    // Scan the cursor's line for the identifier that spans the cursor column.
    // NOTE(review): a cursor sitting just past the last character of a word
    // (character === end) is treated as "not on the word" here — confirm that
    // matches getWordAtPosition's convention.
    const lineText = doc.getText().split('\n')[line] ?? '';
    for (const m of lineText.matchAll(/[A-Za-z_][A-Za-z0-9_]*/g)) {
        const start = m.index;
        const end = start + m[0].length;
        if (character >= start && character < end) {
            return {
                start: { line, character: start },
                end: { line, character: end },
            };
        }
    }
    return null;
});
|
|
296
|
+
// Rename execution: builds a workspace edit covering the current document
// plus every workspace file whose import list mentions the symbol.
connection.onRenameRequest((params) => {
    const { uri } = params.textDocument;
    const doc = documents.get(uri);
    const state = touchCache(uri);
    if (!doc || !state) {
        return null;
    }
    const { line, character } = params.position;
    const word = getWordAtPosition(doc.getText(), line, character);
    if (!word || !isRenameable(word, state.index)) {
        return null;
    }
    const currentFilePath = fileURLToPath(uri);
    // Pre-filter: only files importing the symbol by name need edits.
    const escaped = word.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
    const importPattern = new RegExp(`import\\s*\\{[^}]*\\b${escaped}\\b[^}]*\\}`);
    const importingFiles = collectWorkspaceFileTexts(uri, currentFilePath, (text) => importPattern.test(text));
    const edits = buildRenameEdits(word, params.newName, doc.getText(), uri, currentFilePath, importingFiles);
    if (!edits || 'error' in edits) {
        return null;
    }
    return edits;
});
|
|
313
|
+
// Find-all-references: searches the current document plus workspace files
// that contain the symbol text (cheap substring pre-filter before the real
// reference search). Returns null rather than an empty array per LSP custom.
connection.onReferences((params) => {
    const { uri } = params.textDocument;
    const doc = documents.get(uri);
    const state = touchCache(uri);
    if (!doc || !state) {
        return null;
    }
    const { line, character } = params.position;
    const word = getWordAtPosition(doc.getText(), line, character);
    if (!word || !isReferable(word, state.index)) {
        return null;
    }
    const currentFilePath = fileURLToPath(uri);
    const wsFiles = collectWorkspaceFileTexts(uri, currentFilePath, (text) => text.includes(word));
    const found = findReferences(word, doc.getText(), uri, state.index, params.context.includeDeclaration, wsFiles);
    return found.length > 0 ? found : null;
});
|
|
326
|
+
// Start servicing the client: attach document lifecycle events to the
// connection, then begin listening on the connection's transport.
documents.listen(connection);
connection.listen();
|
|
@@ -0,0 +1,244 @@
|
|
|
1
|
+
import { SourceLocation } from '../errors/diagnostics.js';
/** One segment of a template string: literal text or an interpolated expression. */
export type TemplatePart = {
    kind: 'text';
    value: string;
} | {
    kind: 'expr';
    value: Expr;
};
/** Expression AST: a discriminated union keyed on `kind`; every variant carries its source location. */
export type Expr = {
    kind: 'literal';
    value: string | number | boolean;
    location: SourceLocation;
} | {
    kind: 'field_access';
    // Dotted path split into segments, e.g. `a.b.c` -> ['a', 'b', 'c'].
    segments: string[];
    location: SourceLocation;
} | {
    kind: 'binary';
    op: '+' | '-' | '/' | '*' | '%' | '<' | '>' | '<=' | '>=' | '==' | '!=' | '&&' | '||' | '??';
    left: Expr;
    right: Expr;
    location: SourceLocation;
} | {
    kind: 'unary';
    op: '-' | '!';
    operand: Expr;
    location: SourceLocation;
} | {
    kind: 'group';
    // Parenthesized sub-expression.
    inner: Expr;
    location: SourceLocation;
} | {
    kind: 'call';
    // Function name; see BUILTIN_FUNCTIONS for the built-in set.
    name: string;
    args: Expr[];
    location: SourceLocation;
} | {
    kind: 'template';
    parts: TemplatePart[];
    location: SourceLocation;
} | {
    kind: 'conditional';
    // Ternary: condition ? consequent : alternate.
    condition: Expr;
    consequent: Expr;
    alternate: Expr;
    location: SourceLocation;
};
/** Built-in expression functions with metadata for type checking and LSP. */
export declare const BUILTIN_FUNCTIONS: Record<string, {
    arity: number;
    returnType: 'number' | 'string' | 'unknown';
    signature: string;
    description: string;
}>;
/** `import { names } from "path"` — resolvedPath is filled in by the resolver. */
export interface ImportDecl {
    names: string[];
    path: string;
    resolvedPath?: string;
    location: SourceLocation;
}
/** A persistent memory declaration with a token budget and typed fields. */
export interface MemoryDecl {
    name: string;
    maxTokens: number;
    storage: 'file';
    fields: Field[];
    location: SourceLocation;
}
/** Top-level parse result: all declarations of one compilation unit. */
export interface Program {
    imports: ImportDecl[];
    memories: MemoryDecl[];
    contexts: ContextDecl[];
    nodes: NodeDecl[];
    edges: EdgeDecl[];
    graphs: GraphDecl[];
}
/** A named, token-budgeted context with typed fields. */
export interface ContextDecl {
    name: string;
    maxTokens: number;
    fields: Field[];
    location: SourceLocation;
    // Set when the declaration came from an imported file.
    sourceFile?: string;
}
/** An agent node: model, token budgets, reads/writes, tools, and its output schema. */
export interface NodeDecl {
    name: string;
    model: string;
    budgetIn: number;
    budgetOut: number;
    reads: ContextRef[];
    tools: string[];
    writes: WriteRef[];
    onFailure?: FailureStrategy;
    produces: ProducesDecl;
    location: SourceLocation;
    // Set when the declaration came from an imported file.
    sourceFile?: string;
}
/** The named, typed output a node produces. */
export interface ProducesDecl {
    name: string;
    fields: Field[];
    location: SourceLocation;
}
/** A directed edge from a source node to a (possibly conditional) target. */
export interface EdgeDecl {
    source: string;
    target: EdgeTarget;
    transforms: Transform[];
    location: SourceLocation;
}
/** Edge destination: either a single node or a list of conditional branches. */
export type EdgeTarget = {
    kind: 'direct';
    node: string;
} | {
    kind: 'conditional';
    branches: ConditionalBranch[];
};
export interface ConditionalBranch {
    /** When undefined, this branch represents the `else` case (default target). */
    condition?: Expr;
    target: string;
}
/** One step in a graph's flow; locations are optional for synthesized nodes. */
export type FlowNode = {
    kind: 'node';
    name: string;
    location?: SourceLocation;
} | {
    kind: 'parallel';
    branches: string[];
    location?: SourceLocation;
} | {
    kind: 'foreach';
    // Iterates `source.field`, binding each element to `binding`,
    // capped at maxIterations.
    source: string;
    field: string;
    binding: string;
    maxIterations: number;
    body: FlowNode[];
    location?: SourceLocation;
} | {
    kind: 'let';
    name: string;
    value: Expr;
    location?: SourceLocation;
} | {
    kind: 'graph_call';
    name: string;
    args: GraphArg[];
    location?: SourceLocation;
};
/** A graph declaration: typed input/output, overall budget, params, and flow. */
export interface GraphDecl {
    name: string;
    input: string;
    output: string;
    budget: number;
    params: GraphParam[];
    flow: FlowNode[];
    location: SourceLocation;
}
/** A formal parameter of a graph, optionally with a default value. */
export interface GraphParam {
    name: string;
    type: 'Node' | 'Int' | 'String' | 'Bool';
    default?: string | number | boolean;
    location: SourceLocation;
}
/** A named argument supplied at a graph call site. */
export interface GraphArg {
    name: string;
    value: Expr;
    location: SourceLocation;
}
/** A named, typed field inside a context/memory/produces/struct. */
export interface Field {
    name: string;
    type: TypeExpr;
    location: SourceLocation;
}
/** Type AST: a discriminated union keyed on `kind`. */
export type TypeExpr = {
    kind: 'primitive';
    name: 'String' | 'Int' | 'Float' | 'Bool';
} | {
    kind: 'primitive_range';
    // A Float constrained to [min, max].
    name: 'Float';
    min: number;
    max: number;
} | {
    kind: 'list';
    element: TypeExpr;
} | {
    kind: 'map';
    key: TypeExpr;
    value: TypeExpr;
} | {
    kind: 'optional';
    inner: TypeExpr;
} | {
    kind: 'token_bounded';
    // Inner type limited to `max` tokens.
    inner: TypeExpr;
    max: number;
} | {
    kind: 'enum';
    values: string[];
} | {
    kind: 'struct';
    name: string;
    fields: Field[];
} | {
    kind: 'domain';
    // Domain-specific opaque types recognized by the toolchain.
    name: 'FilePath' | 'FileDiff' | 'TestFile' | 'IssueRef';
};
/** A node's read reference: a context, optionally narrowed to a field path. */
export interface ContextRef {
    context: string;
    field?: string[];
    location: SourceLocation;
}
/** A node's write reference: a memory, optionally narrowed to one field. */
export interface WriteRef {
    memory: string;
    field?: string;
    location: SourceLocation;
}
/** Edge payload transform, applied in declaration order. */
export type Transform = {
    type: 'select';
    fields: string[];
} | {
    type: 'filter';
    field: string;
    condition: Expr;
} | {
    type: 'drop';
    field: string;
} | {
    type: 'compact';
} | {
    type: 'truncate';
    tokens: number;
};
/** What to do when a node fails: retry, fall back to another node, both, skip, or abort. */
export type FailureStrategy = {
    type: 'retry';
    max: number;
} | {
    type: 'fallback';
    node: string;
} | {
    type: 'retry_then_fallback';
    max: number;
    node: string;
} | {
    type: 'skip';
} | {
    type: 'abort';
};
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
/** Built-in expression functions with metadata for type checking and LSP. */
const builtin = (arity, returnType, signature, description) => ({ arity, returnType, signature, description });
export const BUILTIN_FUNCTIONS = {
    len: builtin(1, 'number', 'len(value) -> number', 'Returns the length of an array or string.'),
    max: builtin(2, 'number', 'max(a, b) -> number', 'Returns the larger of two numbers.'),
    min: builtin(2, 'number', 'min(a, b) -> number', 'Returns the smaller of two numbers.'),
    str: builtin(1, 'string', 'str(value) -> string', 'Converts a value to its string representation. Objects are JSON-stringified.'),
    abs: builtin(1, 'number', 'abs(n) -> number', 'Returns the absolute value of a number.'),
    round: builtin(1, 'number', 'round(n) -> number', 'Rounds a number to the nearest integer.'),
    keys: builtin(1, 'unknown', 'keys(obj) -> array', 'Returns the keys of an object as an array.'),
};
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
import { Token } from '../lexer/tokens.js';
import { GraftError } from '../errors/diagnostics.js';
import { Program } from './ast.js';
/** Outcome of a parse: the (possibly partial) program plus collected errors. */
export interface ParseResult {
    program: Program;
    errors: GraftError[];
}
/**
 * Recursive-descent parser over the lexer's token stream.
 * Error-tolerant: collects errors (up to MAX_ERRORS) and resynchronizes at
 * declaration keywords rather than aborting on the first failure.
 */
export declare class Parser {
    private readonly tokens;
    // Cursor into `tokens`.
    private pos;
    // Errors accumulated so far; returned in ParseResult.
    private errors;
    private static readonly MAX_ERRORS;
    // Keywords used by synchronize() as recovery anchors.
    private static readonly DECLARATION_KEYWORDS;
    constructor(tokens: Token[]);
    parse(): ParseResult;
    // Error recovery: skip tokens until the next declaration boundary.
    private synchronize;
    // --- Declaration parsers ---
    private parseImportDecl;
    private parseMemoryDecl;
    private parseContext;
    private parseNode;
    private parseProduces;
    private parseContextRefList;
    private parseWriteRefList;
    private parseIdentifierList;
    private parseFailureStrategy;
    private parseEdge;
    private parseConditionalTarget;
    private parseTransform;
    private parseCondition;
    private parseConditionValue;
    private parseGraph;
    private parseGraphParam;
    /**
     * Parse a sequence of flow nodes separated by arrows.
     * When insideBlock=true, stops when no more arrows (next token should be RBrace).
     * When insideBlock=false, expects -> done to terminate.
     */
    private parseFlowNodes;
    /**
     * Parse a single flow node: identifier, parallel block, foreach block,
     * let binding, or graph call.
     */
    private parseFlowNode;
    private parseLetStep;
    private parseGraphCall;
    /**
     * parallel { SecurityReviewer PerformanceReviewer StyleReviewer }
     *
     * Branches are whitespace-separated identifiers (no commas required).
     * Optional commas are accepted for user convenience.
     */
    private parseParallelStep;
    /**
     * foreach(Planner.output.steps as step, max_iterations: 5) {
     *   Implementer -> Verifier
     * }
     */
    private parseForeachStep;
    // --- Expression parsers (precedence-climbing, lowest first) ---
    private parseExpr;
    private parseNullCoalesce;
    private parseLogicalOr;
    private parseLogicalAnd;
    private parseComparison;
    private parseAdditive;
    private parseMultiplicative;
    private parseUnary;
    private parseTemplateParts;
    private parsePrimary;
    // --- Type parsers ---
    private parseType;
    private parseTypeOrInlineStruct;
    private parseFields;
    private parseTokenValue;
    private parseIntValue;
    private parseNumericValue;
    /**
     * Strict identifier: only accepts TokenType.Identifier.
     * Used for declaration names (context, node, edge, graph), produces names,
     * context ref context-part, graph flow nodes, edge source/target.
     * These are PascalCase by convention and must not collide with keywords.
     */
    private expectIdentifier;
    /**
     * Permissive identifier: accepts TokenType.Identifier OR any keyword token.
     * Used for field names, tool names, enum values, transform field arguments,
     * condition field names -- positions where a keyword-like word is valid as a name.
     */
    private expectIdentifierOrKeyword;
    // --- Low-level token helpers ---
    private expect;
    private check;
    private current;
    private advance;
    private peekType;
    private isAtEnd;
    private error;
}
|