@jsleekr/graft 5.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +235 -0
- package/dist/analyzer/estimator.d.ts +33 -0
- package/dist/analyzer/estimator.js +273 -0
- package/dist/analyzer/graph-checker.d.ts +13 -0
- package/dist/analyzer/graph-checker.js +153 -0
- package/dist/analyzer/scope.d.ts +21 -0
- package/dist/analyzer/scope.js +324 -0
- package/dist/analyzer/types.d.ts +17 -0
- package/dist/analyzer/types.js +323 -0
- package/dist/codegen/agents.d.ts +2 -0
- package/dist/codegen/agents.js +109 -0
- package/dist/codegen/backend.d.ts +16 -0
- package/dist/codegen/backend.js +1 -0
- package/dist/codegen/claude-backend.d.ts +9 -0
- package/dist/codegen/claude-backend.js +47 -0
- package/dist/codegen/codegen.d.ts +10 -0
- package/dist/codegen/codegen.js +57 -0
- package/dist/codegen/hooks.d.ts +2 -0
- package/dist/codegen/hooks.js +165 -0
- package/dist/codegen/orchestration.d.ts +3 -0
- package/dist/codegen/orchestration.js +250 -0
- package/dist/codegen/settings.d.ts +36 -0
- package/dist/codegen/settings.js +87 -0
- package/dist/compiler.d.ts +21 -0
- package/dist/compiler.js +101 -0
- package/dist/constants.d.ts +9 -0
- package/dist/constants.js +13 -0
- package/dist/errors/diagnostics.d.ts +21 -0
- package/dist/errors/diagnostics.js +25 -0
- package/dist/format.d.ts +12 -0
- package/dist/format.js +46 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +181 -0
- package/dist/lexer/lexer.d.ts +23 -0
- package/dist/lexer/lexer.js +268 -0
- package/dist/lexer/tokens.d.ts +96 -0
- package/dist/lexer/tokens.js +150 -0
- package/dist/lsp/features/code-actions.d.ts +7 -0
- package/dist/lsp/features/code-actions.js +58 -0
- package/dist/lsp/features/completions.d.ts +7 -0
- package/dist/lsp/features/completions.js +271 -0
- package/dist/lsp/features/definition.d.ts +3 -0
- package/dist/lsp/features/definition.js +32 -0
- package/dist/lsp/features/diagnostics.d.ts +4 -0
- package/dist/lsp/features/diagnostics.js +33 -0
- package/dist/lsp/features/hover.d.ts +7 -0
- package/dist/lsp/features/hover.js +88 -0
- package/dist/lsp/features/index.d.ts +9 -0
- package/dist/lsp/features/index.js +9 -0
- package/dist/lsp/features/references.d.ts +7 -0
- package/dist/lsp/features/references.js +53 -0
- package/dist/lsp/features/rename.d.ts +17 -0
- package/dist/lsp/features/rename.js +198 -0
- package/dist/lsp/features/symbols.d.ts +7 -0
- package/dist/lsp/features/symbols.js +74 -0
- package/dist/lsp/features/utils.d.ts +3 -0
- package/dist/lsp/features/utils.js +65 -0
- package/dist/lsp/features.d.ts +20 -0
- package/dist/lsp/features.js +513 -0
- package/dist/lsp/server.d.ts +2 -0
- package/dist/lsp/server.js +327 -0
- package/dist/parser/ast.d.ts +244 -0
- package/dist/parser/ast.js +10 -0
- package/dist/parser/parser.d.ts +95 -0
- package/dist/parser/parser.js +1175 -0
- package/dist/program-index.d.ts +21 -0
- package/dist/program-index.js +74 -0
- package/dist/resolver/resolver.d.ts +9 -0
- package/dist/resolver/resolver.js +136 -0
- package/dist/runner.d.ts +13 -0
- package/dist/runner.js +41 -0
- package/dist/runtime/executor.d.ts +56 -0
- package/dist/runtime/executor.js +285 -0
- package/dist/runtime/expr-eval.d.ts +3 -0
- package/dist/runtime/expr-eval.js +138 -0
- package/dist/runtime/flow-runner.d.ts +21 -0
- package/dist/runtime/flow-runner.js +230 -0
- package/dist/runtime/memory.d.ts +5 -0
- package/dist/runtime/memory.js +41 -0
- package/dist/runtime/prompt-builder.d.ts +12 -0
- package/dist/runtime/prompt-builder.js +66 -0
- package/dist/runtime/subprocess.d.ts +20 -0
- package/dist/runtime/subprocess.js +99 -0
- package/dist/runtime/token-tracker.d.ts +36 -0
- package/dist/runtime/token-tracker.js +56 -0
- package/dist/runtime/transforms.d.ts +2 -0
- package/dist/runtime/transforms.js +104 -0
- package/dist/types.d.ts +10 -0
- package/dist/types.js +1 -0
- package/dist/utils.d.ts +3 -0
- package/dist/utils.js +35 -0
- package/dist/version.d.ts +1 -0
- package/dist/version.js +11 -0
- package/package.json +70 -0
|
@@ -0,0 +1,513 @@
|
|
|
1
|
+
import { DiagnosticSeverity, MarkupKind, CompletionItemKind, InsertTextFormat, SymbolKind } from 'vscode-languageserver/node';
|
|
2
|
+
import { pathToFileURL } from 'node:url';
|
|
3
|
+
import * as path from 'node:path';
|
|
4
|
+
import { MODEL_MAP } from '../constants.js';
|
|
5
|
+
// --- Diagnostics ---
/**
 * Convert compiler errors and warnings into LSP Diagnostic objects.
 * Errors are reported at Error severity, warnings at Warning severity;
 * errors come first in the returned array, as before.
 */
export function toDiagnostics(errors, warnings) {
    return [
        ...Array.from(errors, (e) => makeDiagnostic(e, DiagnosticSeverity.Error)),
        ...Array.from(warnings, (w) => makeDiagnostic(w, DiagnosticSeverity.Warning)),
    ];
}
|
|
16
|
+
/**
 * Build one LSP Diagnostic from a compiler message.
 * Converts the 1-based source line/column into 0-based LSP positions;
 * the range spans `location.length` characters (default 1).
 * The optional `code` field is attached only when present on `e`.
 */
function makeDiagnostic(e, severity) {
    const startLine = Math.max(0, e.location.line - 1);
    const startChar = Math.max(0, e.location.column - 1);
    const span = e.location.length ?? 1;
    const diagnostic = {
        range: {
            start: { line: startLine, character: startChar },
            end: { line: startLine, character: startChar + span },
        },
        severity,
        message: e.message,
        source: 'graft',
    };
    if (e.code) {
        diagnostic.code = e.code;
    }
    return diagnostic;
}
|
|
31
|
+
// --- Document Symbols ---
/**
 * Produce the document-symbol outline for a parsed program.
 * Emits, in order: contexts (Class), nodes (Function), memories
 * (Variable), graphs (Module), then edges (Event, labelled
 * "source -> target" or "source -> conditional").
 */
export function getDocumentSymbols(program, index) {
    const symbols = [];
    const declarationGroups = [
        [program.contexts, SymbolKind.Class],
        [program.nodes, SymbolKind.Function],
        [program.memories, SymbolKind.Variable],
        [program.graphs, SymbolKind.Module],
    ];
    for (const [decls, kind] of declarationGroups) {
        for (const decl of decls) {
            symbols.push(makeSymbol(decl.name, kind, decl.location));
        }
    }
    for (const edge of program.edges) {
        const targetName = edge.target.kind === 'direct' ? edge.target.node : 'conditional';
        symbols.push(makeSymbol(`${edge.source} -> ${targetName}`, SymbolKind.Event, edge.location));
    }
    return symbols;
}
|
|
52
|
+
/**
 * Build a DocumentSymbol-shaped object whose range covers the
 * identifier at its declaration site (1-based loc → 0-based LSP).
 */
function makeSymbol(name, kind, loc) {
    const start = {
        line: Math.max(0, loc.line - 1),
        character: Math.max(0, loc.column - 1),
    };
    const end = { line: start.line, character: start.character + name.length };
    const range = { start, end };
    // range and selectionRange intentionally share one object, matching
    // the original behavior.
    return { name, kind, range, selectionRange: range };
}
|
|
61
|
+
// --- Word Extraction ---
/**
 * Return the identifier under (line, character) in `text`, or null when
 * the position is out of bounds or not on an identifier character.
 * A cursor exactly at the end of a word does NOT match it (half-open
 * [start, end) check), matching the original behavior.
 */
export function getWordAtPosition(text, line, character) {
    const allLines = text.split('\n');
    if (line < 0 || line >= allLines.length) {
        return null;
    }
    const lineText = allLines[line];
    if (character < 0 || character > lineText.length) {
        return null;
    }
    for (const match of lineText.matchAll(/[A-Za-z_][A-Za-z0-9_]*/g)) {
        const start = match.index;
        const end = start + match[0].length;
        if (character >= start && character < end) {
            return match[0];
        }
    }
    return null;
}
|
|
80
|
+
// --- Hover ---
// Static hover documentation for DSL keywords, keyed by the keyword
// itself. Values are markdown strings (rendered via mkHover) with
// fenced ```graft example snippets. Looked up first in getHoverInfo,
// before any symbol-table lookups.
const KEYWORD_DOCS = {
    context: 'Declares a context schema with typed fields and a max_tokens budget.\n\n```graft\ncontext Name(max_tokens: 1k) {\n field: Type\n}\n```',
    node: 'Declares a processing node with model, budget, reads, writes, and produces.\n\n```graft\nnode Name(model: sonnet, budget: 5k/2k) {\n reads: [ContextName]\n produces Output { field: Type }\n}\n```',
    memory: 'Declares persistent memory with typed fields and storage backend.\n\n```graft\nmemory Name(max_tokens: 2k, storage: file) {\n field: Type\n}\n```',
    graph: 'Declares an execution graph connecting nodes in a flow.\n\n```graft\ngraph Name(input: In, output: Out, budget: 10k) {\n Start -> Middle -> done\n}\n```',
    edge: 'Declares a data transform between nodes.\n\n```graft\nedge Source -> Target | select(field) | compact\n```',
    import: 'Imports contexts and nodes from another .gft file.\n\n```graft\nimport { Name } from "./lib.gft"\n```',
    reads: 'Specifies which contexts, produces, or memories a node reads from.\n\n```graft\nreads: [ContextName, Produces.field]\n```',
    writes: 'Specifies which memories a node writes to.\n\n```graft\nwrites: [MemoryName.field]\n```',
    produces: 'Declares the output schema of a node.\n\n```graft\nproduces OutputName {\n field: Type\n}\n```',
    model: 'Specifies the LLM model alias for a node.\n\nAliases: sonnet, opus, haiku',
    max_tokens: 'Sets the maximum token budget for a context or memory.',
    on_failure: 'Specifies failure handling strategy for a node.\n\nStrategies: retry(N), fallback(Node), skip, abort',
    storage: 'Specifies the storage backend for a memory declaration.\n\nCurrently supported: file',
    foreach: 'Iterates over a list field from a node\'s output.\n\n```graft\nforeach(Node.output.field as item, max_iterations: 5) {\n Step1 -> Step2\n}\n```',
    parallel: 'Executes multiple nodes concurrently.\n\n```graft\nparallel { Node1 Node2 Node3 }\n```',
};
|
|
98
|
+
/**
 * Resolve hover markdown for `word`. Lookup order: static keyword docs,
 * then context, node, memory, and finally produces declarations from
 * the program index. Returns an LSP Hover or null when nothing matches.
 */
export function getHoverInfo(word, index) {
    // Shared renderer for a field list inside a fenced code block.
    const fieldBlock = (fields) => fields.map(f => ` ${f.name}: ${formatType(f.type)}`).join('\n');
    const keywordDoc = KEYWORD_DOCS[word];
    if (keywordDoc) {
        return mkHover(keywordDoc);
    }
    const ctx = index.contextMap.get(word);
    if (ctx) {
        return mkHover(`**context** ${ctx.name} (max_tokens: ${ctx.maxTokens})\n\`\`\`\n${fieldBlock(ctx.fields)}\n\`\`\``);
    }
    const node = index.nodeMap.get(word);
    if (node) {
        const formatRead = (r) => {
            if (!r.field) {
                return r.context;
            }
            if (r.field.length === 1) {
                return `${r.context}.${r.field[0]}`;
            }
            return `${r.context}.{${r.field.join(', ')}}`;
        };
        const formatWrite = (w) => (w.field ? `${w.memory}.${w.field}` : w.memory);
        // Writes are appended onto the reads line (not a separate bullet),
        // matching the original rendering.
        const writesPart = node.writes.length > 0
            ? `\nwrites: ${node.writes.map(formatWrite).join(', ')}`
            : '';
        const hoverLines = [
            `**node** ${node.name}`,
            `- model: ${node.model}`,
            `- budget: ${node.budgetIn}/${node.budgetOut}`,
            `- reads: ${node.reads.map(formatRead).join(', ')}${writesPart}`,
            `- produces: ${node.produces.name}\n\`\`\`\n${fieldBlock(node.produces.fields)}\n\`\`\``,
        ];
        return mkHover(hoverLines.join('\n'));
    }
    const mem = index.memoryMap.get(word);
    if (mem) {
        return mkHover(`**memory** ${mem.name} (max_tokens: ${mem.maxTokens}, storage: ${mem.storage})\n\`\`\`\n${fieldBlock(mem.fields)}\n\`\`\``);
    }
    const producerNode = index.producesNodeMap.get(word);
    if (producerNode) {
        return mkHover(`**produces** ${word} (from node ${producerNode.name})\n\`\`\`\n${fieldBlock(producerNode.produces.fields)}\n\`\`\``);
    }
    return null;
}
|
|
137
|
+
// --- Go-to-Definition ---
/**
 * Locate the declaration site for `word`, checking (in order) contexts,
 * nodes, produces outputs, then memories. Returns an LSP Location or
 * null when `word` names nothing in the index.
 */
export function getDefinitionLocation(word, index, currentUri) {
    const ctx = index.contextMap.get(word);
    if (ctx) {
        return declLocation(ctx.location, ctx.sourceFile, word.length, currentUri);
    }
    const node = index.nodeMap.get(word);
    if (node) {
        return declLocation(node.location, node.sourceFile, word.length, currentUri);
    }
    const producer = index.producesNodeMap.get(word);
    if (producer) {
        return declLocation(producer.produces.location, producer.sourceFile, word.length, currentUri);
    }
    // Memories carry no sourceFile -- they cannot be imported (v2.0-R13),
    // so the current document's URI is always used.
    const mem = index.memoryMap.get(word);
    if (mem) {
        return declLocation(mem.location, undefined, word.length, currentUri);
    }
    return null;
}
|
|
158
|
+
/**
 * Build an LSP Location for a declaration. When `sourceFile` is truthy
 * the URI points at that file; otherwise the current document's URI is
 * reused. 1-based loc line/column → 0-based LSP positions; the range
 * spans `nameLength` characters.
 */
function declLocation(loc, sourceFile, nameLength, currentUri) {
    const uri = sourceFile ? pathToFileURL(sourceFile).toString() : currentUri;
    const start = {
        line: Math.max(0, loc.line - 1),
        character: Math.max(0, loc.column - 1),
    };
    const end = { line: start.line, character: start.character + nameLength };
    return { uri, range: { start, end } };
}
|
|
170
|
+
// --- Type Formatting ---
/**
 * Render a graft type AST node as display text for hover/completion
 * details. Recurses into composite types (list, map, optional,
 * token_bounded, struct).
 *
 * Fix: the original switch had no default, so an unrecognized
 * `type.kind` returned `undefined` and leaked the literal text
 * "undefined" into the UI. Unknown kinds now fall back to the kind tag.
 */
export function formatType(type) {
    switch (type.kind) {
        case 'primitive': return type.name;
        case 'primitive_range': return `Float(${type.min}..${type.max})`;
        case 'list': return `List<${formatType(type.element)}>`;
        case 'map': return `Map<${formatType(type.key)}, ${formatType(type.value)}>`;
        case 'optional': return `${formatType(type.inner)}?`;
        case 'token_bounded': return `${formatType(type.inner)}(max: ${type.max})`;
        case 'enum': return type.values.join(' | ');
        case 'struct': return `{ ${type.fields.map(f => `${f.name}: ${formatType(f.type)}`).join(', ')} }`;
        case 'domain': return type.name;
        // Defensive fallback for AST variants added after this was written.
        default: return String(type.kind);
    }
}
|
|
184
|
+
// --- Helpers ---
/** Wrap markdown text in an LSP Hover payload. */
function mkHover(value) {
    const contents = { kind: MarkupKind.Markdown, value };
    return { contents };
}
|
|
188
|
+
// --- Completions ---
/**
 * Compute completion items for the cursor at (line, character).
 *
 * Dispatches on the text before the cursor through an ordered guard
 * chain — the ORDER matters (e.g. the `Name.` dot check must run before
 * the `reads: [` bracket check so `reads: [Ctx.` yields field items):
 *   1. comments / string literals  → nothing
 *   2. `storage:` / `model:` / `on_failure:` values
 *   3. `fallback(` → node names
 *   4. inside `import { }` → names from the resolver callback
 *   5. `Name.` → field completions
 *   6. inside `reads: [` / `writes: [` brackets
 *   7. inside a graph body → node names + `done`
 *   8. bare start of line → top-level keyword snippets
 *
 * `cache` is the last successful analysis (`cache.index` holds the
 * symbol maps) and may be null before the first parse;
 * `resolveImportNames(path)` is an optional callback returning the
 * importable names of another file.
 */
export function getCompletions(text, line, character, cache, resolveImportNames) {
    const lines = text.split('\n');
    if (line < 0 || line >= lines.length)
        return [];
    // Strip a trailing CR so Windows line endings don't break regexes.
    const lineText = lines[line].replace(/\r$/, '');
    const before = lineText.slice(0, character);
    // Suppress completions inside comments
    if (isInComment(lines, line, character))
        return [];
    // Suppress completions inside string literals
    if (isInString(lineText, character))
        return [];
    // After `storage:` → storage types
    if (/storage\s*:\s*\w*$/.test(before)) {
        return [{ label: 'file', kind: CompletionItemKind.EnumMember, detail: 'File-based storage' }];
    }
    // After `model:` → model aliases (detail shows the full model id)
    if (/model\s*:\s*\w*$/.test(before)) {
        return Object.entries(MODEL_MAP).map(([alias, full]) => ({
            label: alias,
            kind: CompletionItemKind.EnumMember,
            detail: full,
        }));
    }
    // After `on_failure:` → strategy keywords
    if (/on_failure\s*:\s*\w*$/.test(before)) {
        return [
            { label: 'retry', kind: CompletionItemKind.Keyword,
                insertText: 'retry(${1:3})', insertTextFormat: InsertTextFormat.Snippet },
            { label: 'fallback', kind: CompletionItemKind.Keyword,
                insertText: 'fallback(${1:NodeName})', insertTextFormat: InsertTextFormat.Snippet },
            { label: 'skip', kind: CompletionItemKind.Keyword },
            { label: 'abort', kind: CompletionItemKind.Keyword },
            { label: 'retry_then_fallback', kind: CompletionItemKind.Keyword,
                insertText: 'retry(${1:3}, fallback(${2:NodeName}))',
                insertTextFormat: InsertTextFormat.Snippet,
                detail: 'Retry N times, then fallback to node' },
        ];
    }
    // Inside `fallback(` → node names
    if (/fallback\s*\(\s*\w*$/.test(before)) {
        if (!cache)
            return [];
        return [...cache.index.nodeMap.keys()].map(name => ({
            label: name,
            kind: CompletionItemKind.Class,
            detail: 'node',
        }));
    }
    // Inside `import { }` → names from resolver
    if (isInsideImportBraces(lines, line, character)) {
        if (!resolveImportNames)
            return [];
        const importPath = extractImportPath(lines, line) ?? '';
        const names = resolveImportNames(importPath);
        return names.map(name => ({
            label: name,
            kind: CompletionItemKind.Class,
            detail: 'importable',
        }));
    }
    // After `Name.` → field completions (also handles multi-field brace `Name.{f1, `)
    const dotMatch = before.match(/([A-Za-z_]\w*)\.\s*(?:\{[^}]*)?\s*\w*$/);
    if (dotMatch) {
        return getFieldCompletions(dotMatch[1], cache?.index ?? null);
    }
    // Inside `reads: [` → context + memory + produces names
    if (isInsideBracketAfter(lines, line, character, 'reads')) {
        if (!cache)
            return [];
        const items = [];
        for (const name of cache.index.contextMap.keys()) {
            items.push({ label: name, kind: CompletionItemKind.Class, detail: 'context' });
        }
        for (const name of cache.index.producesNodeMap.keys()) {
            items.push({ label: name, kind: CompletionItemKind.Struct, detail: 'produces' });
        }
        for (const name of cache.index.memoryMap.keys()) {
            items.push({ label: name, kind: CompletionItemKind.Variable, detail: 'memory' });
        }
        return items;
    }
    // Inside `writes: [` → memory names only
    if (isInsideBracketAfter(lines, line, character, 'writes')) {
        if (!cache)
            return [];
        return [...cache.index.memoryMap.keys()].map(name => ({
            label: name,
            kind: CompletionItemKind.Variable,
            detail: 'memory',
        }));
    }
    // Inside graph flow → node names + done
    if (isInsideBlock(lines, line, 'graph')) {
        const items = [
            { label: 'done', kind: CompletionItemKind.Keyword, detail: 'Terminal node' },
        ];
        if (cache) {
            for (const name of cache.index.nodeMap.keys()) {
                items.push({ label: name, kind: CompletionItemKind.Class, detail: 'node' });
            }
        }
        return items;
    }
    // Top-level → keyword completions + snippets
    if (/^\s*\w*$/.test(before)) {
        return topLevelCompletions();
    }
    return [];
}
|
|
299
|
+
/**
 * Completions for `Name.` — the fields of a context, produces output,
 * or memory called `name`. Lookup order: contexts, then produces
 * (keyed by both node name and produces name), then memories. Empty
 * when the index is missing or has no such declaration.
 */
function getFieldCompletions(name, index) {
    if (!index) {
        return [];
    }
    const fieldItem = (label, type) => ({
        label,
        kind: CompletionItemKind.Field,
        detail: formatType(type),
    });
    // Context fields
    const ctx = index.contextMap.get(name);
    if (ctx) {
        return ctx.fields.map((f) => fieldItem(f.name, f.type));
    }
    // Produces fields (keyed by both node name and produces name)
    const produced = index.producesFieldsMap.get(name);
    if (produced) {
        return [...produced].map(([fieldName, type]) => fieldItem(fieldName, type));
    }
    // Memory fields
    const memoryFields = index.memoryFieldsMap.get(name);
    if (memoryFields) {
        return [...memoryFields].map(([fieldName, type]) => fieldItem(fieldName, type));
    }
    return [];
}
|
|
331
|
+
/**
 * True when position (line, character) falls inside a comment.
 *
 * Scans every line from the top of the document up to the cursor,
 * tracking block-comment state across lines. On the cursor's own line
 * only text left of the cursor is considered (`endCol`). Double-quoted
 * strings are skipped so `//` or `/*` inside a string does not start a
 * comment. Returns true when the cursor line hits a `//` comment, or
 * when the scan ends still inside an unclosed block comment.
 * NOTE(review): string escape sequences are not handled — presumably
 * the DSL has none; confirm against the lexer.
 */
function isInComment(lines, line, character) {
    let inBlock = false;
    for (let i = 0; i <= line; i++) {
        // Strip a trailing CR so Windows line endings don't shift columns.
        const l = (lines[i] ?? '').replace(/\r$/, '');
        // On the cursor line, only look left of the cursor.
        const endCol = i === line ? character : l.length;
        let j = 0;
        while (j < endCol) {
            if (!inBlock) {
                if (l[j] === '/' && j + 1 < l.length && l[j + 1] === '/') {
                    // `//` on the cursor's line means the cursor is in it;
                    // on earlier lines the rest of the line is irrelevant.
                    if (i === line)
                        return true;
                    break; // rest of this line is comment, move to next
                }
                if (l[j] === '/' && j + 1 < l.length && l[j + 1] === '*') {
                    inBlock = true;
                    j += 2;
                    continue;
                }
                if (l[j] === '"') {
                    // Skip over a string literal so comment markers inside
                    // it are ignored.
                    j++;
                    while (j < endCol && l[j] !== '"')
                        j++;
                    if (j < endCol)
                        j++;
                    continue;
                }
            }
            else {
                if (l[j] === '*' && j + 1 < l.length && l[j + 1] === '/') {
                    inBlock = false;
                    j += 2;
                    continue;
                }
            }
            j++;
        }
    }
    // Still-open block comment at the cursor → inside a comment.
    return inBlock;
}
|
|
370
|
+
/**
 * True when `character` sits inside an unterminated double-quoted
 * string on this line: counts the quotes left of the cursor, odd count
 * means inside. NOTE(review): escape sequences are not considered —
 * presumably the DSL has none; confirm against the lexer.
 */
function isInString(lineText, character) {
    const limit = Math.min(character, lineText.length);
    let quoteCount = 0;
    for (let i = 0; i < limit; i++) {
        if (lineText[i] === '"') {
            quoteCount++;
        }
    }
    return quoteCount % 2 === 1;
}
|
|
378
|
+
/**
 * True when the cursor is inside an unclosed `[ ... ]` list introduced
 * by `keyword` (e.g. `reads: [` or `writes: [`). Scans backwards from
 * the cursor, balancing brackets; the unmatched `[` must be preceded by
 * the keyword (with an optional trailing colon).
 *
 * Fix: the original test `prefix.endsWith(keyword)` matched any
 * identifier that merely ends with the keyword (e.g. `threads [`
 * matching `reads`), producing completions in the wrong list. A
 * word-boundary regex now requires the keyword to stand alone; it also
 * tolerates whitespace before the colon (`reads : [`).
 */
function isInsideBracketAfter(lines, currentLine, character, keyword) {
    let depth = 0;
    for (let i = currentLine; i >= 0; i--) {
        // Strip a trailing CR so Windows line endings don't shift columns.
        const l = (lines[i] ?? '').replace(/\r$/, '');
        const end = i === currentLine ? character : l.length;
        for (let j = end - 1; j >= 0; j--) {
            if (l[j] === ']')
                depth++;
            if (l[j] === '[') {
                if (depth > 0) {
                    depth--;
                }
                else {
                    // Unmatched `[` — check what introduces it.
                    const prefix = l.slice(0, j).trimEnd();
                    return new RegExp(`(?:^|[^A-Za-z0-9_])${keyword}\\s*:?$`).test(prefix);
                }
            }
        }
    }
    return false;
}
|
|
399
|
+
function isInsideImportBraces(lines, currentLine, character) {
|
|
400
|
+
let depth = 0;
|
|
401
|
+
for (let i = currentLine; i >= 0; i--) {
|
|
402
|
+
const l = (lines[i] ?? '').replace(/\r$/, '');
|
|
403
|
+
const end = i === currentLine ? character : l.length;
|
|
404
|
+
for (let j = end - 1; j >= 0; j--) {
|
|
405
|
+
if (l[j] === '}')
|
|
406
|
+
depth++;
|
|
407
|
+
if (l[j] === '{') {
|
|
408
|
+
if (depth > 0) {
|
|
409
|
+
depth--;
|
|
410
|
+
}
|
|
411
|
+
else {
|
|
412
|
+
const prefix = l.slice(0, j).trimEnd();
|
|
413
|
+
return /\bimport\s*$/.test(prefix);
|
|
414
|
+
}
|
|
415
|
+
}
|
|
416
|
+
}
|
|
417
|
+
}
|
|
418
|
+
return false;
|
|
419
|
+
}
|
|
420
|
+
/**
 * Find the `from "path"` clause of an import near `startLine` (same
 * line or up to two lines below, for multi-line imports). Returns the
 * quoted path or null when none is found.
 */
function extractImportPath(lines, startLine) {
    const lastLine = Math.min(startLine + 3, lines.length);
    for (let i = startLine; i < lastLine; i++) {
        const m = /from\s+"([^"]+)"/.exec(lines[i]);
        if (m) {
            return m[1];
        }
    }
    return null;
}
|
|
428
|
+
/**
 * True when `currentLine` sits inside a `{ ... }` block whose header
 * line starts with `keyword` (e.g. a graph body). Scans backwards,
 * balancing braces; unlike the cursor-aware helpers, the entire current
 * line is considered, matching the original behavior.
 */
function isInsideBlock(lines, currentLine, keyword) {
    // Loop-invariant header pattern, built once per call.
    const headerRe = new RegExp(`^\\s*${keyword}\\s`);
    let closersSeen = 0;
    for (let i = currentLine; i >= 0; i--) {
        // Strip a trailing CR so Windows line endings don't shift columns.
        const text = (lines[i] ?? '').replace(/\r$/, '');
        for (let j = text.length - 1; j >= 0; j--) {
            const ch = text[j];
            if (ch === '}') {
                closersSeen++;
            }
            else if (ch === '{') {
                if (closersSeen === 0) {
                    // Unmatched `{` — check its header line.
                    return headerRe.test(text);
                }
                closersSeen--;
            }
        }
    }
    return false;
}
|
|
447
|
+
/**
 * Completion items offered at the start of a line (top level of a .gft
 * file): the declaration keywords, most with LSP snippet bodies
 * (`${n:placeholder}` tab stops). `edge` has no snippet because its
 * shape is a single line the user types directly.
 */
function topLevelCompletions() {
    return [
        {
            label: 'context',
            kind: CompletionItemKind.Keyword,
            detail: 'Declare a context schema',
            insertText: 'context ${1:Name}(max_tokens: ${2:1k}) {\n ${3:field}: ${4:String}\n}',
            insertTextFormat: InsertTextFormat.Snippet,
        },
        {
            label: 'node',
            kind: CompletionItemKind.Keyword,
            detail: 'Declare a processing node',
            insertText: 'node ${1:Name}(model: ${2:sonnet}, budget: ${3:5k}/${4:2k}) {\n reads: [${5}]\n produces ${6:Output} {\n ${7:field}: ${8:String}\n }\n}',
            insertTextFormat: InsertTextFormat.Snippet,
        },
        {
            label: 'memory',
            kind: CompletionItemKind.Keyword,
            detail: 'Declare persistent memory',
            insertText: 'memory ${1:Name}(max_tokens: ${2:2k}, storage: file) {\n ${3:field}: ${4:String}\n}',
            insertTextFormat: InsertTextFormat.Snippet,
        },
        {
            label: 'graph',
            kind: CompletionItemKind.Keyword,
            detail: 'Declare execution graph',
            insertText: 'graph ${1:Name}(input: ${2:Input}, output: ${3:Output}, budget: ${4:10k}) {\n ${5:Start} -> done\n}',
            insertTextFormat: InsertTextFormat.Snippet,
        },
        { label: 'edge', kind: CompletionItemKind.Keyword, detail: 'Declare edge transform' },
        {
            label: 'import',
            kind: CompletionItemKind.Keyword,
            detail: 'Import from another file',
            insertText: 'import { ${1:Name} } from "${2:./lib.gft}"',
            insertTextFormat: InsertTextFormat.Snippet,
        },
    ];
}
|
|
487
|
+
// --- Auto-Import Helpers ---
/**
 * Pull the undefined identifier out of a scope diagnostic. Prefers the
 * single-quoted name embedded in the message (all SCOPE_UNDEFINED_REF
 * messages quote the name); falls back to the word at the diagnostic
 * position in the document text.
 */
export function extractUndefinedName(message, docText, line, character) {
    const quoted = /'([^']+)'/.exec(message);
    return quoted ? quoted[1] : getWordAtPosition(docText, line, character);
}
|
|
496
|
+
/**
 * Build the text edit for auto-importing `name` from `fromPath`:
 * the new import line goes after the LAST existing import, or at the
 * top of the file when there are none.
 */
export function buildAutoImportEdit(name, fromPath, docText) {
    let insertLine = 0;
    docText.split('\n').forEach((lineText, i) => {
        if (/^\s*import\s+\{/.test(lineText)) {
            insertLine = i + 1;
        }
    });
    return { insertLine, newText: `import { ${name} } from "${fromPath}"\n` };
}
|
|
506
|
+
/**
 * Relative import path from one .gft file to another, normalized to
 * forward slashes and guaranteed to start with './' or '../'.
 */
export function computeRelativeImportPath(fromFile, toFile) {
    const raw = path.relative(path.dirname(fromFile), toFile);
    const posix = raw.replace(/\\/g, '/'); // Windows separators → '/'
    return posix.startsWith('.') ? posix : `./${posix}`;
}
|
|
513
|
+
//# sourceMappingURL=features.js.map
|