@jsleekr/graft 5.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +235 -0
- package/dist/analyzer/estimator.d.ts +33 -0
- package/dist/analyzer/estimator.js +273 -0
- package/dist/analyzer/graph-checker.d.ts +13 -0
- package/dist/analyzer/graph-checker.js +153 -0
- package/dist/analyzer/scope.d.ts +21 -0
- package/dist/analyzer/scope.js +324 -0
- package/dist/analyzer/types.d.ts +17 -0
- package/dist/analyzer/types.js +323 -0
- package/dist/codegen/agents.d.ts +2 -0
- package/dist/codegen/agents.js +109 -0
- package/dist/codegen/backend.d.ts +16 -0
- package/dist/codegen/backend.js +1 -0
- package/dist/codegen/claude-backend.d.ts +9 -0
- package/dist/codegen/claude-backend.js +47 -0
- package/dist/codegen/codegen.d.ts +10 -0
- package/dist/codegen/codegen.js +57 -0
- package/dist/codegen/hooks.d.ts +2 -0
- package/dist/codegen/hooks.js +165 -0
- package/dist/codegen/orchestration.d.ts +3 -0
- package/dist/codegen/orchestration.js +250 -0
- package/dist/codegen/settings.d.ts +36 -0
- package/dist/codegen/settings.js +87 -0
- package/dist/compiler.d.ts +21 -0
- package/dist/compiler.js +101 -0
- package/dist/constants.d.ts +9 -0
- package/dist/constants.js +13 -0
- package/dist/errors/diagnostics.d.ts +21 -0
- package/dist/errors/diagnostics.js +25 -0
- package/dist/format.d.ts +12 -0
- package/dist/format.js +46 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +181 -0
- package/dist/lexer/lexer.d.ts +23 -0
- package/dist/lexer/lexer.js +268 -0
- package/dist/lexer/tokens.d.ts +96 -0
- package/dist/lexer/tokens.js +150 -0
- package/dist/lsp/features/code-actions.d.ts +7 -0
- package/dist/lsp/features/code-actions.js +58 -0
- package/dist/lsp/features/completions.d.ts +7 -0
- package/dist/lsp/features/completions.js +271 -0
- package/dist/lsp/features/definition.d.ts +3 -0
- package/dist/lsp/features/definition.js +32 -0
- package/dist/lsp/features/diagnostics.d.ts +4 -0
- package/dist/lsp/features/diagnostics.js +33 -0
- package/dist/lsp/features/hover.d.ts +7 -0
- package/dist/lsp/features/hover.js +88 -0
- package/dist/lsp/features/index.d.ts +9 -0
- package/dist/lsp/features/index.js +9 -0
- package/dist/lsp/features/references.d.ts +7 -0
- package/dist/lsp/features/references.js +53 -0
- package/dist/lsp/features/rename.d.ts +17 -0
- package/dist/lsp/features/rename.js +198 -0
- package/dist/lsp/features/symbols.d.ts +7 -0
- package/dist/lsp/features/symbols.js +74 -0
- package/dist/lsp/features/utils.d.ts +3 -0
- package/dist/lsp/features/utils.js +65 -0
- package/dist/lsp/features.d.ts +20 -0
- package/dist/lsp/features.js +513 -0
- package/dist/lsp/server.d.ts +2 -0
- package/dist/lsp/server.js +327 -0
- package/dist/parser/ast.d.ts +244 -0
- package/dist/parser/ast.js +10 -0
- package/dist/parser/parser.d.ts +95 -0
- package/dist/parser/parser.js +1175 -0
- package/dist/program-index.d.ts +21 -0
- package/dist/program-index.js +74 -0
- package/dist/resolver/resolver.d.ts +9 -0
- package/dist/resolver/resolver.js +136 -0
- package/dist/runner.d.ts +13 -0
- package/dist/runner.js +41 -0
- package/dist/runtime/executor.d.ts +56 -0
- package/dist/runtime/executor.js +285 -0
- package/dist/runtime/expr-eval.d.ts +3 -0
- package/dist/runtime/expr-eval.js +138 -0
- package/dist/runtime/flow-runner.d.ts +21 -0
- package/dist/runtime/flow-runner.js +230 -0
- package/dist/runtime/memory.d.ts +5 -0
- package/dist/runtime/memory.js +41 -0
- package/dist/runtime/prompt-builder.d.ts +12 -0
- package/dist/runtime/prompt-builder.js +66 -0
- package/dist/runtime/subprocess.d.ts +20 -0
- package/dist/runtime/subprocess.js +99 -0
- package/dist/runtime/token-tracker.d.ts +36 -0
- package/dist/runtime/token-tracker.js +56 -0
- package/dist/runtime/transforms.d.ts +2 -0
- package/dist/runtime/transforms.js +104 -0
- package/dist/types.d.ts +10 -0
- package/dist/types.js +1 -0
- package/dist/utils.d.ts +3 -0
- package/dist/utils.js +35 -0
- package/dist/version.d.ts +1 -0
- package/dist/version.js +11 -0
- package/package.json +70 -0
|
@@ -0,0 +1,165 @@
|
|
|
1
|
+
import { formatExpr } from '../format.js';
|
|
2
|
+
/**
 * Generate the source text of a standalone Node.js hook script for one edge.
 *
 * The returned string is a complete script that, when run as a PostToolUse
 * hook: reads the source node's JSON output, applies the edge's transforms,
 * writes the transformed JSON to a per-edge file, and appends a
 * size-reduction line to the token log.
 *
 * Returns null when no hook is needed: the edge has no transforms, or its
 * target is not a direct node reference.
 */
export function generateHook(edge) {
    if (edge.transforms.length === 0)
        return null;
    if (edge.target.kind !== 'direct')
        return null;
    // Lower-cased names are used consistently for on-disk file paths.
    const source = edge.source.toLowerCase();
    const target = edge.target.node.toLowerCase();
    const { code: transformCode, isCompact, needsCompactFn, needsTruncateFn } = transformsToJs(edge.transforms);
    // Compact mode minifies the JSON output; otherwise pretty-print (2-space).
    const stringify = isCompact ? 'JSON.stringify(result)' : 'JSON.stringify(result, null, 2)';
    // Build helper functions section
    const helpers = [];
    if (needsCompactFn) {
        helpers.push(COMPACT_FN);
    }
    if (needsTruncateFn) {
        helpers.push(TRUNCATE_FN);
    }
    const helperSection = helpers.length > 0 ? '\n' + helpers.join('\n') + '\n' : '';
    // Everything below is the generated script's text, not code that runs here.
    return `#!/usr/bin/env node
// Auto-generated by Graft Compiler
// Edge: ${edge.source} -> ${edge.target.node}

const fs = require('fs');
const path = require('path');

const INPUT = path.resolve('.graft/session/node_outputs/${source}.json');
const OUTPUT = path.resolve('.graft/session/node_outputs/${source}_to_${target}.json');
const TOKEN_LOG = path.resolve('.graft/token_log.txt');

if (!fs.existsSync(INPUT)) {
  // Graceful no-op: this hook fires on ALL Write calls,
  // so the source output may not exist yet (different agent writing).
  process.exit(0);
}

const raw = fs.readFileSync(INPUT, 'utf-8');
const data = JSON.parse(raw);
${helperSection}
${transformCode}

const output = ${stringify};
fs.writeFileSync(OUTPUT, output);

// Token accounting
const original = Buffer.byteLength(raw);
const transformed = Buffer.byteLength(output);
const reduction = original > 0 ? Math.round((original - transformed) * 100 / original) : 0;
const timestamp = new Date().toISOString().replace('T', ' ').slice(0, 19);

fs.appendFileSync(TOKEN_LOG, \`[\${timestamp}] Edge ${edge.source}->${edge.target.node} | \${original}B -> \${transformed}B (\${reduction}% reduction)\\n\`);
`;
}
|
|
54
|
+
// Compact: recursively remove null, undefined, empty strings, empty arrays, empty objects
// Source text of the `compact` helper, embedded verbatim into generated hook
// scripts when the transform chain includes `compact`. It must remain valid
// standalone JS: it executes inside the generated hook process, not here.
const COMPACT_FN = `function isEmpty(v) {
  if (v === null || v === undefined || v === '') return true;
  if (Array.isArray(v) && v.length === 0) return true;
  if (typeof v === 'object' && !Array.isArray(v) && Object.keys(v).length === 0) return true;
  return false;
}
function compact(data) {
  if (data === null || data === undefined) return undefined;
  if (typeof data !== 'object') return data;
  if (Array.isArray(data)) {
    return data.map(item => compact(item)).filter(item => !isEmpty(item));
  }
  const result = {};
  for (const [key, val] of Object.entries(data)) {
    const c = compact(val);
    if (!isEmpty(c)) result[key] = c;
  }
  return result;
}`;
|
|
74
|
+
// Truncate: proportionally reduce content to fit within token budget
// Source text of the `truncate` helpers, embedded verbatim into generated
// hook scripts when the transform chain includes `truncate`. Uses a
// ~4-chars-per-token heuristic (maxTokens * 4) and shrinks strings/arrays
// proportionally by the overshoot ratio.
const TRUNCATE_FN = `function truncate(data, maxTokens) {
  const maxChars = maxTokens * 4;
  const json = JSON.stringify(data);
  if (json.length <= maxChars) return data;
  if (typeof data === 'string') return data.slice(0, maxChars) + '...';
  if (typeof data !== 'object' || data === null) return data;
  const ratio = maxChars / json.length;
  return truncateDeep(data, ratio);
}
function truncateDeep(data, ratio) {
  if (data === null || data === undefined) return data;
  if (typeof data === 'string') {
    const maxLen = Math.max(10, Math.floor(data.length * ratio));
    return data.length > maxLen ? data.slice(0, maxLen) + '...' : data;
  }
  if (Array.isArray(data)) {
    const maxItems = Math.max(1, Math.floor(data.length * ratio));
    return data.slice(0, maxItems).map(item => truncateDeep(item, ratio));
  }
  if (typeof data === 'object') {
    const result = {};
    for (const [key, val] of Object.entries(data)) result[key] = truncateDeep(val, ratio);
    return result;
  }
  return data;
}`;
|
|
101
|
+
/**
 * Compile a list of edge transforms into the body of a generated hook script.
 *
 * Produces a sequence of statements that build `result` from `data`:
 * field selection (or a shallow copy), drops, filters, optional compaction,
 * and an optional token-budget truncation — in that order.
 *
 * Returns { code, isCompact, needsCompactFn, needsTruncateFn } so the caller
 * can choose the stringify style and splice in helper function sources.
 */
function transformsToJs(transforms) {
    const picked = [];
    const removed = [];
    const filters = [];
    let compactRequested = false;
    let tokenLimit = null;
    for (const transform of transforms) {
        if (transform.type === 'select') {
            picked.push(...transform.fields);
        }
        else if (transform.type === 'drop') {
            removed.push(transform.field);
        }
        else if (transform.type === 'filter') {
            filters.push(filterToJs(transform));
        }
        else if (transform.type === 'compact') {
            compactRequested = true;
        }
        else if (transform.type === 'truncate') {
            tokenLimit = transform.tokens;
        }
    }
    const statements = [];
    if (picked.length === 0) {
        // No select transform: start from a shallow copy of the full payload.
        statements.push('let result = { ...data };');
    }
    else {
        const entries = picked
            .map(f => `  ${JSON.stringify(f)}: data[${JSON.stringify(f)}]`)
            .join(',\n');
        statements.push(`let result = {\n${entries}\n};`);
    }
    statements.push(...removed.map(f => `delete result[${JSON.stringify(f)}];`));
    statements.push(...filters);
    if (compactRequested) {
        statements.push('result = compact(result);');
    }
    if (tokenLimit !== null) {
        statements.push(`result = truncate(result, ${tokenLimit});`);
    }
    return {
        code: statements.join('\n'),
        isCompact: compactRequested,
        needsCompactFn: compactRequested,
        needsTruncateFn: tokenLimit !== null,
    };
}
|
|
153
|
+
/**
 * Translate one filter transform into a JS statement (for the generated hook)
 * that filters the named array field of `result`.
 *
 * Only binary conditions are translated; anything else falls back to a
 * harmless self-assignment no-op. `==`/`!=` are tightened to `===`/`!==`.
 */
function filterToJs(t) {
    const { field, condition } = t;
    const key = JSON.stringify(field);
    if (condition.kind !== 'binary') {
        // Untranslatable condition: emit a no-op so the pipeline still runs.
        return `result[${key}] = result[${key}];`;
    }
    // Item property to compare: last segment of a field access, or the
    // formatted expression otherwise.
    let fieldName;
    if (condition.left.kind === 'field_access') {
        const segments = condition.left.segments;
        fieldName = segments[segments.length - 1];
    }
    else {
        fieldName = formatExpr(condition.left);
    }
    // Right-hand side: JSON-encode literals, format everything else.
    let valueStr;
    if (condition.right.kind === 'literal') {
        valueStr = JSON.stringify(condition.right.value);
    }
    else {
        valueStr = formatExpr(condition.right);
    }
    let op = condition.op;
    if (op === '==') {
        op = '===';
    }
    else if (op === '!=') {
        op = '!==';
    }
    return `result[${key}] = (result[${key}] || []).filter(item => item[${JSON.stringify(fieldName)}] ${op} ${valueStr});`;
}
|
|
@@ -0,0 +1,250 @@
|
|
|
1
|
+
import { ProgramIndex } from '../program-index.js';
|
|
2
|
+
import { formatExpr } from '../format.js';
|
|
3
|
+
/**
 * Render the top-level orchestration markdown document for a program.
 *
 * Uses the program's first graph only; returns '' when no graph exists.
 * The document contains: optional parameters section, budget summary from
 * the token report, optional persistent-memory section, the execution plan
 * (delegated to generateSteps), and fixed budget-tracking / failure-recovery
 * guidance.
 */
export function generateOrchestration(program, report) {
    const graph = program.graphs[0];
    if (!graph)
        return '';
    const index = new ProgramIndex(program);
    const memoryNames = new Set(program.memories.map(m => m.name));
    // Map "Source->Target" to its transforms, for direct edges that have any.
    const edgeMap = new Map();
    for (const edge of program.edges) {
        if (edge.target.kind === 'direct' && edge.transforms.length > 0) {
            edgeMap.set(`${edge.source}->${edge.target.node}`, { transforms: edge.transforms });
        }
    }
    const { text: steps } = generateSteps(graph.flow, report, edgeMap, 1, null, index.nodeMap, memoryNames);
    // Memory preamble
    const memorySection = program.memories.length > 0
        ? `
## Persistent Memory
${program.memories.map(m => `- \`${m.name}\`: \`.graft/memory/${m.name.toLowerCase()}.json\` (${m.maxTokens.toLocaleString('en-US')} tokens max)`).join('\n')}
- Memories persist across runs. Nodes with \`writes\` clauses update memory after execution.

`
        : '';
    const paramsSection = graph.params.length > 0
        ? `\n## Parameters\n${graph.params.map(p => `- ${p.name}: ${p.type}${p.default !== undefined ? ` (default: ${p.default})` : ''}`).join('\n')}\n`
        : '';
    return `# Graft Orchestration: ${graph.name}

> Auto-generated by Graft Compiler. Edit the .gft source, not this file.
${paramsSection}
## Budget
Total: ${graph.budget.toLocaleString('en-US')} tokens
Best case: ${report.bestCase.toLocaleString('en-US')} tokens
Worst case: ${report.worstCase.toLocaleString('en-US')} tokens
${memorySection}
## Execution Plan
${steps}
## Token Budget Tracking
Check \`.graft/token_log.txt\` after each step.
- 80% consumed: switch remaining agents to compact mode
- 90% consumed: skip non-critical agents

## Failure Recovery
- Agent failure: follow on_failure policy in each agent definition
- Token overrun: switch to compact mode, then skip non-critical steps
- Complete failure: intermediate results preserved in \`.graft/session/\`
`;
}
|
|
50
|
+
/**
 * Render a transform chain as a human-readable phrase for the orchestration
 * doc, e.g. "keep only fields: `a`, `b`, then remove field `c`".
 * Unknown transform types are silently skipped.
 */
function describeTransforms(transforms) {
    const phrase = (t) => {
        if (t.type === 'select') {
            const names = t.fields.map(f => `\`${f}\``).join(', ');
            return `keep only fields: ${names}`;
        }
        if (t.type === 'drop') {
            return `remove field \`${t.field}\``;
        }
        if (t.type === 'compact') {
            return 'minify JSON (no whitespace)';
        }
        if (t.type === 'filter') {
            return `filter \`${t.field}\` array`;
        }
        if (t.type === 'truncate') {
            return `truncate to ${t.tokens} tokens`;
        }
        return null;
    };
    return transforms
        .flatMap(t => {
            const p = phrase(t);
            return p === null ? [] : [p];
        })
        .join(', then ');
}
|
|
73
|
+
/**
 * Render the "## Execution Plan" markdown for a flow.
 *
 * Walks the flow steps in order, emitting one "### Step N" section per step
 * and threading predecessor state between iterations:
 *   - `prev`: the immediately preceding sequential node name (single input)
 *   - `prevParallelBranches`: branch names of an immediately preceding
 *     parallel block (multiple inputs)
 *
 * @param flow        Array of flow steps (node/parallel/foreach/let/graph_call).
 * @param report      Token report; per-node estimates looked up by name.
 * @param edgeMap     "Source->Target" -> { transforms } for transforming direct edges.
 * @param startStep   1-based number of the first emitted step.
 * @param prevNode    Sequential predecessor carried in from the caller, or null.
 * @param nodeMap     Node name -> node declaration.
 * @param memoryNames Set of declared memory names.
 * @returns { text, nextStep, lastNode } — rendered markdown, next step number,
 *          and the last sequential node (null after parallel/foreach/graph_call).
 */
function generateSteps(flow, report, edgeMap, startStep, prevNode, nodeMap, memoryNames) {
    let text = '';
    let stepNum = startStep;
    let prev = prevNode;
    let prevParallelBranches = [];
    for (const step of flow) {
        switch (step.kind) {
            case 'node': {
                const lowerName = step.name.toLowerCase();
                const nodeReport = report.nodes.find(n => n.name === step.name);
                let inputSource = '';
                let transformNote = '';
                if (prev) {
                    // Single predecessor (sequential)
                    const edgeInfo = edgeMap.get(`${prev}->${step.name}`);
                    if (edgeInfo) {
                        // Transforming edge: input is the per-edge transformed file.
                        inputSource = `\n- Input: \`.graft/session/node_outputs/${prev.toLowerCase()}_to_${lowerName}.json\``;
                        transformNote = `\n- **Edge transform**: After ${prev} completes, transform its output: ${describeTransforms(edgeInfo.transforms)}. Save to \`.graft/session/node_outputs/${prev.toLowerCase()}_to_${lowerName}.json\` before starting ${step.name}.`;
                    }
                    else {
                        inputSource = `\n- Input: \`.graft/session/node_outputs/${prev.toLowerCase()}.json\``;
                    }
                }
                else if (prevParallelBranches.length > 0) {
                    // Multiple predecessors (after parallel block)
                    const inputs = [];
                    const transforms = [];
                    for (const branch of prevParallelBranches) {
                        const edgeInfo = edgeMap.get(`${branch}->${step.name}`);
                        if (edgeInfo) {
                            const transformedPath = `.graft/session/node_outputs/${branch.toLowerCase()}_to_${lowerName}.json`;
                            inputs.push(`\`${transformedPath}\``);
                            transforms.push(`- **Edge transform** (${branch} → ${step.name}): ${describeTransforms(edgeInfo.transforms)}. Run: \`node .claude/hooks/${branch.toLowerCase()}-to-${lowerName}.js\` or manually apply. Output: \`${transformedPath}\``);
                        }
                        else {
                            inputs.push(`\`.graft/session/node_outputs/${branch.toLowerCase()}.json\``);
                        }
                    }
                    inputSource = `\n- Inputs: ${inputs.join(', ')}`;
                    if (transforms.length > 0) {
                        transformNote = '\n' + transforms.join('\n');
                    }
                }
                // Memory load/save annotations plus any produces-type inputs not
                // already covered by the edge resolution above.
                let memoryLines = '';
                let additionalInputs = '';
                const nodeDecl = nodeMap.get(step.name);
                if (nodeDecl) {
                    const memReads = nodeDecl.reads.filter(r => memoryNames.has(r.context));
                    for (const mr of memReads) {
                        memoryLines += `\n- Memory load: \`.graft/memory/${mr.context.toLowerCase()}.json\``;
                    }
                    for (const w of nodeDecl.writes) {
                        if (memoryNames.has(w.memory)) {
                            memoryLines += `\n- Memory save: \`.graft/memory/${w.memory.toLowerCase()}.json\``;
                        }
                    }
                    // List produces-type reads that aren't covered by edges
                    for (const ref of nodeDecl.reads) {
                        if (memoryNames.has(ref.context))
                            continue; // already handled as memory
                        // Find the node that produces this type
                        const producerNode = [...nodeMap.values()].find(n => n.produces.name === ref.context);
                        if (!producerNode || producerNode.name === step.name)
                            continue;
                        // Skip if already covered by edge-based input/transform
                        const edgeKey = `${producerNode.name}->${step.name}`;
                        if (edgeMap.has(edgeKey))
                            continue;
                        // Skip if already in prevParallelBranches list (these are covered by the parallel input resolution above)
                        if (prevParallelBranches.includes(producerNode.name))
                            continue;
                        // Skip if this is the immediate sequential predecessor (already covered)
                        if (prev === producerNode.name)
                            continue;
                        additionalInputs += `\n- Also reads: \`.graft/session/node_outputs/${producerNode.name.toLowerCase()}.json\` (${ref.context})`;
                    }
                }
                text += `
### Step ${stepNum}: ${step.name} [sequential]
- Agent: ${lowerName}${inputSource}${transformNote}${additionalInputs}${memoryLines}
- Expected tokens: input ~${nodeReport?.estimatedIn.toLocaleString('en-US') || '?'} / output ~${nodeReport?.estimatedOut.toLocaleString('en-US') || '?'}
- Completion: \`===NODE_COMPLETE:${lowerName}===\`
- Output: \`.graft/session/node_outputs/${lowerName}.json\`
`;
                prev = step.name;
                prevParallelBranches = [];
                stepNum++;
                break;
            }
            case 'parallel': {
                const branchList = step.branches.join(', ');
                text += `
### Step ${stepNum}: [parallel] ${branchList}
- **Dispatch all ${step.branches.length} agents concurrently** using the Agent tool in a single message
- Wait for all to complete before proceeding
`;
                // Collect edge transforms from preceding sequential node into parallel branches
                const incomingTransforms = [];
                for (const branchName of step.branches) {
                    const lowerName = branchName.toLowerCase();
                    const nodeReport = report.nodes.find(n => n.name === branchName);
                    let branchInputNote = '';
                    if (prev) {
                        const edgeInfo = edgeMap.get(`${prev}->${branchName}`);
                        if (edgeInfo) {
                            const transformedPath = `.graft/session/node_outputs/${prev.toLowerCase()}_to_${lowerName}.json`;
                            branchInputNote = ` [input: \`${transformedPath}\`]`;
                            incomingTransforms.push(`- **Edge transform** (${prev} → ${branchName}): ${describeTransforms(edgeInfo.transforms)}. Run: \`node .claude/hooks/${prev.toLowerCase()}-to-${lowerName}.js\` or manually apply. Output: \`${transformedPath}\``);
                        }
                        else {
                            branchInputNote = ` [input: \`.graft/session/node_outputs/${prev.toLowerCase()}.json\`]`;
                        }
                    }
                    let branchMemAnnotations = '';
                    const branchDecl = nodeMap.get(branchName);
                    if (branchDecl) {
                        const memReads = branchDecl.reads.filter(r => memoryNames.has(r.context));
                        for (const mr of memReads) {
                            branchMemAnnotations += ` [mem-read: ${mr.context.toLowerCase()}]`;
                        }
                        for (const w of branchDecl.writes) {
                            if (memoryNames.has(w.memory)) {
                                branchMemAnnotations += ` [mem-write: ${w.memory.toLowerCase()}]`;
                            }
                        }
                    }
                    text += `- Agent: ${lowerName} -- tokens: input ~${nodeReport?.estimatedIn.toLocaleString('en-US') || '?'} / output ~${nodeReport?.estimatedOut.toLocaleString('en-US') || '?'}${branchInputNote}${branchMemAnnotations}
`;
                }
                if (incomingTransforms.length > 0) {
                    text += incomingTransforms.join('\n') + '\n';
                }
                text += `- Completion: all ${step.branches.length} \`===NODE_COMPLETE===\` signals received
`;
                // Track parallel branches for downstream edge resolution
                prevParallelBranches = [...step.branches];
                prev = null;
                stepNum++;
                break;
            }
            case 'foreach': {
                text += `
### Step ${stepNum}: [foreach over ${step.source}.output.${step.field}, max ${step.maxIterations} iterations]
- For each \`${step.binding}\` in list:
`;
                // Sub-steps are lettered a, b, c... per node-kind body step.
                let subLetter = 'a';
                for (const bodyStep of step.body) {
                    if (bodyStep.kind === 'node') {
                        text += `  - Sub-step ${stepNum}${subLetter}: ${bodyStep.name} [foreach-body]
`;
                        subLetter = String.fromCharCode(subLetter.charCodeAt(0) + 1);
                    }
                }
                text += `- Completion: all iterations done or list exhausted
`;
                // NOTE(review): prev is cleared here but prevParallelBranches is
                // not, so a parallel block immediately before this foreach would
                // still be treated as predecessor of the NEXT node — confirm
                // this is intended.
                prev = null;
                stepNum++;
                break;
            }
            case 'let':
                // Data bindings do not produce node output, so `prev` is left as-is.
                text += `
### Step ${stepNum}: [data binding] let ${step.name}
- Bind: \`${step.name}\` = \`${formatExpr(step.value)}\`
`;
                stepNum++;
                break;
            case 'graph_call':
                text += `
### Step ${stepNum}: [sub-pipeline] ${step.name}(${step.args.map(a => `${a.name}: ${formatExpr(a.value)}`).join(', ')})
- Execute graph \`${step.name}\` with parameters
`;
                stepNum++;
                prev = null;
                break;
        }
    }
    return { text, nextStep: stepNum, lastNode: prev };
}
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import { Program } from '../parser/ast.js';
|
|
2
|
+
import { ProgramIndex } from '../program-index.js';
|
|
3
|
+
/**
 * Shape of the generated settings payload for a compiled Graft program:
 * default model, tool permissions, compiler metadata (budget thresholds and
 * model routing), and PostToolUse hook wiring for edge transforms.
 */
export interface GraftSettings {
    /** Default model identifier applied when no per-node override matches. */
    model: string;
    permissions: {
        /** Tool names the orchestrator is permitted to use. */
        allow: string[];
    };
    /** Compiler-emitted metadata. */
    graft: {
        /** Graft compiler version that produced these settings. */
        version: string;
        /** Source file the settings were compiled from. */
        source: string;
        /** ISO-8601 timestamp of compilation. */
        compiled_at: string;
        budget: {
            /** Total token budget of the graph (0 when no graph declared). */
            total: number;
            /** Budget fraction at which a warning is emitted. */
            warning_threshold: number;
            /** Budget fraction at which critical handling kicks in. */
            critical_threshold: number;
        };
        model_routing: {
            /** Fallback model for nodes without an explicit override. */
            default: string;
            /** Lower-cased node name -> resolved model identifier. */
            overrides: Record<string, string>;
        };
    };
    hooks: {
        /** Hook entries fired after tool use (edge-transform hooks). */
        PostToolUse: HookEntry[];
    };
}
/** A single command executed by a hook. */
interface HookCommand {
    type: 'command';
    /** Command line to run. */
    command: string;
    /** Optional tool-call matcher guarding execution. */
    if?: string;
}
/** Hook commands grouped under one tool-name matcher. */
interface HookEntry {
    matcher: string;
    hooks: HookCommand[];
}
export declare function generateSettings(program: Program, sourceFile: string, index?: ProgramIndex): GraftSettings;
export {};
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
import { MODEL_MAP, BUDGET_WARNING_THRESHOLD, BUDGET_CRITICAL_THRESHOLD } from '../constants.js';
|
|
2
|
+
import { VERSION } from '../version.js';
|
|
3
|
+
import { ProgramIndex } from '../program-index.js';
|
|
4
|
+
/**
 * Find the first 'node'-kind FlowNode name from a FlowNode array, descending
 * into foreach bodies. Returns undefined if no node-kind step exists.
 */
function findFirstNodeName(flow) {
    for (const step of flow) {
        switch (step.kind) {
            case 'node':
                return step.name;
            case 'parallel':
                // First declared branch stands in for the parallel block.
                return step.branches[0];
            case 'foreach': {
                // Fix: previously this arm did `return findFirstNodeName(step.body)`
                // unconditionally, so a foreach whose body contained no node-kind
                // step (e.g. only let/graph_call) short-circuited the whole search
                // and returned undefined even when a later step had a node.
                // Now the search continues past an empty foreach.
                const inner = findFirstNodeName(step.body);
                if (inner !== undefined)
                    return inner;
                break;
            }
            case 'let':
                break;
            case 'graph_call':
                break;
        }
    }
    return undefined;
}
|
|
25
|
+
/**
 * Build the settings object for a compiled Graft program.
 *
 * The default model is taken from the first node in the first graph's flow
 * (falling back to MODEL_MAP.sonnet); nodes whose resolved model differs get
 * an entry in model_routing.overrides. Every transforming direct edge gets a
 * PostToolUse hook command, all merged under a single "Write" matcher.
 *
 * @param program    Parsed program.
 * @param sourceFile Source path recorded in the metadata block.
 * @param index      Optional pre-built ProgramIndex (built here if omitted).
 */
export function generateSettings(program, sourceFile, index) {
    const programIndex = index ?? new ProgramIndex(program);
    const graph = program.graphs[0];
    // Default model: resolved model of the flow's first node, else sonnet.
    const entryNodeName = graph ? findFirstNodeName(graph.flow) : undefined;
    const entryModel = entryNodeName
        ? programIndex.nodeMap.get(entryNodeName)?.model
        : undefined;
    const defaultModel = entryModel
        ? (MODEL_MAP[entryModel] || entryModel)
        : MODEL_MAP.sonnet;
    // Per-node overrides for any node whose resolved model differs.
    const overrides = {};
    for (const node of program.nodes) {
        const resolvedModel = MODEL_MAP[node.model] || node.model;
        if (resolvedModel !== defaultModel) {
            overrides[node.name.toLowerCase()] = resolvedModel;
        }
    }
    // Collect all hook commands, then merge into a single "Write" matcher entry.
    const hookCommands = [];
    for (const edge of program.edges) {
        if (edge.transforms.length === 0 || edge.target.kind !== 'direct') {
            continue;
        }
        const from = edge.source.toLowerCase();
        const to = edge.target.node.toLowerCase();
        hookCommands.push({
            type: 'command',
            command: `node .claude/hooks/${from}-to-${to}.js`,
            if: `Write(.graft/session/node_outputs/${from}.json)`,
        });
    }
    const hookEntries = hookCommands.length > 0
        ? [{ matcher: 'Write', hooks: hookCommands }]
        : [];
    return {
        model: defaultModel,
        permissions: {
            allow: ['Read', 'Write', 'Edit', 'Bash', 'Skill'],
        },
        graft: {
            version: VERSION,
            source: sourceFile,
            compiled_at: new Date().toISOString(),
            budget: {
                total: graph?.budget || 0,
                warning_threshold: BUDGET_WARNING_THRESHOLD,
                critical_threshold: BUDGET_CRITICAL_THRESHOLD,
            },
            model_routing: {
                default: defaultModel,
                overrides,
            },
        },
        hooks: {
            PostToolUse: hookEntries,
        },
    };
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { TokenReport } from './analyzer/estimator.js';
|
|
2
|
+
import { GeneratedFile } from './codegen/codegen.js';
|
|
3
|
+
import { GraftError } from './errors/diagnostics.js';
|
|
4
|
+
import { Program } from './parser/ast.js';
|
|
5
|
+
import { ProgramIndex } from './program-index.js';
|
|
6
|
+
/** Result of the compiler front end (lex, parse, resolve, analyze) — no codegen. */
export interface ProgramResult {
    /** True when no errors were produced. */
    success: boolean;
    /** Parsed (and import-resolved) program; may be present even on failure. */
    program?: Program;
    /** Declaration index built over the resolved program. */
    index?: ProgramIndex;
    /** Token estimation report; present only on success. */
    report?: TokenReport;
    /** Hard errors (lex/parse/resolve/analysis). */
    errors: GraftError[];
    /** Non-fatal diagnostics, including token-budget warnings. */
    warnings: GraftError[];
}
/** ProgramResult plus the generated output files from codegen. */
export interface CompileResult extends ProgramResult {
    files?: GeneratedFile[];
}
export declare function compileToProgram(source: string, sourceFile: string): ProgramResult;
export declare function compileAndGenerate(source: string, sourceFile: string): CompileResult;
/** Backward-compatible alias for compileAndGenerate */
export declare function compile(source: string, sourceFile: string): CompileResult;
export declare function compileAndWrite(source: string, sourceFile: string, outDir: string): CompileResult;
|
package/dist/compiler.js
ADDED
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
import * as path from 'node:path';
|
|
2
|
+
import { Lexer } from './lexer/lexer.js';
|
|
3
|
+
import { Parser } from './parser/parser.js';
|
|
4
|
+
import { resolve } from './resolver/resolver.js';
|
|
5
|
+
import { ScopeChecker } from './analyzer/scope.js';
|
|
6
|
+
import { TypeChecker } from './analyzer/types.js';
|
|
7
|
+
import { TokenEstimator } from './analyzer/estimator.js';
|
|
8
|
+
import { generate, writeFiles } from './codegen/codegen.js';
|
|
9
|
+
import { GraftError } from './errors/diagnostics.js';
|
|
10
|
+
import { ProgramIndex } from './program-index.js';
|
|
11
|
+
/**
 * Run the compiler front end: lex, parse, resolve imports, then analyze
 * (scope, types, token estimation). No code is generated here.
 *
 * Each phase short-circuits with { success: false, ... } on hard errors;
 * the partially-built program is still returned where available so callers
 * (e.g. the LSP) can work with it.
 */
export function compileToProgram(source, sourceFile) {
    const errors = [];
    const warnings = [];
    // Lex
    let tokens;
    try {
        const lexer = new Lexer(source);
        tokens = lexer.tokenize();
    }
    catch (e) {
        // Lexer reports failures by throwing GraftError; anything else is a bug.
        if (e instanceof GraftError) {
            return { success: false, errors: [e], warnings };
        }
        throw e;
    }
    // Parse
    const { program: parsedProgram, errors: parseErrors } = new Parser(tokens).parse();
    let program = parsedProgram;
    errors.push(...parseErrors);
    if (parseErrors.length > 0) {
        return { success: false, program, errors, warnings };
    }
    // Set sourceFile on all entry declarations
    const absSourceFile = path.resolve(sourceFile);
    for (const c of program.contexts)
        c.sourceFile = absSourceFile;
    for (const n of program.nodes)
        n.sourceFile = absSourceFile;
    // Resolve imports
    if (program.imports.length > 0) {
        const resolveResult = resolve(program, sourceFile);
        if (resolveResult.errors.length > 0) {
            errors.push(...resolveResult.errors);
            return { success: false, program, errors, warnings };
        }
        // Resolution may rewrite the program (merged imports).
        program = resolveResult.program;
    }
    // Build ProgramIndex once (after resolve, before analyzers)
    const index = new ProgramIndex(program);
    // Analyze: scope
    const scopeDiagnostics = new ScopeChecker(program, index).check();
    // Analyze: types (ratchet v3.0-R4: TypeChecker migrated to ProgramIndex)
    const typeDiagnostics = new TypeChecker(program, index).check();
    // Separate errors from warnings
    for (const d of [...scopeDiagnostics, ...typeDiagnostics]) {
        if (d.severity === 'warning') {
            warnings.push(d);
        }
        else {
            errors.push(d);
        }
    }
    if (errors.length > 0) {
        return { success: false, program, index, errors, warnings };
    }
    // Analyze: tokens (estimation only runs on otherwise-clean programs)
    const report = new TokenEstimator(program, index).estimate();
    warnings.push(...report.warnings);
    return { success: true, program, index, report, errors, warnings };
}
|
|
71
|
+
/**
 * Full compilation: front-end analysis (compileToProgram) followed by code
 * generation. A program that parses cleanly but declares no graph fails with
 * a GRAPH_MISSING error, since codegen requires one.
 */
export function compileAndGenerate(source, sourceFile) {
    const analyzed = compileToProgram(source, sourceFile);
    if (!analyzed.success || !analyzed.program) {
        return analyzed;
    }
    // Guard: codegen prerequisite is at least one graph declaration.
    if (analyzed.program.graphs.length === 0) {
        const missingGraph = new GraftError('No graph declaration found', { line: 1, column: 1, offset: 0 }, 'error', 'GRAPH_MISSING');
        return {
            ...analyzed,
            success: false,
            errors: analyzed.errors.concat(missingGraph),
        };
    }
    // Generate
    const files = generate(analyzed.program, analyzed.report, sourceFile, analyzed.index);
    return { ...analyzed, files };
}
|
|
91
|
+
/**
 * Backward-compatible alias for compileAndGenerate.
 *
 * @param source     Graft source text.
 * @param sourceFile Path the source was read from (used for import resolution
 *                   and metadata).
 * @returns The CompileResult from compileAndGenerate, unchanged.
 */
export function compile(source, sourceFile) {
    return compileAndGenerate(source, sourceFile);
}
|
|
95
|
+
/**
 * Compile a source file and, on success, write the generated files to disk
 * under outDir. The compile result is returned either way so callers can
 * inspect errors and warnings.
 */
export function compileAndWrite(source, sourceFile, outDir) {
    const compiled = compile(source, sourceFile);
    // Only a successful compile that actually produced files touches disk.
    const shouldWrite = compiled.success && compiled.files;
    if (shouldWrite) {
        writeFiles(compiled.files, outDir);
    }
    return compiled;
}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
/**
 * Maps model alias names (e.g. "sonnet") to concrete model identifiers.
 * Call sites fall through to the raw name for unknown keys
 * (`MODEL_MAP[x] || x`).
 */
export declare const MODEL_MAP: Record<string, string>;
/** Fraction of tokens estimated for a single-field read (vs full context). */
export declare const PARTIAL_FIELD_FACTOR = 0.3;
/** Budget fraction at which to emit a warning. */
export declare const BUDGET_WARNING_THRESHOLD = 0.8;
/** Budget fraction at which to emit a critical warning. */
export declare const BUDGET_CRITICAL_THRESHOLD = 0.9;
/** Maximum depth for conditional chain traversal in both estimation and runtime. */
export declare const MAX_CONDITIONAL_HOPS = 10;
|