clementine-agent 1.2.2 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agent/assistant.js +12 -0
- package/dist/cli/dashboard.js +3034 -734
- package/dist/cli/static/LICENSE-NOTICES.md +12 -0
- package/dist/cli/static/drawflow.min.css +1 -0
- package/dist/cli/static/drawflow.min.js +1 -0
- package/dist/config.d.ts +11 -0
- package/dist/config.js +16 -0
- package/dist/dashboard/builder/dry-run.d.ts +31 -0
- package/dist/dashboard/builder/dry-run.js +138 -0
- package/dist/dashboard/builder/events.d.ts +23 -0
- package/dist/dashboard/builder/events.js +28 -0
- package/dist/dashboard/builder/mcp-invoke.d.ts +25 -0
- package/dist/dashboard/builder/mcp-invoke.js +143 -0
- package/dist/dashboard/builder/runner.d.ts +68 -0
- package/dist/dashboard/builder/runner.js +418 -0
- package/dist/dashboard/builder/serializer.d.ts +79 -0
- package/dist/dashboard/builder/serializer.js +547 -0
- package/dist/dashboard/builder/snapshots.d.ts +32 -0
- package/dist/dashboard/builder/snapshots.js +138 -0
- package/dist/dashboard/builder/validation.d.ts +26 -0
- package/dist/dashboard/builder/validation.js +183 -0
- package/dist/gateway/router.js +31 -2
- package/dist/index.js +38 -0
- package/dist/memory/chunker.js +13 -2
- package/dist/memory/hot-cache.d.ts +38 -0
- package/dist/memory/hot-cache.js +73 -0
- package/dist/memory/integrity.d.ts +28 -0
- package/dist/memory/integrity.js +119 -0
- package/dist/memory/maintenance.d.ts +23 -2
- package/dist/memory/maintenance.js +140 -3
- package/dist/memory/store.d.ts +259 -2
- package/dist/memory/store.js +751 -21
- package/dist/memory/write-queue.d.ts +96 -0
- package/dist/memory/write-queue.js +165 -0
- package/dist/tools/builder-tools.d.ts +13 -0
- package/dist/tools/builder-tools.js +437 -0
- package/dist/tools/mcp-server.js +2 -0
- package/dist/tools/memory-tools.js +38 -1
- package/dist/types.d.ts +56 -2
- package/package.json +2 -2
- package/vault/00-System/skills/builder-canvas.md +126 -0
|
@@ -0,0 +1,547 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Builder serializer.
|
|
3
|
+
*
|
|
4
|
+
* Two responsibilities:
|
|
5
|
+
*
|
|
6
|
+
* 1. Unified read/write of crons + workflows as WorkflowDefinition objects
|
|
7
|
+
* so the visual canvas can edit both with one shape. CRON.md entries
|
|
8
|
+
* round-trip as virtual single-step workflows; multi-step workflow
|
|
9
|
+
* files round-trip via the existing workflow-runner format.
|
|
10
|
+
*
|
|
11
|
+
* 2. Convert WorkflowDefinition ⇄ Drawflow's canvas data shape so the
|
|
12
|
+
* frontend can drop the JSON straight into drawflow.import().
|
|
13
|
+
*
|
|
14
|
+
* Backwards-compatible: existing CRON.md and workflow files are unchanged
|
|
15
|
+
* unless edited through this module, and edits preserve unrelated fields.
|
|
16
|
+
*/
|
|
17
|
+
import { existsSync, readFileSync, readdirSync, writeFileSync, mkdirSync } from 'node:fs';
|
|
18
|
+
import path from 'node:path';
|
|
19
|
+
import matter from 'gray-matter';
|
|
20
|
+
import yaml from 'js-yaml';
|
|
21
|
+
import { CRON_FILE, WORKFLOWS_DIR } from '../../config.js';
|
|
22
|
+
import { snapshotWorkflow } from './snapshots.js';
|
|
23
|
+
// ── ID scheme ───────────────────────────────────────────────────────
// Builder ids are "<origin>:<key>"; the prefix selects the backing store.
const CRON_ID_PREFIX = 'cron:';
const WORKFLOW_ID_PREFIX = 'workflow:';
/** Builder id for a CRON.md entry, keyed by its job name. */
export function cronId(name) {
    return `${CRON_ID_PREFIX}${name}`;
}
/** Builder id for a workflow file; a trailing '.md' is stripped from the key. */
export function workflowId(filename) {
    const base = filename.endsWith('.md') ? filename.slice(0, -'.md'.length) : filename;
    return `${WORKFLOW_ID_PREFIX}${base}`;
}
/** Split a builder id into { origin, key }, or null when the prefix is unknown. */
export function parseBuilderId(id) {
    const prefixes = [
        ['cron', CRON_ID_PREFIX],
        ['workflow', WORKFLOW_ID_PREFIX],
    ];
    for (const [origin, prefix] of prefixes) {
        if (id.startsWith(prefix))
            return { origin, key: id.slice(prefix.length) };
    }
    return null;
}
|
|
40
|
+
// ── List ────────────────────────────────────────────────────────────
/** Summaries of every CRON.md entry and workflow file, sorted by name. */
export function listAllForBuilder() {
    // Crons from CRON.md — each is a virtual single-step workflow.
    const cronSummaries = readCronJobs().map(job => ({
        id: cronId(job.name),
        origin: 'cron',
        name: job.name,
        description: '',
        enabled: job.enabled,
        schedule: job.schedule,
        stepCount: 1,
        sourceFile: CRON_FILE,
        agentSlug: job.agentSlug,
    }));
    // Workflows from the workflows dir.
    const workflowSummaries = [];
    if (existsSync(WORKFLOWS_DIR)) {
        for (const file of readdirSync(WORKFLOWS_DIR)) {
            if (!file.endsWith('.md'))
                continue;
            try {
                const wf = parseWorkflowFile(path.join(WORKFLOWS_DIR, file));
                workflowSummaries.push({
                    id: workflowId(file),
                    origin: 'workflow',
                    name: wf.name,
                    description: wf.description,
                    enabled: wf.enabled,
                    schedule: wf.trigger.schedule,
                    stepCount: wf.steps.length,
                    sourceFile: wf.sourceFile,
                    agentSlug: wf.agentSlug,
                });
            }
            catch {
                // Skip unparseable workflow files
            }
        }
    }
    return [...cronSummaries, ...workflowSummaries].sort((a, b) => a.name.localeCompare(b.name));
}
|
|
81
|
+
// ── Read ────────────────────────────────────────────────────────────
/**
 * Load a single workflow by builder id.
 * Cron ids resolve to a virtual single-step workflow; workflow ids load
 * from the workflows dir. Returns null when the id is unknown, the entry
 * is missing, or the file cannot be parsed.
 */
export function readWorkflow(id) {
    const parsed = parseBuilderId(id);
    if (parsed === null)
        return null;
    if (parsed.origin === 'cron') {
        const job = readCronJobs().find(j => j.name === parsed.key);
        return job ? cronJobToWorkflow(job) : null;
    }
    const file = path.join(WORKFLOWS_DIR, `${parsed.key}.md`);
    if (!existsSync(file))
        return null;
    try {
        return parseWorkflowFile(file);
    }
    catch {
        // Malformed workflow files read as missing.
        return null;
    }
}
|
|
102
|
+
/**
 * Parse CRON.md frontmatter into normalized cron-job records.
 *
 * Best-effort: returns [] when the file is missing or its frontmatter
 * fails to parse. Entries lacking any of name/schedule/prompt are skipped.
 * On-disk keys are snake_case; returned objects use camelCase
 * (max_turns -> maxTurns, work_dir -> workDir, ...). agentSlug accepts
 * either spelling (agentSlug or agent_slug).
 */
function readCronJobs() {
    if (!existsSync(CRON_FILE))
        return [];
    const raw = readFileSync(CRON_FILE, 'utf-8');
    let parsed;
    try {
        parsed = matter(raw);
    }
    catch {
        // Unparseable frontmatter reads as "no jobs".
        return [];
    }
    const jobDefs = (parsed.data.jobs ?? []);
    const jobs = [];
    for (const job of jobDefs) {
        const name = String(job.name ?? '');
        const schedule = String(job.schedule ?? '');
        const prompt = String(job.prompt ?? '');
        // Required trio — skip incomplete entries rather than fail the read.
        if (!name || !schedule || !prompt)
            continue;
        jobs.push({
            name,
            schedule,
            prompt,
            // Enabled unless explicitly `enabled: false`.
            enabled: job.enabled !== false,
            tier: Number(job.tier ?? 1),
            maxTurns: job.max_turns != null ? Number(job.max_turns) : undefined,
            model: job.model != null ? String(job.model) : undefined,
            workDir: job.work_dir != null ? String(job.work_dir) : undefined,
            // Anything other than the literal 'unleashed' falls back to 'standard'.
            mode: job.mode === 'unleashed' ? 'unleashed' : 'standard',
            maxHours: job.max_hours != null ? Number(job.max_hours) : undefined,
            maxRetries: job.max_retries != null ? Number(job.max_retries) : undefined,
            after: job.after != null ? String(job.after) : undefined,
            successCriteria: Array.isArray(job.success_criteria)
                ? job.success_criteria.map(c => String(c))
                : undefined,
            // Only an explicit `true` is kept; anything else is omitted.
            alwaysDeliver: job.always_deliver === true ? true : undefined,
            context: job.context != null ? String(job.context) : undefined,
            preCheck: job.pre_check != null ? String(job.pre_check) : undefined,
            agentSlug: typeof job.agentSlug === 'string'
                ? job.agentSlug
                : typeof job.agent_slug === 'string'
                    ? job.agent_slug
                    : undefined,
        });
    }
    return jobs;
}
|
|
149
|
+
/**
 * Parse a workflow .md file (gray-matter frontmatter) into a
 * WorkflowDefinition.
 *
 * Throws when the frontmatter is not `type: workflow`; callers catch and
 * treat the file as missing/skippable. Missing fields receive defaults:
 * name falls back to the file basename, step ids to `step-N`, tier to 1,
 * maxTurns to 15.
 */
function parseWorkflowFile(filePath) {
    const raw = readFileSync(filePath, 'utf-8');
    const parsed = matter(raw);
    const data = parsed.data;
    if (data.type !== 'workflow') {
        throw new Error(`Not a workflow file (type=${String(data.type)}): ${filePath}`);
    }
    const name = String(data.name ?? path.basename(filePath, '.md'));
    const description = String(data.description ?? '');
    // Enabled unless explicitly `enabled: false`.
    const enabled = data.enabled !== false;
    const triggerRaw = (data.trigger ?? {});
    const trigger = {
        schedule: triggerRaw.schedule ? String(triggerRaw.schedule) : undefined,
        // Manual runs allowed unless explicitly `manual: false`.
        manual: triggerRaw.manual !== false,
    };
    // Normalize declared inputs; only 'number' is honored, any other type
    // string collapses to 'string'.
    const inputs = {};
    if (data.inputs && typeof data.inputs === 'object') {
        for (const [key, val] of Object.entries(data.inputs)) {
            const v = val;
            inputs[key] = {
                type: (v.type === 'number' ? 'number' : 'string'),
                default: v.default != null ? String(v.default) : undefined,
                description: v.description ? String(v.description) : undefined,
            };
        }
    }
    const stepsRaw = (data.steps ?? []);
    const steps = stepsRaw.map((s, i) => {
        const step = {
            id: String(s.id ?? `step-${i + 1}`),
            prompt: String(s.prompt ?? ''),
            dependsOn: Array.isArray(s.dependsOn) ? s.dependsOn.map(String) : [],
            tier: Number(s.tier ?? 1),
            maxTurns: Number(s.maxTurns ?? 15),
            model: s.model != null ? String(s.model) : undefined,
            workDir: s.workDir != null ? String(s.workDir) : undefined,
        };
        // 'prompt' is the implicit default kind and is never stored
        // (mirrors the omission in serializeStep).
        const kind = s.kind;
        if (kind && kind !== 'prompt')
            step.kind = kind;
        // Optional structured payloads pass through untouched when object-shaped.
        if (s.mcp && typeof s.mcp === 'object')
            step.mcp = s.mcp;
        if (s.channel && typeof s.channel === 'object')
            step.channel = s.channel;
        if (s.transform && typeof s.transform === 'object')
            step.transform = s.transform;
        if (s.conditional && typeof s.conditional === 'object')
            step.conditional = s.conditional;
        if (s.loop && typeof s.loop === 'object')
            step.loop = s.loop;
        // Canvas position is kept only when both coordinates are numeric.
        if (s.canvas && typeof s.canvas === 'object') {
            const c = s.canvas;
            if (typeof c.x === 'number' && typeof c.y === 'number')
                step.canvas = { x: c.x, y: c.y };
        }
        return step;
    });
    const synthesis = data.synthesis?.prompt
        ? { prompt: String(data.synthesis.prompt) }
        : undefined;
    return {
        name,
        description,
        enabled,
        trigger,
        inputs,
        steps,
        synthesis,
        sourceFile: filePath,
        agentSlug: typeof data.agentSlug === 'string' ? data.agentSlug : undefined,
    };
}
|
|
221
|
+
// ── Cron ⇄ Workflow ─────────────────────────────────────────────────
/** Wrap a CRON.md job as a virtual single-step workflow for the canvas. */
export function cronJobToWorkflow(job) {
    return {
        name: job.name,
        description: '',
        enabled: job.enabled,
        // Cron entries are schedule-driven only; no manual trigger.
        trigger: { schedule: job.schedule, manual: false },
        inputs: {},
        steps: [{
                id: 'main',
                prompt: job.prompt,
                dependsOn: [],
                tier: job.tier,
                maxTurns: job.maxTurns ?? 15,
                model: job.model,
                workDir: job.workDir,
                kind: 'prompt',
            }],
        sourceFile: CRON_FILE,
        agentSlug: job.agentSlug,
    };
}
|
|
244
|
+
/** True if a workflow is shaped like a CRON.md entry (single prompt step + cron schedule). */
export function isCronShape(wf) {
    if (wf.steps.length !== 1)
        return false;
    const onlyStep = wf.steps[0];
    const kind = onlyStep.kind ?? 'prompt';
    return kind === 'prompt' && Boolean(wf.trigger.schedule);
}
|
|
250
|
+
// ── Save ────────────────────────────────────────────────────────────
/**
 * Persist an edited workflow back to its source: a CRON.md entry for
 * cron ids (which must keep the cron shape), or a workflow file otherwise.
 */
export function saveWorkflow(id, wf) {
    const parsed = parseBuilderId(id);
    if (!parsed)
        return { ok: false, error: 'Unknown builder id: ' + id };
    // Snapshot the current state of the source file before overwriting.
    // Best-effort — failures here never block the save.
    const sourceBefore = sourceFileForId(id, parsed);
    if (sourceBefore) {
        try {
            snapshotWorkflow(id, sourceBefore);
        }
        catch { /* */ }
    }
    if (parsed.origin !== 'cron')
        return saveWorkflowFile(parsed.key, wf);
    return isCronShape(wf)
        ? saveCronEntry(parsed.key, wf)
        : { ok: false, error: 'Cron entry must remain a single prompt step with a cron schedule' };
}
|
|
272
|
+
/** Resolve the on-disk file path for a builder id (cron entries all share CRON_FILE). */
export function sourceFileForId(id, parsedHint) {
    const parsed = parsedHint ?? parseBuilderId(id);
    if (!parsed)
        return null;
    return parsed.origin === 'cron'
        ? CRON_FILE
        : path.join(WORKFLOWS_DIR, `${parsed.key}.md`);
}
|
|
281
|
+
/**
 * Write a cron-shaped workflow back into its CRON.md entry.
 *
 * The job is located by its *original* name (the canvas may have renamed
 * it), edited fields are merged over the existing entry (the spread keeps
 * unrelated keys such as mode/max_hours intact), and CRON.md's frontmatter
 * is rewritten while the markdown body is preserved.
 */
function saveCronEntry(originalName, wf) {
    if (!existsSync(CRON_FILE))
        return { ok: false, error: 'CRON.md does not exist' };
    const raw = readFileSync(CRON_FILE, 'utf-8');
    let parsed;
    try {
        parsed = matter(raw);
    }
    catch (err) {
        return { ok: false, error: 'CRON.md YAML parse error: ' + err.message };
    }
    const jobs = Array.isArray(parsed.data.jobs) ? [...parsed.data.jobs] : [];
    const idx = jobs.findIndex(j => String(j.name ?? '') === originalName);
    if (idx === -1)
        return { ok: false, error: 'Cron entry not found: ' + originalName };
    // saveWorkflow has already verified isCronShape, so steps[0] exists.
    const step = wf.steps[0];
    const prev = jobs[idx];
    const updated = {
        ...prev,
        name: wf.name,
        schedule: wf.trigger.schedule,
        prompt: step.prompt,
        enabled: wf.enabled,
        tier: step.tier,
    };
    // Optional step fields map back to CRON.md's snake_case keys.
    if (step.maxTurns != null)
        updated.max_turns = step.maxTurns;
    if (step.model != null)
        updated.model = step.model;
    if (step.workDir != null)
        updated.work_dir = step.workDir;
    if (wf.agentSlug)
        updated.agentSlug = wf.agentSlug;
    jobs[idx] = updated;
    parsed.data.jobs = jobs;
    // Re-emit updated frontmatter over the original markdown body.
    const out = matter.stringify(parsed.content ?? '', parsed.data);
    writeFileSync(CRON_FILE, out, 'utf-8');
    return { ok: true };
}
|
|
320
|
+
/** Read back the markdown body of an existing workflow file ('' when absent or unreadable). */
function readExistingBody(file) {
    if (!existsSync(file))
        return '';
    try {
        return matter(readFileSync(file, 'utf-8')).content ?? '';
    }
    catch {
        return '';
    }
}
/** Write a WorkflowDefinition to its .md file, preserving any markdown body. */
function saveWorkflowFile(key, wf) {
    if (!existsSync(WORKFLOWS_DIR))
        mkdirSync(WORKFLOWS_DIR, { recursive: true });
    const file = path.join(WORKFLOWS_DIR, `${key}.md`);
    // Frontmatter key order follows insertion order below.
    const data = {
        type: 'workflow',
        name: wf.name,
        description: wf.description,
        enabled: wf.enabled,
        trigger: wf.trigger,
    };
    if (wf.agentSlug)
        data.agentSlug = wf.agentSlug;
    if (Object.keys(wf.inputs).length > 0)
        data.inputs = wf.inputs;
    data.steps = wf.steps.map(serializeStep);
    if (wf.synthesis)
        data.synthesis = wf.synthesis;
    // Preserve body content if the file exists; otherwise empty body.
    writeFileSync(file, matter.stringify(readExistingBody(file), data), 'utf-8');
    return { ok: true };
}
|
|
353
|
+
/**
 * Convert a step to its frontmatter representation. Insertion order matters:
 * it is the key order written to YAML. Optional fields are only emitted
 * when truthy, and the default 'prompt' kind stays implicit.
 */
function serializeStep(step) {
    const serialized = {
        id: step.id,
        prompt: step.prompt,
        dependsOn: step.dependsOn,
        tier: step.tier,
        maxTurns: step.maxTurns,
    };
    if (step.model)
        serialized.model = step.model;
    if (step.workDir)
        serialized.workDir = step.workDir;
    if ((step.kind ?? 'prompt') !== 'prompt')
        serialized.kind = step.kind;
    // Structured payloads are copied through untouched when present.
    for (const key of ['mcp', 'channel', 'transform', 'conditional', 'loop', 'canvas']) {
        if (step[key])
            serialized[key] = step[key];
    }
    return serialized;
}
|
|
382
|
+
const COL_WIDTH = 260;
const ROW_HEIGHT = 140;
/**
 * Convert a WorkflowDefinition into Drawflow's import() shape.
 * Default layout is wave-based: x = wave * COL_WIDTH, y = slot-in-wave * ROW_HEIGHT,
 * overridden by a saved per-step canvas position when present.
 */
export function workflowToDrawflow(wf) {
    const waveOf = computeWaves(wf.steps);
    // 1-based numeric node ids, assigned in step order.
    const numericIdOf = new Map(wf.steps.map((s, i) => [s.id, i + 1]));
    const slotInWave = new Map();
    const data = {};
    wf.steps.forEach((step, index) => {
        const numericId = index + 1;
        const kind = step.kind ?? 'prompt';
        const wave = waveOf[step.id] ?? 0;
        // The slot counter advances even when a saved canvas position
        // overrides the computed default, matching the original layout.
        const slot = slotInWave.get(wave) ?? 0;
        slotInWave.set(wave, slot + 1);
        // Drawflow input connections reference the source node's output port.
        const connections = [];
        for (const depId of step.dependsOn) {
            const depNumeric = numericIdOf.get(depId);
            if (depNumeric != null)
                connections.push({ node: String(depNumeric), input: 'output_1' });
        }
        data[String(numericId)] = {
            id: numericId,
            name: nodeNameForKind(kind),
            data: stepToNodeData(step),
            class: 'cl-node cl-node-' + kind,
            html: '',
            typenode: false,
            inputs: { input_1: { connections } },
            outputs: { output_1: { connections: [] } },
            pos_x: step.canvas?.x ?? wave * COL_WIDTH + 50,
            pos_y: step.canvas?.y ?? slot * ROW_HEIGHT + 50,
        };
    });
    // Mirror each input connection as an output connection on its source node
    // (Drawflow needs both directions).
    for (const [targetId, node] of Object.entries(data)) {
        for (const conn of node.inputs.input_1.connections) {
            data[conn.node]?.outputs.output_1.connections.push({ node: targetId, output: 'input_1' });
        }
    }
    return { drawflow: { Home: { data } } };
}
|
|
427
|
+
/**
 * Convert Drawflow's export() payload back into a WorkflowDefinition.
 *
 * Drawflow node ids are numeric; each node carries its original step id in
 * node.data.stepId (falling back to `step-<n>` when absent). Fields the
 * canvas does not edit are inherited from the matching step in `base`, so a
 * round-trip preserves mcp/channel/transform/conditional/loop payloads.
 *
 * @param exportData Drawflow export ({ drawflow: { Home: { data } } }); may be partial.
 * @param base       Previously-loaded WorkflowDefinition to merge over.
 * @returns A copy of `base` with `steps` rebuilt from the canvas nodes.
 */
export function drawflowToWorkflow(exportData, base) {
    const nodes = exportData?.drawflow?.Home?.data ?? {};
    const orderedNumericIds = Object.keys(nodes).sort((a, b) => Number(a) - Number(b));
    // Read step ids back out of node.data; fall back to step-N if missing.
    const numericToStepId = {};
    for (const nid of orderedNumericIds) {
        const stepData = (nodes[nid].data ?? {});
        numericToStepId[nid] = stepData.stepId || `step-${nid}`;
    }
    const baseById = new Map(base.steps.map(s => [s.id, s]));
    const steps = orderedNumericIds.map(nid => {
        const node = nodes[nid];
        // Fix: tolerate nodes without a data payload. The stepId pass above
        // already guards with `?? {}`; this pass previously crashed on them.
        const data = node.data ?? {};
        const stepId = numericToStepId[nid];
        const baseStep = baseById.get(stepId);
        // Dependencies come from the canvas wiring (input port connections).
        const dependsOn = node.inputs.input_1.connections
            .map(c => numericToStepId[c.node])
            .filter((s) => !!s);
        return {
            id: stepId,
            // Scalars: take the canvas value when it has the right type,
            // else the base step's value, else the module defaults.
            prompt: typeof data.prompt === 'string' ? data.prompt : (baseStep?.prompt ?? ''),
            dependsOn,
            tier: typeof data.tier === 'number' ? data.tier : (baseStep?.tier ?? 1),
            maxTurns: typeof data.maxTurns === 'number' ? data.maxTurns : (baseStep?.maxTurns ?? 15),
            model: typeof data.model === 'string' ? data.model : baseStep?.model,
            workDir: typeof data.workDir === 'string' ? data.workDir : baseStep?.workDir,
            kind: data.kind ?? baseStep?.kind,
            mcp: data.mcp ?? baseStep?.mcp,
            channel: data.channel ?? baseStep?.channel,
            transform: data.transform ?? baseStep?.transform,
            conditional: data.conditional ?? baseStep?.conditional,
            loop: data.loop ?? baseStep?.loop,
            // Node position round-trips so manual layout survives a save.
            canvas: { x: node.pos_x, y: node.pos_y },
        };
    });
    return { ...base, steps };
}
|
|
465
|
+
/** Human-readable Drawflow node title for a step kind ('Prompt' for the default/unknown). */
function nodeNameForKind(kind) {
    const titles = {
        mcp: 'MCP Tool',
        channel: 'Channel',
        transform: 'Transform',
        conditional: 'Conditional',
        loop: 'Loop',
    };
    return titles[kind] ?? 'Prompt';
}
|
|
475
|
+
/** Project a step into the node.data payload embedded in each Drawflow node. */
function stepToNodeData(step) {
    const data = {
        stepId: step.id,
        prompt: step.prompt,
        tier: step.tier,
        maxTurns: step.maxTurns,
        kind: step.kind ?? 'prompt',
    };
    // Optional fields are only carried when truthy.
    for (const key of ['model', 'workDir', 'mcp', 'channel', 'transform', 'conditional', 'loop']) {
        if (step[key])
            data[key] = step[key];
    }
    return data;
}
|
|
499
|
+
/** Topological wave numbers (0 = no deps, 1 = depends only on wave 0, ...). */
function computeWaves(steps) {
    const knownIds = new Set(steps.map(s => s.id));
    const unplaced = new Set(knownIds);
    const waveOf = {};
    let waveNum = 0;
    // One wave is peeled off per pass; the pass count is bounded so
    // cyclic or malformed graphs cannot spin forever.
    for (let pass = 0; pass <= steps.length && unplaced.size > 0; pass++) {
        // A step is placeable when every dependency is either unknown
        // (external id) or already assigned to an earlier wave.
        const placeable = steps
            .filter(s => unplaced.has(s.id))
            .filter(s => s.dependsOn.every(d => !knownIds.has(d) || waveOf[d] != null))
            .map(s => s.id);
        if (placeable.length === 0) {
            // Break cycles: assign everything left to the current wave.
            for (const id of unplaced)
                waveOf[id] = waveNum;
            break;
        }
        for (const id of placeable) {
            waveOf[id] = waveNum;
            unplaced.delete(id);
        }
        waveNum++;
    }
    return waveOf;
}
|
|
530
|
+
// ── YAML helper for tests / debug ───────────────────────────────────
/**
 * Stringify a workflow's frontmatter for visual inspection.
 *
 * Mirrors the key set and ordering written by saveWorkflowFile so the
 * inspection output matches what actually lands on disk. (Previously
 * wf.inputs was dropped entirely and agentSlug was emitted in a
 * different position than the saved file.)
 */
export function workflowFrontmatterString(wf) {
    const data = {
        type: 'workflow',
        name: wf.name,
        description: wf.description,
        enabled: wf.enabled,
        trigger: wf.trigger,
    };
    if (wf.agentSlug)
        data.agentSlug = wf.agentSlug;
    if (wf.inputs && Object.keys(wf.inputs).length > 0)
        data.inputs = wf.inputs;
    data.steps = wf.steps.map(serializeStep);
    if (wf.synthesis)
        data.synthesis = wf.synthesis;
    return yaml.dump(data);
}
|
|
547
|
+
//# sourceMappingURL=serializer.js.map
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Builder snapshot history — file-based undo for workflow saves.
|
|
3
|
+
*
|
|
4
|
+
* Every successful save writes a copy of the source file to:
|
|
5
|
+
* ~/.clementine/snapshots/builder/<origin>/<key>/<timestamp>.md
|
|
6
|
+
*
|
|
7
|
+
* Bounded: keep at most MAX_PER_WORKFLOW snapshots per workflow id.
|
|
8
|
+
* Older snapshots are pruned on each save.
|
|
9
|
+
*
|
|
10
|
+
* No git dependency, no user-facing CLI — agent invokes via MCP tools
|
|
11
|
+
* (workflow_history / workflow_restore).
|
|
12
|
+
*/
|
|
13
|
+
export interface SnapshotEntry {
    // Builder id the snapshot belongs to — presumably the id passed to
    // snapshotWorkflow (e.g. "cron:<name>" / "workflow:<key>"); confirm in impl.
    id: string;
    // Snapshot file name under the snapshots dir — "<timestamp>.md" per the
    // header note above.
    filename: string;
    // Timestamp string for the snapshot; presumably derived from the
    // filename — TODO confirm format against snapshots.js.
    ts: string;
    // Size of the snapshotted source file; presumably bytes — TODO confirm.
    size: number;
    // Short excerpt of the snapshot contents for listing UIs — length/shape
    // not visible here; see snapshots.js.
    preview: string;
}
/**
 * Write a snapshot of the current state of a workflow's source file.
 * Best-effort — failures are logged but never block the underlying save.
 */
export declare function snapshotWorkflow(id: string, sourceFile: string): SnapshotEntry | null;
/** List snapshots for a builder id, newest first. */
export declare function listSnapshots(id: string): SnapshotEntry[];
/** Restore a snapshot by filename. Writes the snapshot's contents back into sourceFile. */
export declare function restoreSnapshot(id: string, snapshotFilename: string, sourceFile: string): {
    ok: boolean;
    error?: string;
};
|
|
32
|
+
//# sourceMappingURL=snapshots.d.ts.map
|