atomism 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +210 -0
- package/dist/chunk-34O5KJWR.js +81 -0
- package/dist/chunk-34O5KJWR.js.map +1 -0
- package/dist/chunk-55AP34JO.js +116 -0
- package/dist/chunk-55AP34JO.js.map +1 -0
- package/dist/chunk-6MDHM2B4.js +17 -0
- package/dist/chunk-6MDHM2B4.js.map +1 -0
- package/dist/chunk-GU2R4KLP.js +43 -0
- package/dist/chunk-GU2R4KLP.js.map +1 -0
- package/dist/chunk-H7WC3NXZ.js +39 -0
- package/dist/chunk-H7WC3NXZ.js.map +1 -0
- package/dist/chunk-P33CQFMY.js +329 -0
- package/dist/chunk-P33CQFMY.js.map +1 -0
- package/dist/chunk-P6X7T4KA.js +200 -0
- package/dist/chunk-P6X7T4KA.js.map +1 -0
- package/dist/chunk-PLQJM2KT.js +9 -0
- package/dist/chunk-PLQJM2KT.js.map +1 -0
- package/dist/chunk-RS2IEGW3.js +10 -0
- package/dist/chunk-RS2IEGW3.js.map +1 -0
- package/dist/chunk-S6Z5G5DB.js +84 -0
- package/dist/chunk-S6Z5G5DB.js.map +1 -0
- package/dist/chunk-UVUDQ4XP.js +259 -0
- package/dist/chunk-UVUDQ4XP.js.map +1 -0
- package/dist/chunk-UWVZQSP4.js +597 -0
- package/dist/chunk-UWVZQSP4.js.map +1 -0
- package/dist/chunk-YKJO3ZFY.js +308 -0
- package/dist/chunk-YKJO3ZFY.js.map +1 -0
- package/dist/cli.d.ts +1 -0
- package/dist/cli.js +152 -0
- package/dist/cli.js.map +1 -0
- package/dist/create-atom-AXPDBYQL.js +153 -0
- package/dist/create-atom-AXPDBYQL.js.map +1 -0
- package/dist/escalate-BTEJT5NL.js +211 -0
- package/dist/escalate-BTEJT5NL.js.map +1 -0
- package/dist/extract-RPKCTINT.js +514 -0
- package/dist/extract-RPKCTINT.js.map +1 -0
- package/dist/graduate-453M7ZRQ.js +222 -0
- package/dist/graduate-453M7ZRQ.js.map +1 -0
- package/dist/helpers-PJPFPYBQ.js +11 -0
- package/dist/helpers-PJPFPYBQ.js.map +1 -0
- package/dist/history-OPD7NLZW.js +258 -0
- package/dist/history-OPD7NLZW.js.map +1 -0
- package/dist/import-generator-4CKRBMTE.js +1864 -0
- package/dist/import-generator-4CKRBMTE.js.map +1 -0
- package/dist/index.d.ts +230 -0
- package/dist/index.js +41 -0
- package/dist/index.js.map +1 -0
- package/dist/init-2FINDMYK.js +741 -0
- package/dist/init-2FINDMYK.js.map +1 -0
- package/dist/list-NEBVBGG3.js +71 -0
- package/dist/list-NEBVBGG3.js.map +1 -0
- package/dist/parser-3BILOSOO.js +157 -0
- package/dist/parser-3BILOSOO.js.map +1 -0
- package/dist/plan-DNVARHWH.js +249 -0
- package/dist/plan-DNVARHWH.js.map +1 -0
- package/dist/register-XTRMSH7Y.js +91 -0
- package/dist/register-XTRMSH7Y.js.map +1 -0
- package/dist/revert-J4CRDE2K.js +87 -0
- package/dist/revert-J4CRDE2K.js.map +1 -0
- package/dist/run-3GI3SBYL.js +188 -0
- package/dist/run-3GI3SBYL.js.map +1 -0
- package/dist/scan-generators-ST4TBEY7.js +375 -0
- package/dist/scan-generators-ST4TBEY7.js.map +1 -0
- package/dist/signatures-K5QIL4WG.js +258 -0
- package/dist/signatures-K5QIL4WG.js.map +1 -0
- package/dist/skills-assign-IHOXX4AI.js +182 -0
- package/dist/skills-assign-IHOXX4AI.js.map +1 -0
- package/dist/skills-load-JSD5UG2K.js +20 -0
- package/dist/skills-load-JSD5UG2K.js.map +1 -0
- package/dist/skills-scan-WACJFRJN.js +25 -0
- package/dist/skills-scan-WACJFRJN.js.map +1 -0
- package/dist/skills-suggest-JFI2NUJI.js +269 -0
- package/dist/skills-suggest-JFI2NUJI.js.map +1 -0
- package/dist/status-KQVSAZFR.js +111 -0
- package/dist/status-KQVSAZFR.js.map +1 -0
- package/dist/suggest-IFFJQFIW.js +183 -0
- package/dist/suggest-IFFJQFIW.js.map +1 -0
- package/dist/test-HP3FG3MO.js +152 -0
- package/dist/test-HP3FG3MO.js.map +1 -0
- package/dist/test-gen-2ZGPOP35.js +347 -0
- package/dist/test-gen-2ZGPOP35.js.map +1 -0
- package/dist/trust-4R26DULG.js +248 -0
- package/dist/trust-4R26DULG.js.map +1 -0
- package/dist/validate-generator-46H2LYYQ.js +410 -0
- package/dist/validate-generator-46H2LYYQ.js.map +1 -0
- package/dist/workflow-5UVLBS7J.js +655 -0
- package/dist/workflow-5UVLBS7J.js.map +1 -0
- package/package.json +84 -0
|
@@ -0,0 +1,597 @@
|
|
|
1
|
+
import {
|
|
2
|
+
ATOMIC_DIR,
|
|
3
|
+
fileExists
|
|
4
|
+
} from "./chunk-YKJO3ZFY.js";
|
|
5
|
+
import {
|
|
6
|
+
toErrorMessage
|
|
7
|
+
} from "./chunk-PLQJM2KT.js";
|
|
8
|
+
|
|
9
|
+
// src/workflow/resolver.ts
|
|
10
|
+
import { z } from "zod";
|
|
11
|
+
import { createHash } from "crypto";
|
|
12
|
+
|
|
13
|
+
// src/workflow/dag.ts
|
|
14
|
+
/**
 * Depth-first post-order visit used by topologicalSort.
 * Pushes `id` onto `result` after all of its dependencies.
 *
 * @param {string} id - node to visit
 * @param {Map<string, {id: string, dependsOn: string[]}>} nodeMap - all nodes by id
 * @param {Set<string>} visited - nodes already emitted
 * @param {Set<string>} visiting - nodes on the current DFS stack (cycle detection)
 * @param {string[]} result - post-order accumulator (mutated)
 * @throws {Error} on a dependency cycle, an unknown node id, or an unknown dependency
 */
function dfsVisit(id, nodeMap, visited, visiting, result) {
  if (visited.has(id)) return;
  // Seeing a node that is still on the DFS stack means we looped back to it.
  if (visiting.has(id)) {
    throw new Error(`Cycle detected at "${id}"`);
  }
  visiting.add(id);
  const node = nodeMap.get(id);
  if (!node) {
    throw new Error(`Unknown node "${id}"`);
  }
  node.dependsOn.forEach((dep) => {
    if (!nodeMap.has(dep)) {
      throw new Error(`Node "${id}" depends on unknown node "${dep}"`);
    }
    dfsVisit(dep, nodeMap, visited, visiting, result);
  });
  visiting.delete(id);
  visited.add(id);
  // Post-order push: all dependencies are already in `result`.
  result.push(id);
}
|
|
34
|
+
/**
 * Index DAG nodes by id, rejecting duplicates.
 *
 * @param {{id: string, dependsOn: string[]}[]} nodes
 * @returns {Map<string, object>} node id -> node
 * @throws {Error} when two nodes share the same id
 */
function buildNodeMap(nodes) {
  const byId = /* @__PURE__ */ new Map();
  nodes.forEach((node) => {
    if (byId.has(node.id)) {
      throw new Error(`Duplicate node ID "${node.id}"`);
    }
    byId.set(node.id, node);
  });
  return byId;
}
|
|
44
|
+
/**
 * Order DAG nodes so every node appears after all of its dependencies.
 *
 * @param {{id: string, dependsOn: string[]}[]} nodes
 * @returns {string[]} node ids in dependency-first order
 * @throws {Error} on cycles, duplicate ids, or unknown dependencies (via dfsVisit/buildNodeMap)
 */
function topologicalSort(nodes) {
  const order = [];
  const done = /* @__PURE__ */ new Set();
  const onStack = /* @__PURE__ */ new Set();
  const nodeMap = buildNodeMap(nodes);
  nodes.forEach((node) => {
    dfsVisit(node.id, nodeMap, done, onStack, order);
  });
  return order;
}
|
|
54
|
+
|
|
55
|
+
// src/workflow/resolver.ts
|
|
56
|
+
// Zod schemas describing how atoms exchange artifacts inside a workflow.
// NOTE(review): this is a bundled chunk (esbuild output); sources live in src/workflow/resolver.ts.
// A single artifact an atom consumes from one of its dependencies.
var ArtifactRequirement = z.object({
  /** The artifact name to require */
  artifact: z.string().min(1),
  /** Which atom produces this artifact (must be in dependsOn) */
  from: z.string().min(1),
  /** Optional: map to a different input name (must be non-empty if provided) */
  as: z.string().min(1).optional()
});
// A single artifact an atom emits for downstream consumers.
var ArtifactProduction = z.object({
  /** The artifact name to produce */
  artifact: z.string().min(1),
  /** Optional description of the artifact */
  description: z.string().optional()
});
// One atom entry in a workflow definition: identity, ordering, and artifact I/O.
// The effective id used throughout the resolver is `alias ?? atom`.
var AtomWithArtifacts = z.object({
  /** Atom name (must match a registered atom) */
  atom: z.string().min(1),
  /** Atoms this depends on (must complete before this runs) */
  dependsOn: z.array(z.string().min(1)).default([]),
  /** Optional alias for this atom instance in the workflow (must be non-empty if provided) */
  alias: z.string().min(1).optional(),
  /** Optional description of this atom's role in the workflow */
  description: z.string().optional(),
  /** Artifacts this atom requires (from dependencies) */
  requires: z.array(ArtifactRequirement).default([]),
  /** Artifacts this atom produces */
  produces: z.array(ArtifactProduction).default([])
});
|
|
84
|
+
/**
 * In-memory artifact cache keyed by (atomId, artifactName).
 *
 * Backed by a two-level Map: atom id -> (artifact name -> artifact).
 * Empty inner maps are dropped on `clear` so stale atom ids do not accumulate.
 *
 * @returns {{get: Function, set: Function, has: Function, clear: Function, clearAll: Function}}
 */
function createArtifactCache() {
  const store = /* @__PURE__ */ new Map();
  const bucketOf = (atomId) => store.get(atomId);
  return {
    // Returns the cached artifact, or undefined when absent.
    get(atomId, artifactName) {
      return bucketOf(atomId)?.get(artifactName);
    },
    // Stores an artifact, lazily creating the per-atom bucket.
    set(atomId, artifactName, artifact) {
      if (!store.has(atomId)) {
        store.set(atomId, /* @__PURE__ */ new Map());
      }
      store.get(atomId).set(artifactName, artifact);
    },
    // True when the (atomId, artifactName) pair has been cached.
    has(atomId, artifactName) {
      return bucketOf(atomId)?.has(artifactName) ?? false;
    },
    // Removes one artifact; deletes the bucket once it is empty.
    clear(atomId, artifactName) {
      const bucket = bucketOf(atomId);
      if (!bucket) return;
      bucket.delete(artifactName);
      if (bucket.size === 0) {
        store.delete(atomId);
      }
    },
    // Wipes the entire cache.
    clearAll() {
      store.clear();
    }
  };
}
|
|
114
|
+
/**
 * Resolve a workflow's atoms into an execution plan.
 *
 * Validates artifact wiring, topologically sorts the atoms, decides which
 * atoms can be skipped (all produced artifacts present in `options.cache`
 * AND every dependency is itself skippable), and wires each atom's cached
 * inputs from its dependencies' productions.
 *
 * @param {object[]} atoms - AtomWithArtifacts entries; each atom's id is `alias ?? atom`
 * @param {object} [options]
 * @param {object} [options.cache] - artifact cache (see createArtifactCache); omit to force execution of everything
 * @param {string[]} [options.forceRebuild] - atom ids that must re-execute even when cached
 * @param {boolean} [options.forceRebuildAll] - re-execute every atom regardless of cache
 * @returns {{resolved: object[], skipped: string[], toExecute: string[]}}
 *   `resolved` is in execution order; each entry carries {id, atom, inputs, canSkip, reason}.
 * @throws {Error} on duplicate atom ids, invalid artifact wiring, cycles,
 *   or a forceRebuild entry naming an unknown atom
 */
function resolveDependencies(atoms, options = {}) {
  // Fail fast on bad requires/produces wiring before any planning work.
  validateArtifactDependencies(atoms);
  const { cache, forceRebuild = [], forceRebuildAll = false } = options;
  const forceRebuildSet = new Set(forceRebuild);
  // Index atoms by effective id; reject collisions.
  const atomMap = /* @__PURE__ */ new Map();
  for (const a of atoms) {
    const id = a.alias ?? a.atom;
    if (atomMap.has(id)) {
      throw new Error(`Duplicate atom id "${id}"`);
    }
    atomMap.set(id, a);
  }
  // forceRebuild entries must name real atoms (irrelevant when rebuilding all).
  if (!forceRebuildAll) {
    for (const id of forceRebuildSet) {
      if (!atomMap.has(id)) {
        throw new Error(`forceRebuild references unknown atom "${id}"`);
      }
    }
  }
  const dagNodes = atoms.map((a) => ({
    id: a.alias ?? a.atom,
    dependsOn: [...a.dependsOn]
  }));
  const executionOrder = topologicalSort(dagNodes);
  // atom id -> (artifact name -> artifact) available to downstream atoms.
  const productions = /* @__PURE__ */ new Map();
  const resolved = [];
  const skipped = [];
  const toExecute = [];
  // atom id -> { canSkip, reason }; filled in topological order so each
  // atom can consult its dependencies' verdicts.
  const atomStatus = /* @__PURE__ */ new Map();
  // Pass 1: decide skip/execute per atom, in dependency order.
  for (const id of executionOrder) {
    const a = atomMap.get(id);
    let canSkip = false;
    let reason = "No cache available";
    if (cache && !forceRebuildAll && !forceRebuildSet.has(id)) {
      const hasOutputs = a.produces.length > 0;
      const allOutputsCached = hasOutputs && a.produces.every((prod) => cache.has(id, prod.artifact));
      // Deps were processed earlier in this loop, so their status is known.
      const depsSkippable = a.dependsOn.every(
        (dep) => atomStatus.get(dep)?.canSkip
      );
      if (hasOutputs && allOutputsCached && depsSkippable) {
        canSkip = true;
        reason = "All outputs cached";
        // Publish the cached artifacts so downstream atoms can consume them.
        const cachedProductions = /* @__PURE__ */ new Map();
        for (const prod of a.produces) {
          const cached = cache.get(id, prod.artifact);
          if (cached) {
            cachedProductions.set(prod.artifact, cached);
          }
        }
        productions.set(id, cachedProductions);
        skipped.push(id);
      } else {
        // Pick the most specific reason for why this atom must run.
        if (!depsSkippable) {
          reason = "Dependency rebuild required";
        } else if (!hasOutputs) {
          reason = "No outputs to cache";
        } else {
          reason = "Some outputs not cached";
        }
        toExecute.push(id);
        // Placeholder: artifacts appear only after the atom actually runs.
        const placeholderProductions = /* @__PURE__ */ new Map();
        productions.set(id, placeholderProductions);
      }
    } else if (forceRebuildAll) {
      reason = "Force rebuild all";
      toExecute.push(id);
      productions.set(id, /* @__PURE__ */ new Map());
    } else if (forceRebuildSet.has(id)) {
      reason = `Force rebuild: ${id}`;
      toExecute.push(id);
      productions.set(id, /* @__PURE__ */ new Map());
    } else {
      // No cache supplied: everything executes.
      toExecute.push(id);
      productions.set(id, /* @__PURE__ */ new Map());
    }
    atomStatus.set(id, { canSkip, reason });
  }
  // Pass 2: wire each atom's inputs from its dependencies' (possibly cached)
  // productions and emit the resolved plan entries.
  for (const id of executionOrder) {
    const a = atomMap.get(id);
    const status = atomStatus.get(id);
    const inputs = /* @__PURE__ */ new Map();
    for (const req of a.requires) {
      const depProductions = productions.get(req.from);
      if (!depProductions) {
        throw new Error(
          `Atom "${id}" requires artifact "${req.artifact}" from "${req.from}", but "${req.from}" has not been resolved yet`
        );
      }
      const artifact = depProductions.get(req.artifact);
      // Only cached artifacts are wired here; artifacts of atoms that will
      // execute are delivered at run time.
      if (artifact) {
        const inputName = req.as ?? req.artifact;
        inputs.set(inputName, artifact);
      }
    }
    resolved.push({
      id,
      atom: a,
      inputs,
      canSkip: status.canSkip,
      reason: status.reason
    });
  }
  return {
    resolved,
    skipped,
    toExecute
  };
}
|
|
222
|
+
/**
 * Validate the artifact wiring of a workflow's atoms.
 *
 * For every `requires` entry checks, in order: no duplicate input name,
 * the producing atom is listed in `dependsOn`, the producing atom exists,
 * and the producing atom actually declares that artifact in `produces`.
 *
 * @param {object[]} atoms - AtomWithArtifacts entries (id is `alias ?? atom`)
 * @throws {Error} on the first wiring violation found
 */
function validateArtifactDependencies(atoms) {
  // Index every atom by its effective id (alias wins over atom name).
  const byId = new Map(atoms.map((a) => [a.alias ?? a.atom, a]));
  for (const a of atoms) {
    const id = a.alias ?? a.atom;
    const declaredDeps = new Set(a.dependsOn);
    const seenInputs = /* @__PURE__ */ new Set();
    a.requires.forEach((req) => {
      const inputName = req.as ?? req.artifact;
      if (seenInputs.has(inputName)) {
        throw new Error(
          `Atom "${id}" has duplicate input mapping "${inputName}"`
        );
      }
      seenInputs.add(inputName);
      if (!declaredDeps.has(req.from)) {
        throw new Error(
          `Atom "${id}" requires artifact "${req.artifact}" from "${req.from}", but "${req.from}" is not in dependsOn`
        );
      }
      if (!byId.has(req.from)) {
        throw new Error(
          `Atom "${id}" requires artifact from unknown atom "${req.from}"`
        );
      }
      const producer = byId.get(req.from);
      const producesArtifact = producer.produces.some(
        (p) => p.artifact === req.artifact
      );
      if (!producesArtifact) {
        throw new Error(
          `Atom "${id}" requires artifact "${req.artifact}" from "${req.from}", but "${req.from}" does not produce it`
        );
      }
    });
  }
}
|
|
262
|
+
/**
 * Render a dependency chain as a human-readable arrow-separated string.
 *
 * @param {string[]} chain - atom ids from root to leaf
 * @returns {string} e.g. "a → b → c"
 */
function formatDependencyChain(chain) {
  const ARROW = " \u2192 ";
  return chain.join(ARROW);
}
|
|
265
|
+
|
|
266
|
+
// src/workflow/beads.ts
|
|
267
|
+
import { execFile } from "child_process";
|
|
268
|
+
import { promisify } from "util";
|
|
269
|
+
import { platform } from "os";
|
|
270
|
+
import { z as z2 } from "zod";
|
|
271
|
+
// Promisified child_process.execFile; every `bd` (Beads CLI) call goes through this.
var execFileAsync = promisify(execFile);
// Input shape for createWorkflowBeads: workflow name, optional description,
// and the list of atom ids that get child beads.
var WorkflowBeadOptionsSchema = z2.object({
  workflow: z2.string().min(1),
  description: z2.string().optional(),
  atoms: z2.array(z2.string().min(1))
});
|
|
277
|
+
/**
 * Check whether the Beads CLI (`bd`) is on the PATH.
 *
 * Uses `where` on Windows and `which` elsewhere; a non-zero exit (or any
 * spawn failure) simply means "not available".
 *
 * @returns {Promise<boolean>}
 */
async function isBeadsAvailable() {
  const locator = platform() === "win32" ? "where" : "which";
  try {
    await execFileAsync(locator, ["bd"], {});
    return true;
  } catch {
    return false;
  }
}
|
|
286
|
+
/**
 * Check whether Beads is initialized for the current directory by probing
 * `bd info`; any failure is treated as "not initialized".
 *
 * @returns {Promise<boolean>}
 */
async function isBeadsInitialized() {
  try {
    await execFileAsync("bd", ["info"], {});
  } catch {
    return false;
  }
  return true;
}
|
|
294
|
+
/**
 * Create a bead (task issue) via the `bd` CLI.
 *
 * @param {string} title - bead title
 * @param {string} description - bead body
 * @param {string[]} labels - labels to attach (each passed as `-l <label>`)
 * @param {string} [parentId] - optional parent bead id
 * @returns {Promise<{id: string} | {error: string}>} the new bead's id, or an error message
 */
async function createBead(title, description, labels, parentId) {
  try {
    const args = ["create", title, "-d", description];
    labels.forEach((label) => {
      args.push("-l", label);
    });
    args.push("-t", "task", "--json");
    if (parentId) {
      args.push("--parent", parentId);
    }
    const { stdout } = await execFileAsync("bd", args, {
      encoding: "utf-8"
    });
    // `--json` makes bd print a JSON object containing the new id.
    const parsed = JSON.parse(stdout);
    return parsed.id ? { id: parsed.id } : { error: "Failed to parse bead ID from output" };
  } catch (err) {
    return { error: toErrorMessage(err) };
  }
}
|
|
318
|
+
/**
 * Update a bead's status via `bd update`, optionally attaching a comment.
 *
 * @param {string} beadId - bead to update
 * @param {string} status - bd status value (e.g. "todo", "done", "blocked")
 * @param {string} [comment] - optional comment passed as `-c`
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function updateBeadStatus(beadId, status, comment) {
  try {
    const args = ["update", beadId, "--status", status];
    if (comment) {
      args.push("-c", comment);
    }
    await execFileAsync("bd", args, { encoding: "utf-8" });
    return { success: true };
  } catch (err) {
    return { success: false, error: toErrorMessage(err) };
  }
}
|
|
335
|
+
/**
 * Create a parent bead for a workflow run plus one child bead per atom.
 *
 * Preconditions checked up front: options validate against
 * WorkflowBeadOptionsSchema, the `bd` CLI is on PATH, and Beads is
 * initialized in the current directory. A failed parent-bead creation
 * aborts; a failed child-bead creation only logs a warning and the atom
 * is left out of the returned map (best-effort).
 *
 * @param {{workflow: string, description?: string, atoms: string[]}} options
 * @returns {Promise<{success: true, parentId: string, childIds: Map<string, string>} | {success: false, error: string}>}
 */
async function createWorkflowBeads(options) {
  const parseResult = WorkflowBeadOptionsSchema.safeParse(options);
  if (!parseResult.success) {
    return {
      success: false,
      error: `Invalid options: ${parseResult.error.message}`
    };
  }
  const validated = parseResult.data;
  if (!await isBeadsAvailable()) {
    return {
      success: false,
      error: "Beads CLI (bd) not found"
    };
  }
  if (!await isBeadsInitialized()) {
    return {
      success: false,
      error: "Beads not initialized in this directory"
    };
  }
  // Parent bead carries the full markdown summary of the workflow.
  const parentDescription = buildParentDescription(validated);
  const parentResult = await createBead(
    `[atomic] Workflow: ${validated.workflow}`,
    parentDescription,
    ["atomic", "workflow"]
  );
  if ("error" in parentResult) {
    return {
      success: false,
      error: `Failed to create parent bead: ${parentResult.error}`
    };
  }
  // One child bead per atom, parented under the workflow bead.
  // Created sequentially so bd sees them in atom order.
  const childIds = /* @__PURE__ */ new Map();
  for (const atom of validated.atoms) {
    const childDescription = `Atom execution for workflow '${validated.workflow}'`;
    const childResult = await createBead(
      `[atomic] ${atom}`,
      childDescription,
      ["atomic", "atom"],
      parentResult.id
    );
    if ("error" in childResult) {
      // Best-effort: a missing child bead should not fail the whole setup.
      console.warn(`Warning: Failed to create bead for atom '${atom}': ${childResult.error}`);
    } else {
      childIds.set(atom, childResult.id);
    }
  }
  return {
    success: true,
    parentId: parentResult.id,
    childIds
  };
}
|
|
389
|
+
/**
 * Build the markdown body for a workflow's parent bead: header, optional
 * description, an atom checklist, and a progress note.
 *
 * @param {{workflow: string, description?: string, atoms: string[]}} options
 * @returns {string} markdown joined with "\n"
 */
function buildParentDescription(options) {
  const header = [`**Workflow:** \`${options.workflow}\``];
  if (options.description) {
    header.push(`**Description:** ${options.description}`);
  }
  const checklist = options.atoms.map((atom) => `- [ ] ${atom}`);
  return [
    "## Workflow Execution",
    "",
    ...header,
    "",
    "## Atoms",
    "",
    ...checklist,
    "",
    "## Progress",
    "",
    "Child beads track individual atom status."
  ].join("\n");
}
|
|
409
|
+
/**
 * Update an atom's bead, translating atom status to bd status
 * (pending->todo, in_progress->in_progress, completed->done, failed->blocked).
 * Failure details are prefixed with "Failed: " in the bead comment.
 *
 * @param {string} beadId - bead tracking this atom
 * @param {"pending"|"in_progress"|"completed"|"failed"} status
 * @param {string} [details] - optional comment text
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function updateAtomBead(beadId, status, details) {
  const STATUS_TO_BD = {
    pending: "todo",
    in_progress: "in_progress",
    completed: "done",
    failed: "blocked"
  };
  let comment;
  if (details) {
    comment = status === "failed" ? `Failed: ${details}` : details;
  }
  return updateBeadStatus(beadId, STATUS_TO_BD[status], comment);
}
|
|
420
|
+
/**
 * Close out a workflow's parent bead: "done" on success, "blocked" (naming
 * the failed atom when known) on failure.
 *
 * @param {string} beadId - parent workflow bead
 * @param {boolean} success - whether the workflow completed
 * @param {string} [failedAtom] - atom that caused the failure, if any
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function updateWorkflowBead(beadId, success, failedAtom) {
  if (!success) {
    const comment = failedAtom ? `Blocked: Atom '${failedAtom}' failed` : "Blocked: Workflow execution failed";
    return updateBeadStatus(beadId, "blocked", comment);
  }
  return updateBeadStatus(beadId, "done", "Workflow completed successfully");
}
|
|
428
|
+
|
|
429
|
+
// src/workflow/run-state.ts
|
|
430
|
+
import { join } from "path";
|
|
431
|
+
import { readFile, writeFile, mkdir, readdir } from "fs/promises";
|
|
432
|
+
import { z as z3 } from "zod";
|
|
433
|
+
// Subdirectory of ATOMIC_DIR where per-run state JSON files live
// (one `${runId}.json` per run; see getRunStatePath).
var RUNS_DIR = "runs";
// Persisted state of a single atom within a workflow run.
var AtomRunStateSchema = z3.object({
  id: z3.string().min(1),
  status: z3.enum(["pending", "running", "completed", "failed", "skipped"]),
  artifacts: z3.array(z3.string()).optional(),
  error: z3.string().optional(),
  // Timestamps are ISO strings (createRunState uses toISOString()).
  startedAt: z3.string().optional(),
  completedAt: z3.string().optional(),
  /** Bead ID for this atom's tracking issue */
  beadId: z3.string().optional()
});
// Persisted state of an entire workflow run; serialized by saveRunState
// and validated on load by loadRunState (invalid files load as null).
var WorkflowRunStateSchema = z3.object({
  runId: z3.string().min(1),
  workflow: z3.string().min(1),
  status: z3.enum(["running", "completed", "failed", "cancelled"]),
  atoms: z3.array(AtomRunStateSchema),
  executionOrder: z3.array(z3.string()),
  startedAt: z3.string(),
  completedAt: z3.string().optional(),
  failedAtom: z3.string().optional(),
  error: z3.string().optional(),
  /** Parent bead ID for workflow-level tracking */
  beadParentId: z3.string().optional(),
  /** Whether beads tracking is enabled for this run */
  beadsEnabled: z3.boolean().optional()
});
|
|
459
|
+
/**
 * Generate a run id of the form `run-<timestamp36>-<random36>`.
 * Not cryptographically secure — uniqueness only, not security.
 *
 * @returns {string}
 */
function generateRunId() {
  const stamp = Date.now().toString(36);
  const suffix = Math.random().toString(36).slice(2, 8);
  return `run-${stamp}-${suffix}`;
}
|
|
464
|
+
/**
 * Absolute path of the persisted state file for a run:
 * `<cwd>/<ATOMIC_DIR>/runs/<runId>.json`.
 *
 * @param {string} runId
 * @returns {string}
 */
function getRunStatePath(runId) {
  return join(process.cwd(), ATOMIC_DIR, RUNS_DIR, `${runId}.json`);
}
|
|
468
|
+
/**
 * Persist a run state to disk as pretty-printed JSON, creating the runs
 * directory if needed.
 *
 * @param {object} state - WorkflowRunState (must carry `runId`)
 * @returns {Promise<void>}
 */
async function saveRunState(state) {
  const runsDir = join(process.cwd(), ATOMIC_DIR, RUNS_DIR);
  await mkdir(runsDir, { recursive: true });
  const serialized = JSON.stringify(state, null, 2);
  await writeFile(getRunStatePath(state.runId), serialized, "utf-8");
}
|
|
475
|
+
/**
 * Load and validate one run state from disk.
 *
 * @param {string} runId
 * @returns {Promise<object|null>} the parsed WorkflowRunState, or null when
 *   the file is missing, unreadable, malformed JSON, or schema-invalid
 */
async function loadRunState(runId) {
  const filePath = getRunStatePath(runId);
  const exists = await fileExists(filePath);
  if (!exists) {
    return null;
  }
  try {
    const raw = await readFile(filePath, "utf-8");
    return WorkflowRunStateSchema.parse(JSON.parse(raw));
  } catch {
    // Corrupt or schema-invalid state files are treated as absent.
    return null;
  }
}
|
|
488
|
+
/**
 * List persisted run states, newest first (by startedAt), optionally
 * filtered to a single workflow.
 *
 * Fixes: the run id was derived with `file.replace(".json", "")`, which
 * strips the FIRST ".json" occurrence and corrupts ids containing ".json"
 * mid-name; now the trailing extension is sliced off. State files are also
 * loaded in parallel (they are independent; ordering is normalized by the
 * final sort).
 *
 * @param {string} [workflowName] - keep only runs of this workflow
 * @returns {Promise<object[]>} WorkflowRunState entries, newest first
 */
async function listRunStates(workflowName) {
  const projectRoot = process.cwd();
  const runsDir = join(projectRoot, ATOMIC_DIR, RUNS_DIR);
  if (!await fileExists(runsDir)) {
    return [];
  }
  const files = await readdir(runsDir);
  const runIds = files.filter((f) => f.endsWith(".json")).map((f) => f.slice(0, -".json".length));
  // Independent files: load them concurrently instead of one-by-one.
  const loaded = await Promise.all(runIds.map((runId) => loadRunState(runId)));
  const states = loaded.filter(
    (state) => state !== null && (!workflowName || state.workflow === workflowName)
  );
  states.sort((a, b) => new Date(b.startedAt).getTime() - new Date(a.startedAt).getTime());
  return states;
}
|
|
509
|
+
/**
 * Build a fresh WorkflowRunState for a new run: status "running", one atom
 * entry per execution-order id ("skipped" when pre-skipped, else "pending").
 *
 * @param {string} workflow - workflow name
 * @param {string[]} executionOrder - atom ids in execution order
 * @param {string[]} skipped - atom ids that will be skipped (cached)
 * @param {{enabled?: boolean, parentId?: string, atomBeadIds?: Map<string, string>}} [beadOptions]
 * @returns {object} the new run state (not yet persisted)
 */
function createRunState(workflow, executionOrder, skipped, beadOptions) {
  const skippedSet = new Set(skipped);
  const atoms = executionOrder.map((id) => ({
    id,
    status: skippedSet.has(id) ? "skipped" : "pending",
    beadId: beadOptions?.atomBeadIds?.get(id)
  }));
  return {
    runId: generateRunId(),
    workflow,
    status: "running",
    atoms,
    executionOrder,
    startedAt: new Date().toISOString(),
    beadsEnabled: beadOptions?.enabled ?? false,
    beadParentId: beadOptions?.parentId
  };
}
|
|
528
|
+
// Per-state memo of (atom id -> atom entry); WeakMap so discarded run
// states can be garbage collected.
var atomIndexCache = /* @__PURE__ */ new WeakMap();
/**
 * Mutate one atom entry of a run state in place (Object.assign of `updates`).
 * Lookups go through a cached id->atom index that is rebuilt when it looks
 * stale (size mismatch, or the requested id is missing).
 *
 * @param {object} state - WorkflowRunState (its `atoms` array is mutated)
 * @param {string} atomId - id of the atom entry to update
 * @param {object} updates - fields to merge into the atom entry
 * @throws {Error} when `atomId` is not present in `state.atoms`
 */
function updateAtomState(state, atomId, updates) {
  const rebuild = () => {
    const fresh = new Map(state.atoms.map((a) => [a.id, a]));
    atomIndexCache.set(state, fresh);
    return fresh;
  };
  let index = atomIndexCache.get(state);
  if (!index || index.size !== state.atoms.length) {
    index = rebuild();
  }
  // A miss may just mean the cached index predates an atoms change —
  // rebuild once before declaring the id unknown.
  const atom = index.get(atomId) ?? rebuild().get(atomId);
  if (!atom) {
    throw new Error(`Atom '${atomId}' not found in run state for workflow '${state.workflow}'`);
  }
  Object.assign(atom, updates);
}
|
|
549
|
+
/**
 * Compute which atoms still need to run when resuming a workflow.
 *
 * With `fromAtom`: that atom and everything after it in execution order.
 * Without: from the first incomplete atom (status pending/failed/running,
 * or missing from state.atoms) through the end of the order; empty when
 * everything finished.
 *
 * @param {object} state - WorkflowRunState
 * @param {string} [fromAtom] - explicit resume point
 * @returns {string[]} atom ids to execute, in order
 * @throws {Error} when `fromAtom` is not in the execution order
 */
function getResumableAtoms(state, fromAtom) {
  const order = state.executionOrder;
  if (fromAtom) {
    const startIndex = order.indexOf(fromAtom);
    if (startIndex < 0) {
      throw new Error(`Atom '${fromAtom}' not found in workflow`);
    }
    return order.slice(startIndex);
  }
  const byId = new Map(state.atoms.map((atom) => [atom.id, atom]));
  const isIncomplete = (atom) => !atom || atom.status === "pending" || atom.status === "failed" || atom.status === "running";
  // Resume from the first incomplete atom; everything after it re-runs too.
  const firstIncomplete = order.findIndex((id) => isIncomplete(byId.get(id)));
  return firstIncomplete === -1 ? [] : order.slice(firstIncomplete);
}
|
|
575
|
+
/**
 * Most recent failed run for a workflow, or null when none exists.
 * Relies on listRunStates returning runs newest-first.
 *
 * @param {string} [workflowName] - restrict to this workflow
 * @returns {Promise<object|null>}
 */
async function getLastFailedRun(workflowName) {
  const runs = await listRunStates(workflowName);
  const failed = runs.find((run) => run.status === "failed");
  return failed ?? null;
}
|
|
579
|
+
|
|
580
|
+
export {
|
|
581
|
+
AtomWithArtifacts,
|
|
582
|
+
createArtifactCache,
|
|
583
|
+
resolveDependencies,
|
|
584
|
+
formatDependencyChain,
|
|
585
|
+
isBeadsAvailable,
|
|
586
|
+
isBeadsInitialized,
|
|
587
|
+
createWorkflowBeads,
|
|
588
|
+
updateAtomBead,
|
|
589
|
+
updateWorkflowBead,
|
|
590
|
+
saveRunState,
|
|
591
|
+
loadRunState,
|
|
592
|
+
createRunState,
|
|
593
|
+
updateAtomState,
|
|
594
|
+
getResumableAtoms,
|
|
595
|
+
getLastFailedRun
|
|
596
|
+
};
|
|
597
|
+
//# sourceMappingURL=chunk-UWVZQSP4.js.map
|