@cascade-flow/backend-filesystem 0.1.0 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +35 -89
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +361 -200
- package/dist/index.js.map +7 -4
- package/dist/lib/file-utils.d.ts +23 -0
- package/dist/lib/file-utils.d.ts.map +1 -0
- package/dist/lib/ids.d.ts +17 -0
- package/dist/lib/ids.d.ts.map +1 -0
- package/dist/lib/paths.d.ts +46 -0
- package/dist/lib/paths.d.ts.map +1 -0
- package/package.json +3 -3
package/dist/index.js
CHANGED
|
@@ -1,10 +1,120 @@
|
|
|
1
1
|
// src/index.ts
|
|
2
|
-
import { mkdir, writeFile, readFile, readdir, access, unlink, open as
|
|
2
|
+
import { mkdir as mkdir2, writeFile as writeFile2, readFile, readdir, access, unlink as unlink2, open as openFile2 } from "node:fs/promises";
|
|
3
|
+
import { join as join2, dirname as dirname3 } from "node:path";
|
|
4
|
+
|
|
5
|
+
// src/lib/paths.ts
|
|
3
6
|
import { join, dirname } from "node:path";
|
|
7
|
+
|
|
8
|
+
// Resolves filesystem locations for run data under a base directory.
// Layout: <baseDir>/<workflowSlug>/<runId>/{workflow-events,step-events,step-outputs,locks}
// plus the shared <baseDir>/.idempotency and <baseDir>/.registry trees.
// Step ids may contain "/" separators; the directory portion of a step id
// is mirrored as subdirectories under the per-category directory.
class PathHelper {
  baseDir;

  /**
   * @param {string} baseDir - Root directory that all run data lives under.
   */
  constructor(baseDir) {
    this.baseDir = baseDir;
  }

  /** Directory holding all data for one run: <baseDir>/<slug>/<runId>. */
  getRunDir(workflowSlug, runId) {
    return join(this.baseDir, workflowSlug, runId);
  }

  /** Directory for workflow-level events of a run. */
  getWorkflowEventsDir(workflowSlug, runId) {
    return join(this.getRunDir(workflowSlug, runId), "workflow-events");
  }

  /** Directory for step-level events of a run. */
  getStepEventsDir(workflowSlug, runId) {
    return join(this.getRunDir(workflowSlug, runId), "step-events");
  }

  /** Directory for persisted step outputs of a run. */
  getStepOutputsDir(workflowSlug, runId) {
    return join(this.getRunDir(workflowSlug, runId), "step-outputs");
  }

  // Shared helper: mirror the directory portion of a (possibly nested)
  // stepId under `root`. A stepId with no "/" maps to `root` itself,
  // because dirname("leaf") === ".".
  #stepSubdir(root, stepId) {
    const stepDirPath = dirname(stepId);
    return stepDirPath === "." ? root : join(root, stepDirPath);
  }

  /** Directory holding events for one step (nested step ids mirrored). */
  getStepEventDir(workflowSlug, runId, stepId) {
    return this.#stepSubdir(this.getStepEventsDir(workflowSlug, runId), stepId);
  }

  /** Directory holding outputs for one step (nested step ids mirrored). */
  getStepOutputDir(workflowSlug, runId, stepId) {
    return this.#stepSubdir(this.getStepOutputsDir(workflowSlug, runId), stepId);
  }

  /** Full path of the output file for a specific step attempt. */
  getStepOutputPath(workflowSlug, runId, stepId, attemptNumber) {
    const outputDir = this.getStepOutputDir(workflowSlug, runId, stepId);
    const stepName = stepId.split("/").pop();
    return join(outputDir, `${stepName}-attempt-${attemptNumber}.json`);
  }

  /** Directory holding claim locks for one step (nested step ids mirrored). */
  getStepLockDir(workflowSlug, runId, stepId) {
    const locksDir = join(this.getRunDir(workflowSlug, runId), "locks");
    return this.#stepSubdir(locksDir, stepId);
  }

  /** Full path of the claim-lock file for a specific step attempt. */
  getStepClaimLockPath(workflowSlug, runId, stepId, attemptNumber) {
    const lockDir = this.getStepLockDir(workflowSlug, runId, stepId);
    const stepName = stepId.split("/").pop();
    return join(lockDir, `${stepName}-attempt-${attemptNumber}.lock`);
  }

  /** Directory for idempotency-key records, shared across workflows. */
  getIdempotencyDir() {
    return join(this.baseDir, ".idempotency");
  }

  /** Directory for the workflow registry, shared across workflows. */
  getRegistryDir() {
    return join(this.baseDir, ".registry");
  }

  /** Registry directory for a single workflow slug. */
  getWorkflowRegistryDir(slug) {
    return join(this.getRegistryDir(), slug);
  }
}
|
|
60
|
+
|
|
61
|
+
// src/lib/ids.ts
|
|
4
62
|
import { createHash } from "node:crypto";
|
|
63
|
+
import { getMicrosecondTimestamp } from "@cascade-flow/backend-interface";
|
|
64
|
+
// Creates a new unique run identifier.
// Format: "run_<epoch-ms>_<random base36 suffix>". Uniqueness relies on
// the millisecond timestamp combined with the random suffix.
function generateRunId() {
  const epochMs = Date.now();
  const suffix = Math.random().toString(36).substring(2, 15);
  return `run_${epochMs}_${suffix}`;
}
|
|
67
|
+
// Hashes an idempotency key to a fixed-length hex string, suitable for
// use as a filename regardless of what characters the raw key contains.
function hashIdempotencyKey(key) {
  const hasher = createHash("sha256");
  hasher.update(key);
  return hasher.digest("hex");
}
|
|
70
|
+
// Produces an event id from a microsecond timestamp, falling back to the
// current time when none is supplied. Event ids are plain decimal strings.
function generateEventId(timestamp) {
  if (timestamp !== undefined && timestamp !== null) {
    return String(timestamp);
  }
  return String(getMicrosecondTimestamp());
}
|
|
74
|
+
|
|
75
|
+
// src/lib/file-utils.ts
|
|
76
|
+
import { mkdir, writeFile, open as openFile, rename, unlink } from "node:fs/promises";
|
|
77
|
+
import { dirname as dirname2 } from "node:path";
|
|
78
|
+
import { createHash as createHash2 } from "node:crypto";
|
|
79
|
+
// Atomically writes `data` as pretty-printed JSON to `filePath`.
// Protocol: write to a uniquely named temp file in the same directory,
// datasync the file contents, rename over the destination, then datasync
// the containing directory so the rename entry itself is durable.
// On any failure the temp file is removed (best effort) and the original
// error is rethrown.
async function writeJsonAtomic(filePath, data) {
  const dir = dirname2(filePath);
  await mkdir(dir, { recursive: true });
  const jsonString = JSON.stringify(data, null, 2);
  // Unique temp name: hashing path + time + random avoids collisions
  // between concurrent writers targeting the same file.
  const randomSuffix = createHash2("sha256").update(`${filePath}-${Date.now()}-${Math.random()}`).digest("hex").substring(0, 16);
  const tempPath = `${filePath}.tmp.${randomSuffix}`;
  let tempHandle;
  try {
    await writeFile(tempPath, jsonString, "utf-8");
    // Reopen and datasync so the bytes reach stable storage before rename.
    tempHandle = await openFile(tempPath, "r+");
    await tempHandle.datasync();
    await tempHandle.close();
    tempHandle = undefined; // cleared so the catch block won't double-close
    await rename(tempPath, filePath); // atomic replace on POSIX within one filesystem
    // Sync the parent directory so the new directory entry is durable too.
    const dirHandle = await openFile(dir, "r");
    await dirHandle.datasync();
    await dirHandle.close();
  } catch (error) {
    if (tempHandle) {
      await tempHandle.close().catch(() => {});
    }
    try {
      await unlink(tempPath); // best effort; may already have been renamed
    } catch {}
    throw error;
  }
}
|
|
106
|
+
// Filename for a workflow-level event: "<eventId>-<type>.json".
function getWorkflowEventFilename(event) {
  const { eventId, type } = event;
  return `${eventId}-${type}.json`;
}
|
|
109
|
+
// Filename for a step-level event: "<eventId>-<stepName>-<type>.json",
// where stepName is the last "/"-separated segment of the step id (the
// directory portion of nested step ids lives in the path, not the name).
function getStepEventFilename(event) {
  const segments = event.stepId.split("/");
  const stepName = segments[segments.length - 1];
  return `${event.eventId}-${stepName}-${event.type}.json`;
}
|
|
113
|
+
|
|
114
|
+
// src/index.ts
|
|
5
115
|
import {
|
|
6
116
|
Backend,
|
|
7
|
-
|
|
117
|
+
WorkflowVersionSchema,
|
|
8
118
|
eventSchema,
|
|
9
119
|
safeSerialize,
|
|
10
120
|
projectStepRecord,
|
|
@@ -12,7 +122,7 @@ import {
|
|
|
12
122
|
projectRunStateFromEvents,
|
|
13
123
|
extractLogsFromEvents,
|
|
14
124
|
getCurrentAttemptNumber,
|
|
15
|
-
getMicrosecondTimestamp,
|
|
125
|
+
getMicrosecondTimestamp as getMicrosecondTimestamp2,
|
|
16
126
|
computeErrorAnalysis,
|
|
17
127
|
computeRetryAnalysis,
|
|
18
128
|
computeSchedulingLatency,
|
|
@@ -27,106 +137,42 @@ import { projectStepState as projectStepState2, projectRunStateFromEvents as pro
|
|
|
27
137
|
|
|
28
138
|
class FileSystemBackend extends Backend {
|
|
29
139
|
baseDir;
|
|
140
|
+
paths;
|
|
30
141
|
constructor(baseDir = "./.runs") {
|
|
31
142
|
super();
|
|
32
143
|
this.baseDir = baseDir;
|
|
144
|
+
this.paths = new PathHelper(baseDir);
|
|
33
145
|
}
|
|
34
146
|
async initialize() {}
|
|
35
|
-
getRunDir(workflowSlug, runId) {
|
|
36
|
-
return join(this.baseDir, workflowSlug, runId);
|
|
37
|
-
}
|
|
38
|
-
getWorkflowEventsDir(workflowSlug, runId) {
|
|
39
|
-
return join(this.getRunDir(workflowSlug, runId), "workflow-events");
|
|
40
|
-
}
|
|
41
|
-
getStepEventsDir(workflowSlug, runId) {
|
|
42
|
-
return join(this.getRunDir(workflowSlug, runId), "step-events");
|
|
43
|
-
}
|
|
44
|
-
getStepOutputsDir(workflowSlug, runId) {
|
|
45
|
-
return join(this.getRunDir(workflowSlug, runId), "step-outputs");
|
|
46
|
-
}
|
|
47
147
|
getStepOutputPath(workflowSlug, runId, stepId, attemptNumber) {
|
|
48
|
-
return
|
|
49
|
-
}
|
|
50
|
-
getStepsDir(workflowSlug, runId) {
|
|
51
|
-
return join(this.getRunDir(workflowSlug, runId), "steps");
|
|
52
|
-
}
|
|
53
|
-
getStepFile(workflowSlug, runId, stepName) {
|
|
54
|
-
return join(this.getStepsDir(workflowSlug, runId), `${stepName}.json`);
|
|
55
|
-
}
|
|
56
|
-
getStepLogsFile(workflowSlug, runId, stepName) {
|
|
57
|
-
return join(this.getStepsDir(workflowSlug, runId), `${stepName}.logs.json`);
|
|
58
|
-
}
|
|
59
|
-
getIdempotencyDir() {
|
|
60
|
-
return join(this.baseDir, ".idempotency");
|
|
61
|
-
}
|
|
62
|
-
generateRunId() {
|
|
63
|
-
return `run_${Date.now()}_${Math.random().toString(36).substring(2, 15)}`;
|
|
64
|
-
}
|
|
65
|
-
hashIdempotencyKey(key) {
|
|
66
|
-
return createHash("sha256").update(key).digest("hex");
|
|
67
|
-
}
|
|
68
|
-
generateEventId(timestamp) {
|
|
69
|
-
const ts = timestamp ?? getMicrosecondTimestamp();
|
|
70
|
-
return `${ts}`;
|
|
71
|
-
}
|
|
72
|
-
getWorkflowEventFilename(event) {
|
|
73
|
-
return `${event.eventId}-${event.type}.json`;
|
|
74
|
-
}
|
|
75
|
-
getStepEventFilename(event) {
|
|
76
|
-
return `${event.eventId}-${event.stepId}-${event.type}.json`;
|
|
77
|
-
}
|
|
78
|
-
getStepClaimLockPath(workflowSlug, runId, stepId, attemptNumber) {
|
|
79
|
-
return join(this.getRunDir(workflowSlug, runId), "locks", `${stepId}-attempt-${attemptNumber}.lock`);
|
|
80
|
-
}
|
|
81
|
-
async writeJsonAtomic(filePath, data) {
|
|
82
|
-
const dir = dirname(filePath);
|
|
83
|
-
await mkdir(dir, { recursive: true });
|
|
84
|
-
const jsonString = JSON.stringify(data, null, 2);
|
|
85
|
-
await writeFile(filePath, jsonString, "utf-8");
|
|
86
|
-
}
|
|
87
|
-
async readStepRecord(workflowSlug, runId, stepName) {
|
|
88
|
-
try {
|
|
89
|
-
const filePath = this.getStepFile(workflowSlug, runId, stepName);
|
|
90
|
-
const content = await readFile(filePath, "utf-8");
|
|
91
|
-
const parsed = JSON.parse(content);
|
|
92
|
-
return StepRecordSchema.parse(parsed);
|
|
93
|
-
} catch {
|
|
94
|
-
return null;
|
|
95
|
-
}
|
|
96
|
-
}
|
|
97
|
-
createStateChange(status, message) {
|
|
98
|
-
return {
|
|
99
|
-
status,
|
|
100
|
-
timestamp: getMicrosecondTimestamp(),
|
|
101
|
-
message
|
|
102
|
-
};
|
|
148
|
+
return this.paths.getStepOutputPath(workflowSlug, runId, stepId, attemptNumber);
|
|
103
149
|
}
|
|
104
150
|
// Creates the on-disk directory skeleton for a new run: the
// workflow-events and step-events directories. `recursive: true` also
// creates the missing <baseDir>/<slug>/<runId> parents, and makes the
// call idempotent for already-initialized runs.
async initializeRun(workflowSlug, runId) {
  const workflowEventsDir = this.paths.getWorkflowEventsDir(workflowSlug, runId);
  const stepEventsDir = this.paths.getStepEventsDir(workflowSlug, runId);
  await mkdir2(workflowEventsDir, { recursive: true });
  await mkdir2(stepEventsDir, { recursive: true });
}
|
|
110
156
|
async appendEvent(workflowSlug, runId, event) {
|
|
111
157
|
if (!event.eventId) {
|
|
112
|
-
event.eventId =
|
|
158
|
+
event.eventId = generateEventId();
|
|
113
159
|
}
|
|
114
160
|
if (!event.timestampUs) {
|
|
115
|
-
event.timestampUs =
|
|
161
|
+
event.timestampUs = getMicrosecondTimestamp2();
|
|
116
162
|
}
|
|
117
163
|
eventSchema.parse(event);
|
|
118
|
-
const eventsDir = event.category === "workflow" ? this.getWorkflowEventsDir(workflowSlug, runId) : this.
|
|
164
|
+
const eventsDir = event.category === "workflow" ? this.paths.getWorkflowEventsDir(workflowSlug, runId) : this.paths.getStepEventDir(workflowSlug, runId, event.stepId);
|
|
119
165
|
const getFilename = (evt) => {
|
|
120
166
|
if (evt.category === "workflow") {
|
|
121
|
-
return
|
|
167
|
+
return getWorkflowEventFilename(evt);
|
|
122
168
|
} else {
|
|
123
|
-
return
|
|
169
|
+
return getStepEventFilename(evt);
|
|
124
170
|
}
|
|
125
171
|
};
|
|
126
|
-
await
|
|
172
|
+
await mkdir2(eventsDir, { recursive: true });
|
|
127
173
|
let timestamp = parseInt(event.eventId);
|
|
128
174
|
let filename = getFilename(event);
|
|
129
|
-
let filePath =
|
|
175
|
+
let filePath = join2(eventsDir, filename);
|
|
130
176
|
while (true) {
|
|
131
177
|
try {
|
|
132
178
|
await access(filePath);
|
|
@@ -134,12 +180,12 @@ class FileSystemBackend extends Backend {
|
|
|
134
180
|
event.eventId = `${timestamp}`;
|
|
135
181
|
event.timestampUs = timestamp;
|
|
136
182
|
filename = getFilename(event);
|
|
137
|
-
filePath =
|
|
183
|
+
filePath = join2(eventsDir, filename);
|
|
138
184
|
} catch {
|
|
139
185
|
break;
|
|
140
186
|
}
|
|
141
187
|
}
|
|
142
|
-
await
|
|
188
|
+
await writeJsonAtomic(filePath, event);
|
|
143
189
|
}
|
|
144
190
|
async loadEvents(workflowSlug, runId, options) {
|
|
145
191
|
try {
|
|
@@ -147,28 +193,26 @@ class FileSystemBackend extends Backend {
|
|
|
147
193
|
const directories = [];
|
|
148
194
|
if (!options?.category || options.category === "workflow") {
|
|
149
195
|
directories.push({
|
|
150
|
-
dir: this.getWorkflowEventsDir(workflowSlug, runId),
|
|
196
|
+
dir: this.paths.getWorkflowEventsDir(workflowSlug, runId),
|
|
151
197
|
category: "workflow"
|
|
152
198
|
});
|
|
153
199
|
}
|
|
154
200
|
if (!options?.category || options.category === "step") {
|
|
155
201
|
directories.push({
|
|
156
|
-
dir: this.getStepEventsDir(workflowSlug, runId),
|
|
202
|
+
dir: this.paths.getStepEventsDir(workflowSlug, runId),
|
|
157
203
|
category: "step"
|
|
158
204
|
});
|
|
159
205
|
}
|
|
160
206
|
for (const { dir, category } of directories) {
|
|
161
207
|
try {
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
const
|
|
166
|
-
const
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
}
|
|
171
|
-
} else {
|
|
208
|
+
if (category === "step") {
|
|
209
|
+
await this.loadEventsRecursive(dir, events, options?.stepId);
|
|
210
|
+
} else {
|
|
211
|
+
const files = await readdir(dir);
|
|
212
|
+
const eventFiles = files.filter((f) => f.endsWith(".json"));
|
|
213
|
+
for (const file of eventFiles) {
|
|
214
|
+
const content = await readFile(join2(dir, file), "utf-8");
|
|
215
|
+
const event = eventSchema.parse(JSON.parse(content));
|
|
172
216
|
events.push(event);
|
|
173
217
|
}
|
|
174
218
|
}
|
|
@@ -187,13 +231,60 @@ class FileSystemBackend extends Backend {
|
|
|
187
231
|
return [];
|
|
188
232
|
}
|
|
189
233
|
}
|
|
234
|
+
// Recursively walks `dir`, parsing every *.json file as an event and
// appending it to `events` (mutated in place). Subdirectories mirror
// nested step ids and are descended into. When `stepIdFilter` is given,
// only step-category events with that exact stepId are collected.
// A missing or unreadable directory yields no events: each level wraps
// its own work in a silent catch. NOTE(review): a JSON parse or schema
// failure on one file also silently abandons the remaining entries of
// that directory level (parent levels continue) — confirm this
// best-effort behavior is intended.
async loadEventsRecursive(dir, events, stepIdFilter) {
  try {
    const entries = await readdir(dir, { withFileTypes: true });
    for (const entry of entries) {
      const fullPath = join2(dir, entry.name);
      if (entry.isDirectory()) {
        await this.loadEventsRecursive(fullPath, events, stepIdFilter);
      } else if (entry.name.endsWith(".json")) {
        const content = await readFile(fullPath, "utf-8");
        const event = eventSchema.parse(JSON.parse(content));
        if (stepIdFilter) {
          if (event.category === "step" && event.stepId === stepIdFilter) {
            events.push(event);
          }
        } else {
          events.push(event);
        }
      }
    }
  } catch (err) {}
}
|
|
255
|
+
// Scans a run's step-events tree and returns the Set of distinct stepIds
// found in step-category event files. Unlike loadEventsRecursive, files
// are parsed with plain JSON.parse (no schema validation) and each file
// that fails to read or parse is skipped individually; an unreadable
// directory silently ends that subtree's scan.
async discoverStepIdsFromEvents(stepEventsDir) {
  const stepIds = new Set;
  const scanDirectory = async (dir) => {
    try {
      const entries = await readdir(dir, { withFileTypes: true });
      for (const entry of entries) {
        const fullPath = join2(dir, entry.name);
        if (entry.isDirectory()) {
          // Nested step ids are mirrored as subdirectories — recurse.
          await scanDirectory(fullPath);
        } else if (entry.name.endsWith(".json")) {
          try {
            const content = await readFile(fullPath, "utf-8");
            const event = JSON.parse(content);
            if (event.category === "step" && event.stepId) {
              stepIds.add(event.stepId);
            }
          } catch {
            // Per-file best effort: skip malformed files, keep scanning.
            continue;
          }
        }
      }
    } catch {}
  };
  await scanDirectory(stepEventsDir);
  return stepIds;
}
|
|
190
281
|
async saveStepStart(workflowSlug, runId, stepId, workerId, metadata) {
|
|
191
282
|
const events = await this.loadEvents(workflowSlug, runId, { category: "step", stepId });
|
|
192
283
|
const attemptNumber = getCurrentAttemptNumber(events) + 1;
|
|
193
|
-
const timestamp =
|
|
284
|
+
const timestamp = getMicrosecondTimestamp2();
|
|
194
285
|
const event = {
|
|
195
286
|
category: "step",
|
|
196
|
-
eventId:
|
|
287
|
+
eventId: generateEventId(timestamp),
|
|
197
288
|
timestampUs: timestamp,
|
|
198
289
|
workflowSlug,
|
|
199
290
|
runId,
|
|
@@ -216,7 +307,7 @@ class FileSystemBackend extends Backend {
|
|
|
216
307
|
const logTimestamp = log.timestamp;
|
|
217
308
|
const logEvent = {
|
|
218
309
|
category: "step",
|
|
219
|
-
eventId:
|
|
310
|
+
eventId: generateEventId(logTimestamp),
|
|
220
311
|
timestampUs: logTimestamp,
|
|
221
312
|
workflowSlug,
|
|
222
313
|
runId,
|
|
@@ -231,10 +322,10 @@ class FileSystemBackend extends Backend {
|
|
|
231
322
|
}
|
|
232
323
|
const serialized = safeSerialize(output);
|
|
233
324
|
const outputString = serialized.success ? serialized.data : serialized.fallback;
|
|
234
|
-
const timestamp =
|
|
325
|
+
const timestamp = getMicrosecondTimestamp2();
|
|
235
326
|
const event = {
|
|
236
327
|
category: "step",
|
|
237
|
-
eventId:
|
|
328
|
+
eventId: generateEventId(timestamp),
|
|
238
329
|
timestampUs: timestamp,
|
|
239
330
|
workflowSlug,
|
|
240
331
|
runId,
|
|
@@ -248,10 +339,10 @@ class FileSystemBackend extends Backend {
|
|
|
248
339
|
await this.appendEvent(workflowSlug, runId, event);
|
|
249
340
|
}
|
|
250
341
|
async saveStepFailed(workflowSlug, runId, stepId, error, metadata) {
|
|
251
|
-
const now =
|
|
342
|
+
const now = getMicrosecondTimestamp2();
|
|
252
343
|
const event = {
|
|
253
344
|
category: "step",
|
|
254
|
-
eventId:
|
|
345
|
+
eventId: generateEventId(now),
|
|
255
346
|
timestampUs: now,
|
|
256
347
|
workflowSlug,
|
|
257
348
|
runId,
|
|
@@ -268,13 +359,13 @@ class FileSystemBackend extends Backend {
|
|
|
268
359
|
await this.appendEvent(workflowSlug, runId, event);
|
|
269
360
|
}
|
|
270
361
|
async saveStepFailedAndScheduleRetry(workflowSlug, runId, stepId, error, failureMetadata, scheduleMetadata) {
|
|
271
|
-
const failedTimestamp =
|
|
362
|
+
const failedTimestamp = getMicrosecondTimestamp2();
|
|
272
363
|
const retryingTimestamp = failedTimestamp + 1;
|
|
273
364
|
const scheduledTimestamp = failedTimestamp + 2;
|
|
274
365
|
const failedEvent = {
|
|
275
366
|
category: "step",
|
|
276
367
|
type: "StepFailed",
|
|
277
|
-
eventId:
|
|
368
|
+
eventId: generateEventId(failedTimestamp),
|
|
278
369
|
timestampUs: failedTimestamp,
|
|
279
370
|
workflowSlug,
|
|
280
371
|
runId,
|
|
@@ -291,7 +382,7 @@ class FileSystemBackend extends Backend {
|
|
|
291
382
|
const retryingEvent = {
|
|
292
383
|
category: "step",
|
|
293
384
|
type: "StepRetrying",
|
|
294
|
-
eventId:
|
|
385
|
+
eventId: generateEventId(retryingTimestamp),
|
|
295
386
|
timestampUs: retryingTimestamp,
|
|
296
387
|
workflowSlug,
|
|
297
388
|
runId,
|
|
@@ -305,7 +396,7 @@ class FileSystemBackend extends Backend {
|
|
|
305
396
|
const scheduledEvent = {
|
|
306
397
|
category: "step",
|
|
307
398
|
type: "StepScheduled",
|
|
308
|
-
eventId:
|
|
399
|
+
eventId: generateEventId(scheduledTimestamp),
|
|
309
400
|
timestampUs: scheduledTimestamp,
|
|
310
401
|
workflowSlug,
|
|
311
402
|
runId,
|
|
@@ -318,10 +409,10 @@ class FileSystemBackend extends Backend {
|
|
|
318
409
|
await this.appendEvent(workflowSlug, runId, scheduledEvent);
|
|
319
410
|
}
|
|
320
411
|
async saveStepSkipped(workflowSlug, runId, stepId, metadata) {
|
|
321
|
-
const now =
|
|
412
|
+
const now = getMicrosecondTimestamp2();
|
|
322
413
|
const event = {
|
|
323
414
|
category: "step",
|
|
324
|
-
eventId:
|
|
415
|
+
eventId: generateEventId(now),
|
|
325
416
|
timestampUs: now,
|
|
326
417
|
workflowSlug,
|
|
327
418
|
runId,
|
|
@@ -337,10 +428,10 @@ class FileSystemBackend extends Backend {
|
|
|
337
428
|
await this.appendEvent(workflowSlug, runId, event);
|
|
338
429
|
}
|
|
339
430
|
async saveStepScheduled(workflowSlug, runId, stepId, metadata) {
|
|
340
|
-
const now =
|
|
431
|
+
const now = getMicrosecondTimestamp2();
|
|
341
432
|
const event = {
|
|
342
433
|
category: "step",
|
|
343
|
-
eventId:
|
|
434
|
+
eventId: generateEventId(now),
|
|
344
435
|
timestampUs: now,
|
|
345
436
|
workflowSlug,
|
|
346
437
|
runId,
|
|
@@ -354,10 +445,10 @@ class FileSystemBackend extends Backend {
|
|
|
354
445
|
await this.appendEvent(workflowSlug, runId, event);
|
|
355
446
|
}
|
|
356
447
|
async saveStepHeartbeat(workflowSlug, runId, stepId, workerId, attemptNumber) {
|
|
357
|
-
const now =
|
|
448
|
+
const now = getMicrosecondTimestamp2();
|
|
358
449
|
const event = {
|
|
359
450
|
category: "step",
|
|
360
|
-
eventId:
|
|
451
|
+
eventId: generateEventId(now),
|
|
361
452
|
timestampUs: now,
|
|
362
453
|
workflowSlug,
|
|
363
454
|
runId,
|
|
@@ -369,10 +460,10 @@ class FileSystemBackend extends Backend {
|
|
|
369
460
|
await this.appendEvent(workflowSlug, runId, event);
|
|
370
461
|
}
|
|
371
462
|
async saveStepReclaimed(workflowSlug, runId, stepId, metadata) {
|
|
372
|
-
const now =
|
|
463
|
+
const now = getMicrosecondTimestamp2();
|
|
373
464
|
const event = {
|
|
374
465
|
category: "step",
|
|
375
|
-
eventId:
|
|
466
|
+
eventId: generateEventId(now),
|
|
376
467
|
timestampUs: now,
|
|
377
468
|
workflowSlug,
|
|
378
469
|
runId,
|
|
@@ -390,10 +481,10 @@ class FileSystemBackend extends Backend {
|
|
|
390
481
|
async saveWorkflowComplete(workflowSlug, runId, output, metadata) {
|
|
391
482
|
const serialized = safeSerialize(output);
|
|
392
483
|
const outputString = serialized.success ? serialized.data : serialized.fallback;
|
|
393
|
-
const timestamp =
|
|
484
|
+
const timestamp = getMicrosecondTimestamp2();
|
|
394
485
|
const event = {
|
|
395
486
|
category: "workflow",
|
|
396
|
-
eventId:
|
|
487
|
+
eventId: generateEventId(timestamp),
|
|
397
488
|
timestampUs: timestamp,
|
|
398
489
|
workflowSlug,
|
|
399
490
|
runId,
|
|
@@ -441,7 +532,7 @@ class FileSystemBackend extends Backend {
|
|
|
441
532
|
}
|
|
442
533
|
async runExists(workflowSlug, runId) {
|
|
443
534
|
try {
|
|
444
|
-
const runDir = this.getRunDir(workflowSlug, runId);
|
|
535
|
+
const runDir = this.paths.getRunDir(workflowSlug, runId);
|
|
445
536
|
await access(runDir);
|
|
446
537
|
return true;
|
|
447
538
|
} catch {
|
|
@@ -449,14 +540,15 @@ class FileSystemBackend extends Backend {
|
|
|
449
540
|
}
|
|
450
541
|
}
|
|
451
542
|
async saveWorkflowStart(workflowSlug, runId, metadata) {
|
|
452
|
-
const timestamp =
|
|
543
|
+
const timestamp = getMicrosecondTimestamp2();
|
|
453
544
|
const event = {
|
|
454
545
|
category: "workflow",
|
|
455
|
-
eventId:
|
|
546
|
+
eventId: generateEventId(timestamp),
|
|
456
547
|
timestampUs: timestamp,
|
|
457
548
|
workflowSlug,
|
|
458
549
|
runId,
|
|
459
550
|
type: "WorkflowStarted",
|
|
551
|
+
versionId: metadata.versionId,
|
|
460
552
|
workflowAttemptNumber: metadata.workflowAttemptNumber,
|
|
461
553
|
hasInputSchema: metadata.hasInputSchema,
|
|
462
554
|
hasInput: metadata.hasInput
|
|
@@ -464,10 +556,10 @@ class FileSystemBackend extends Backend {
|
|
|
464
556
|
await this.appendEvent(workflowSlug, runId, event);
|
|
465
557
|
}
|
|
466
558
|
async saveWorkflowInputValidation(workflowSlug, runId, result) {
|
|
467
|
-
const timestamp =
|
|
559
|
+
const timestamp = getMicrosecondTimestamp2();
|
|
468
560
|
const event = {
|
|
469
561
|
category: "workflow",
|
|
470
|
-
eventId:
|
|
562
|
+
eventId: generateEventId(timestamp),
|
|
471
563
|
timestampUs: timestamp,
|
|
472
564
|
workflowSlug,
|
|
473
565
|
runId,
|
|
@@ -477,10 +569,10 @@ class FileSystemBackend extends Backend {
|
|
|
477
569
|
await this.appendEvent(workflowSlug, runId, event);
|
|
478
570
|
}
|
|
479
571
|
async saveWorkflowFailed(workflowSlug, runId, error, metadata, failureReason) {
|
|
480
|
-
const timestamp =
|
|
572
|
+
const timestamp = getMicrosecondTimestamp2();
|
|
481
573
|
const event = {
|
|
482
574
|
category: "workflow",
|
|
483
|
-
eventId:
|
|
575
|
+
eventId: generateEventId(timestamp),
|
|
484
576
|
timestampUs: timestamp,
|
|
485
577
|
workflowSlug,
|
|
486
578
|
runId,
|
|
@@ -495,10 +587,10 @@ class FileSystemBackend extends Backend {
|
|
|
495
587
|
await this.appendEvent(workflowSlug, runId, event);
|
|
496
588
|
}
|
|
497
589
|
async saveWorkflowResumed(workflowSlug, runId, metadata) {
|
|
498
|
-
const timestamp =
|
|
590
|
+
const timestamp = getMicrosecondTimestamp2();
|
|
499
591
|
const event = {
|
|
500
592
|
category: "workflow",
|
|
501
|
-
eventId:
|
|
593
|
+
eventId: generateEventId(timestamp),
|
|
502
594
|
timestampUs: timestamp,
|
|
503
595
|
workflowSlug,
|
|
504
596
|
runId,
|
|
@@ -510,10 +602,10 @@ class FileSystemBackend extends Backend {
|
|
|
510
602
|
await this.appendEvent(workflowSlug, runId, event);
|
|
511
603
|
}
|
|
512
604
|
async saveWorkflowCancelled(workflowSlug, runId, metadata) {
|
|
513
|
-
const timestamp =
|
|
605
|
+
const timestamp = getMicrosecondTimestamp2();
|
|
514
606
|
const event = {
|
|
515
607
|
category: "workflow",
|
|
516
|
-
eventId:
|
|
608
|
+
eventId: generateEventId(timestamp),
|
|
517
609
|
timestampUs: timestamp,
|
|
518
610
|
workflowSlug,
|
|
519
611
|
runId,
|
|
@@ -526,10 +618,10 @@ class FileSystemBackend extends Backend {
|
|
|
526
618
|
await this.appendEvent(workflowSlug, runId, event);
|
|
527
619
|
}
|
|
528
620
|
async saveWorkflowRetryStarted(workflowSlug, runId, metadata) {
|
|
529
|
-
const timestamp =
|
|
621
|
+
const timestamp = getMicrosecondTimestamp2();
|
|
530
622
|
const event = {
|
|
531
623
|
category: "workflow",
|
|
532
|
-
eventId:
|
|
624
|
+
eventId: generateEventId(timestamp),
|
|
533
625
|
timestampUs: timestamp,
|
|
534
626
|
workflowSlug,
|
|
535
627
|
runId,
|
|
@@ -570,14 +662,15 @@ class FileSystemBackend extends Backend {
|
|
|
570
662
|
return failedSteps;
|
|
571
663
|
}
|
|
572
664
|
async saveRunSubmitted(workflowSlug, runId, metadata) {
|
|
573
|
-
const timestamp =
|
|
665
|
+
const timestamp = getMicrosecondTimestamp2();
|
|
574
666
|
const event = {
|
|
575
667
|
category: "workflow",
|
|
576
|
-
eventId:
|
|
668
|
+
eventId: generateEventId(timestamp),
|
|
577
669
|
timestampUs: timestamp,
|
|
578
670
|
workflowSlug,
|
|
579
671
|
runId,
|
|
580
672
|
type: "RunSubmitted",
|
|
673
|
+
versionId: metadata.versionId,
|
|
581
674
|
availableAtUs: metadata.availableAt,
|
|
582
675
|
priority: metadata.priority,
|
|
583
676
|
input: metadata.input,
|
|
@@ -591,35 +684,42 @@ class FileSystemBackend extends Backend {
|
|
|
591
684
|
}
|
|
592
685
|
async submitRun(submission) {
|
|
593
686
|
if (submission.idempotencyKey) {
|
|
594
|
-
const hash =
|
|
595
|
-
const idempotencyFile =
|
|
687
|
+
const hash = hashIdempotencyKey(submission.idempotencyKey);
|
|
688
|
+
const idempotencyFile = join2(this.paths.getIdempotencyDir(), `${hash}.json`);
|
|
596
689
|
try {
|
|
597
690
|
const content = await readFile(idempotencyFile, "utf-8");
|
|
598
691
|
const existing = JSON.parse(content);
|
|
599
692
|
return { runId: existing.runId, isNew: false };
|
|
600
693
|
} catch {}
|
|
601
694
|
}
|
|
602
|
-
const runId = submission.runId ||
|
|
603
|
-
const now =
|
|
695
|
+
const runId = submission.runId || generateRunId();
|
|
696
|
+
const now = getMicrosecondTimestamp2();
|
|
604
697
|
const availableAt = submission.availableAt || now;
|
|
605
698
|
const priority = submission.priority || 0;
|
|
606
699
|
await this.initializeRun(submission.workflowSlug, runId);
|
|
700
|
+
const workflowMetadata = await this.getWorkflowMetadata(submission.workflowSlug);
|
|
701
|
+
const hasInputSchema = !!workflowMetadata?.inputSchemaJSON;
|
|
702
|
+
const currentVersion = await this.getCurrentWorkflowVersion(submission.workflowSlug);
|
|
703
|
+
if (!currentVersion) {
|
|
704
|
+
throw new Error(`Workflow ${submission.workflowSlug} not registered. Please ensure the worker has started and registered workflows.`);
|
|
705
|
+
}
|
|
607
706
|
await this.saveRunSubmitted(submission.workflowSlug, runId, {
|
|
707
|
+
versionId: currentVersion.versionId,
|
|
608
708
|
availableAt,
|
|
609
709
|
priority,
|
|
610
710
|
input: submission.input !== undefined ? JSON.stringify(submission.input) : undefined,
|
|
611
|
-
hasInputSchema
|
|
711
|
+
hasInputSchema,
|
|
612
712
|
timeout: submission.timeout,
|
|
613
713
|
idempotencyKey: submission.idempotencyKey,
|
|
614
714
|
metadata: submission.metadata,
|
|
615
715
|
tags: submission.tags
|
|
616
716
|
});
|
|
617
717
|
if (submission.idempotencyKey) {
|
|
618
|
-
const hash =
|
|
619
|
-
const idempotencyDir = this.getIdempotencyDir();
|
|
620
|
-
await
|
|
621
|
-
const idempotencyFile =
|
|
622
|
-
await
|
|
718
|
+
const hash = hashIdempotencyKey(submission.idempotencyKey);
|
|
719
|
+
const idempotencyDir = this.paths.getIdempotencyDir();
|
|
720
|
+
await mkdir2(idempotencyDir, { recursive: true });
|
|
721
|
+
const idempotencyFile = join2(idempotencyDir, `${hash}.json`);
|
|
722
|
+
await writeJsonAtomic(idempotencyFile, { runId, createdAt: now });
|
|
623
723
|
}
|
|
624
724
|
return { runId, isNew: true };
|
|
625
725
|
}
|
|
@@ -632,7 +732,7 @@ class FileSystemBackend extends Backend {
|
|
|
632
732
|
continue;
|
|
633
733
|
if (options?.workflowSlug && workflowSlug !== options.workflowSlug)
|
|
634
734
|
continue;
|
|
635
|
-
const workflowDir =
|
|
735
|
+
const workflowDir = join2(this.baseDir, workflowSlug);
|
|
636
736
|
const runDirs = await readdir(workflowDir);
|
|
637
737
|
for (const runId of runDirs) {
|
|
638
738
|
try {
|
|
@@ -667,7 +767,7 @@ class FileSystemBackend extends Backend {
|
|
|
667
767
|
for (const workflow of workflows) {
|
|
668
768
|
if (workflow.startsWith("."))
|
|
669
769
|
continue;
|
|
670
|
-
const runDir = this.getRunDir(workflow, runId);
|
|
770
|
+
const runDir = this.paths.getRunDir(workflow, runId);
|
|
671
771
|
try {
|
|
672
772
|
await access(runDir);
|
|
673
773
|
} catch {
|
|
@@ -677,7 +777,7 @@ class FileSystemBackend extends Backend {
|
|
|
677
777
|
if (events.length === 0)
|
|
678
778
|
continue;
|
|
679
779
|
const state = projectRunStateFromEvents(events, workflow);
|
|
680
|
-
const duration =
|
|
780
|
+
const duration = getMicrosecondTimestamp2() - state.createdAt;
|
|
681
781
|
const stepRecords = await this.loadRun(workflow, runId);
|
|
682
782
|
const completedSteps = stepRecords.filter((r) => r.status === "completed").length;
|
|
683
783
|
await this.saveWorkflowCancelled(workflow, runId, {
|
|
@@ -695,7 +795,7 @@ class FileSystemBackend extends Backend {
|
|
|
695
795
|
for (const workflowSlug of workflows) {
|
|
696
796
|
if (workflowSlug.startsWith("."))
|
|
697
797
|
continue;
|
|
698
|
-
const runDir = this.getRunDir(workflowSlug, runId);
|
|
798
|
+
const runDir = this.paths.getRunDir(workflowSlug, runId);
|
|
699
799
|
try {
|
|
700
800
|
await access(runDir);
|
|
701
801
|
} catch {
|
|
@@ -718,7 +818,7 @@ class FileSystemBackend extends Backend {
|
|
|
718
818
|
for (const workflowSlug of workflows) {
|
|
719
819
|
if (workflowSlug.startsWith("."))
|
|
720
820
|
continue;
|
|
721
|
-
const workflowDir =
|
|
821
|
+
const workflowDir = join2(this.baseDir, workflowSlug);
|
|
722
822
|
const runDirs = await readdir(workflowDir);
|
|
723
823
|
for (const runId of runDirs) {
|
|
724
824
|
try {
|
|
@@ -741,7 +841,7 @@ class FileSystemBackend extends Backend {
|
|
|
741
841
|
}
|
|
742
842
|
}
|
|
743
843
|
async listScheduledSteps(options) {
|
|
744
|
-
const now =
|
|
844
|
+
const now = getMicrosecondTimestamp2();
|
|
745
845
|
const availableBefore = options?.availableBefore || now;
|
|
746
846
|
const scheduledSteps = [];
|
|
747
847
|
try {
|
|
@@ -751,25 +851,17 @@ class FileSystemBackend extends Backend {
|
|
|
751
851
|
continue;
|
|
752
852
|
if (options?.workflowSlug && workflowSlug !== options.workflowSlug)
|
|
753
853
|
continue;
|
|
754
|
-
const workflowDir =
|
|
854
|
+
const workflowDir = join2(this.baseDir, workflowSlug);
|
|
755
855
|
const runDirs = await readdir(workflowDir);
|
|
756
856
|
for (const runId of runDirs) {
|
|
757
857
|
try {
|
|
758
|
-
const stepEventsDir = this.getStepEventsDir(workflowSlug, runId);
|
|
858
|
+
const stepEventsDir = this.paths.getStepEventsDir(workflowSlug, runId);
|
|
759
859
|
try {
|
|
760
860
|
await access(stepEventsDir);
|
|
761
861
|
} catch {
|
|
762
862
|
continue;
|
|
763
863
|
}
|
|
764
|
-
const
|
|
765
|
-
const stepIds = new Set;
|
|
766
|
-
for (const file of eventFiles) {
|
|
767
|
-
const parts = file.replace(".json", "").split("-");
|
|
768
|
-
if (parts.length >= 3) {
|
|
769
|
-
const stepId = parts.slice(1, -1).join("-");
|
|
770
|
-
stepIds.add(stepId);
|
|
771
|
-
}
|
|
772
|
-
}
|
|
864
|
+
const stepIds = await this.discoverStepIdsFromEvents(stepEventsDir);
|
|
773
865
|
for (const stepId of stepIds) {
|
|
774
866
|
const events = await this.loadEvents(workflowSlug, runId, { category: "step", stepId });
|
|
775
867
|
if (events.length === 0)
|
|
@@ -802,7 +894,7 @@ class FileSystemBackend extends Backend {
|
|
|
802
894
|
if (events.length === 0)
|
|
803
895
|
return false;
|
|
804
896
|
const state = projectStepState(events, workflowSlug);
|
|
805
|
-
const now =
|
|
897
|
+
const now = getMicrosecondTimestamp2();
|
|
806
898
|
return state.status === "scheduled" && state.availableAt !== undefined && state.availableAt <= now;
|
|
807
899
|
} catch {
|
|
808
900
|
return false;
|
|
@@ -813,17 +905,17 @@ class FileSystemBackend extends Backend {
|
|
|
813
905
|
if (initialEvents.length === 0) {
|
|
814
906
|
return null;
|
|
815
907
|
}
|
|
816
|
-
const now =
|
|
908
|
+
const now = getMicrosecondTimestamp2();
|
|
817
909
|
const initialState = projectStepState(initialEvents, workflowSlug);
|
|
818
910
|
if (initialState.status !== "scheduled" || initialState.availableAt === undefined || initialState.availableAt > now) {
|
|
819
911
|
return null;
|
|
820
912
|
}
|
|
821
913
|
const attemptNumber = initialState.attemptNumber;
|
|
822
|
-
const lockPath = this.getStepClaimLockPath(workflowSlug, runId, stepId, attemptNumber);
|
|
823
|
-
await
|
|
914
|
+
const lockPath = this.paths.getStepClaimLockPath(workflowSlug, runId, stepId, attemptNumber);
|
|
915
|
+
await mkdir2(dirname3(lockPath), { recursive: true });
|
|
824
916
|
let lockHandle = null;
|
|
825
917
|
try {
|
|
826
|
-
lockHandle = await
|
|
918
|
+
lockHandle = await openFile2(lockPath, "wx");
|
|
827
919
|
} catch (error) {
|
|
828
920
|
if (error.code === "EEXIST") {
|
|
829
921
|
return null;
|
|
@@ -836,14 +928,14 @@ class FileSystemBackend extends Backend {
|
|
|
836
928
|
return null;
|
|
837
929
|
}
|
|
838
930
|
const currentState = projectStepState(currentEvents, workflowSlug);
|
|
839
|
-
const claimable = currentState.status === "scheduled" && currentState.availableAt !== undefined && currentState.availableAt <=
|
|
931
|
+
const claimable = currentState.status === "scheduled" && currentState.availableAt !== undefined && currentState.availableAt <= getMicrosecondTimestamp2() && currentState.attemptNumber === attemptNumber;
|
|
840
932
|
if (!claimable) {
|
|
841
933
|
return null;
|
|
842
934
|
}
|
|
843
|
-
const timestamp =
|
|
935
|
+
const timestamp = getMicrosecondTimestamp2();
|
|
844
936
|
const event = {
|
|
845
937
|
category: "step",
|
|
846
|
-
eventId:
|
|
938
|
+
eventId: generateEventId(timestamp),
|
|
847
939
|
timestampUs: timestamp,
|
|
848
940
|
workflowSlug,
|
|
849
941
|
runId,
|
|
@@ -861,7 +953,7 @@ class FileSystemBackend extends Backend {
|
|
|
861
953
|
await lockHandle.close();
|
|
862
954
|
} catch {}
|
|
863
955
|
try {
|
|
864
|
-
await
|
|
956
|
+
await unlink2(lockPath);
|
|
865
957
|
} catch (error) {
|
|
866
958
|
if (error.code !== "ENOENT") {
|
|
867
959
|
throw error;
|
|
@@ -872,31 +964,23 @@ class FileSystemBackend extends Backend {
|
|
|
872
964
|
}
|
|
873
965
|
async reclaimStaleSteps(staleThreshold, reclaimedBy) {
|
|
874
966
|
const reclaimed = [];
|
|
875
|
-
const now =
|
|
967
|
+
const now = getMicrosecondTimestamp2();
|
|
876
968
|
try {
|
|
877
969
|
const workflows = await readdir(this.baseDir);
|
|
878
970
|
for (const workflowSlug of workflows) {
|
|
879
971
|
if (workflowSlug.startsWith("."))
|
|
880
972
|
continue;
|
|
881
|
-
const workflowDir =
|
|
973
|
+
const workflowDir = join2(this.baseDir, workflowSlug);
|
|
882
974
|
const runDirs = await readdir(workflowDir);
|
|
883
975
|
for (const runId of runDirs) {
|
|
884
976
|
try {
|
|
885
|
-
const stepEventsDir = this.getStepEventsDir(workflowSlug, runId);
|
|
977
|
+
const stepEventsDir = this.paths.getStepEventsDir(workflowSlug, runId);
|
|
886
978
|
try {
|
|
887
979
|
await access(stepEventsDir);
|
|
888
980
|
} catch {
|
|
889
981
|
continue;
|
|
890
982
|
}
|
|
891
|
-
const
|
|
892
|
-
const stepIds = new Set;
|
|
893
|
-
for (const file of eventFiles) {
|
|
894
|
-
const parts = file.replace(".json", "").split("-");
|
|
895
|
-
if (parts.length >= 3) {
|
|
896
|
-
const stepId = parts.slice(1, -1).join("-");
|
|
897
|
-
stepIds.add(stepId);
|
|
898
|
-
}
|
|
899
|
-
}
|
|
983
|
+
const stepIds = await this.discoverStepIdsFromEvents(stepEventsDir);
|
|
900
984
|
for (const stepId of stepIds) {
|
|
901
985
|
const events = await this.loadEvents(workflowSlug, runId, { category: "step", stepId });
|
|
902
986
|
if (events.length === 0)
|
|
@@ -918,7 +1002,7 @@ class FileSystemBackend extends Backend {
|
|
|
918
1002
|
await this.saveStepScheduled(workflowSlug, runId, stepId, {
|
|
919
1003
|
availableAt: now,
|
|
920
1004
|
reason: "retry",
|
|
921
|
-
attemptNumber: state.attemptNumber,
|
|
1005
|
+
attemptNumber: state.attemptNumber + 1,
|
|
922
1006
|
retryDelayMs: 0
|
|
923
1007
|
});
|
|
924
1008
|
reclaimed.push({ workflowSlug, runId, stepId });
|
|
@@ -934,33 +1018,27 @@ class FileSystemBackend extends Backend {
|
|
|
934
1018
|
return reclaimed;
|
|
935
1019
|
}
|
|
936
1020
|
}
|
|
937
|
-
getRegistryDir() {
|
|
938
|
-
return join(this.baseDir, ".registry");
|
|
939
|
-
}
|
|
940
|
-
getWorkflowRegistryDir(slug) {
|
|
941
|
-
return join(this.getRegistryDir(), slug);
|
|
942
|
-
}
|
|
943
1021
|
async registerWorkflow(registration) {
|
|
944
|
-
const registryDir = this.getWorkflowRegistryDir(registration.slug);
|
|
1022
|
+
const registryDir = this.paths.getWorkflowRegistryDir(registration.slug);
|
|
945
1023
|
try {
|
|
946
|
-
await
|
|
947
|
-
const metadataPath =
|
|
1024
|
+
await mkdir2(registryDir, { recursive: true });
|
|
1025
|
+
const metadataPath = join2(registryDir, "metadata.json");
|
|
948
1026
|
const metadata = {
|
|
949
1027
|
slug: registration.slug,
|
|
950
1028
|
name: registration.name,
|
|
951
1029
|
location: registration.location,
|
|
952
1030
|
inputSchemaJSON: registration.inputSchemaJSON
|
|
953
1031
|
};
|
|
954
|
-
await
|
|
955
|
-
const stepsPath =
|
|
956
|
-
await
|
|
1032
|
+
await writeFile2(metadataPath, JSON.stringify(metadata, null, 2), "utf-8");
|
|
1033
|
+
const stepsPath = join2(registryDir, "steps.json");
|
|
1034
|
+
await writeFile2(stepsPath, JSON.stringify(registration.steps, null, 2), "utf-8");
|
|
957
1035
|
} catch (error) {
|
|
958
1036
|
console.error(`Failed to register workflow ${registration.slug}:`, error);
|
|
959
1037
|
throw new Error(`Failed to register workflow: ${error instanceof Error ? error.message : "Unknown error"}`);
|
|
960
1038
|
}
|
|
961
1039
|
}
|
|
962
1040
|
async getWorkflowMetadata(slug) {
|
|
963
|
-
const metadataPath =
|
|
1041
|
+
const metadataPath = join2(this.paths.getWorkflowRegistryDir(slug), "metadata.json");
|
|
964
1042
|
try {
|
|
965
1043
|
const content = await readFile(metadataPath, "utf-8");
|
|
966
1044
|
return JSON.parse(content);
|
|
@@ -973,7 +1051,7 @@ class FileSystemBackend extends Backend {
|
|
|
973
1051
|
}
|
|
974
1052
|
}
|
|
975
1053
|
async listWorkflowMetadata() {
|
|
976
|
-
const registryDir = this.getRegistryDir();
|
|
1054
|
+
const registryDir = this.paths.getRegistryDir();
|
|
977
1055
|
try {
|
|
978
1056
|
try {
|
|
979
1057
|
await access(registryDir);
|
|
@@ -997,7 +1075,7 @@ class FileSystemBackend extends Backend {
|
|
|
997
1075
|
}
|
|
998
1076
|
}
|
|
999
1077
|
async getWorkflowSteps(slug) {
|
|
1000
|
-
const stepsPath =
|
|
1078
|
+
const stepsPath = join2(this.paths.getWorkflowRegistryDir(slug), "steps.json");
|
|
1001
1079
|
try {
|
|
1002
1080
|
const content = await readFile(stepsPath, "utf-8");
|
|
1003
1081
|
return JSON.parse(content);
|
|
@@ -1010,7 +1088,7 @@ class FileSystemBackend extends Backend {
|
|
|
1010
1088
|
}
|
|
1011
1089
|
}
|
|
1012
1090
|
async listRunIds(workflowSlug) {
|
|
1013
|
-
const workflowDir =
|
|
1091
|
+
const workflowDir = join2(this.baseDir, workflowSlug);
|
|
1014
1092
|
try {
|
|
1015
1093
|
try {
|
|
1016
1094
|
await access(workflowDir);
|
|
@@ -1030,9 +1108,92 @@ class FileSystemBackend extends Backend {
|
|
|
1030
1108
|
throw new Error(`Failed to list run IDs: ${error instanceof Error ? error.message : "Unknown error"}`);
|
|
1031
1109
|
}
|
|
1032
1110
|
}
|
|
1111
|
+
async createWorkflowVersion(version) {
|
|
1112
|
+
const versionDir = join2(this.paths.getWorkflowRegistryDir(version.workflowSlug), "versions", version.versionId);
|
|
1113
|
+
try {
|
|
1114
|
+
await mkdir2(versionDir, { recursive: true });
|
|
1115
|
+
const versionPath = join2(versionDir, "version.json");
|
|
1116
|
+
try {
|
|
1117
|
+
await access(versionPath);
|
|
1118
|
+
return;
|
|
1119
|
+
} catch {}
|
|
1120
|
+
await writeFile2(versionPath, JSON.stringify(version, null, 2), "utf-8");
|
|
1121
|
+
} catch (error) {
|
|
1122
|
+
console.error(`Failed to create workflow version ${version.versionId}:`, error);
|
|
1123
|
+
throw new Error(`Failed to create workflow version: ${error instanceof Error ? error.message : "Unknown error"}`);
|
|
1124
|
+
}
|
|
1125
|
+
}
|
|
1126
|
+
async getWorkflowVersion(workflowSlug, versionId) {
|
|
1127
|
+
const versionPath = join2(this.paths.getWorkflowRegistryDir(workflowSlug), "versions", versionId, "version.json");
|
|
1128
|
+
try {
|
|
1129
|
+
const content = await readFile(versionPath, "utf-8");
|
|
1130
|
+
const parsed = JSON.parse(content);
|
|
1131
|
+
return WorkflowVersionSchema.parse(parsed);
|
|
1132
|
+
} catch (error) {
|
|
1133
|
+
if (error && typeof error === "object" && "code" in error && error.code === "ENOENT") {
|
|
1134
|
+
return null;
|
|
1135
|
+
}
|
|
1136
|
+
console.error(`Failed to get workflow version ${versionId}:`, error);
|
|
1137
|
+
throw new Error(`Failed to load workflow version: ${error instanceof Error ? error.message : "Unknown error"}`);
|
|
1138
|
+
}
|
|
1139
|
+
}
|
|
1140
|
+
async getCurrentWorkflowVersion(workflowSlug) {
|
|
1141
|
+
const versionsDir = join2(this.paths.getWorkflowRegistryDir(workflowSlug), "versions");
|
|
1142
|
+
try {
|
|
1143
|
+
try {
|
|
1144
|
+
await access(versionsDir);
|
|
1145
|
+
} catch {
|
|
1146
|
+
return null;
|
|
1147
|
+
}
|
|
1148
|
+
const entries = await readdir(versionsDir, { withFileTypes: true });
|
|
1149
|
+
const versions = [];
|
|
1150
|
+
for (const entry of entries) {
|
|
1151
|
+
if (entry.isDirectory()) {
|
|
1152
|
+
const version = await this.getWorkflowVersion(workflowSlug, entry.name);
|
|
1153
|
+
if (version) {
|
|
1154
|
+
versions.push(version);
|
|
1155
|
+
}
|
|
1156
|
+
}
|
|
1157
|
+
}
|
|
1158
|
+
if (versions.length === 0) {
|
|
1159
|
+
return null;
|
|
1160
|
+
}
|
|
1161
|
+
versions.sort((a, b) => b.createdAt - a.createdAt);
|
|
1162
|
+
return versions[0] ?? null;
|
|
1163
|
+
} catch (error) {
|
|
1164
|
+
console.error(`Failed to get current workflow version for ${workflowSlug}:`, error);
|
|
1165
|
+
throw new Error(`Failed to get current workflow version: ${error instanceof Error ? error.message : "Unknown error"}`);
|
|
1166
|
+
}
|
|
1167
|
+
}
|
|
1168
|
+
async listWorkflowVersions(workflowSlug, options) {
|
|
1169
|
+
const versionsDir = join2(this.paths.getWorkflowRegistryDir(workflowSlug), "versions");
|
|
1170
|
+
try {
|
|
1171
|
+
try {
|
|
1172
|
+
await access(versionsDir);
|
|
1173
|
+
} catch {
|
|
1174
|
+
return [];
|
|
1175
|
+
}
|
|
1176
|
+
const entries = await readdir(versionsDir, { withFileTypes: true });
|
|
1177
|
+
const versions = [];
|
|
1178
|
+
for (const entry of entries) {
|
|
1179
|
+
if (entry.isDirectory()) {
|
|
1180
|
+
const version = await this.getWorkflowVersion(workflowSlug, entry.name);
|
|
1181
|
+
if (version) {
|
|
1182
|
+
versions.push(version);
|
|
1183
|
+
}
|
|
1184
|
+
}
|
|
1185
|
+
}
|
|
1186
|
+
versions.sort((a, b) => b.createdAt - a.createdAt);
|
|
1187
|
+
const limit = options?.limit ?? versions.length;
|
|
1188
|
+
return versions.slice(0, limit);
|
|
1189
|
+
} catch (error) {
|
|
1190
|
+
console.error(`Failed to list workflow versions for ${workflowSlug}:`, error);
|
|
1191
|
+
throw new Error(`Failed to list workflow versions: ${error instanceof Error ? error.message : "Unknown error"}`);
|
|
1192
|
+
}
|
|
1193
|
+
}
|
|
1033
1194
|
async close() {}
|
|
1034
1195
|
async loadEventsForAnalytics(options) {
|
|
1035
|
-
const now =
|
|
1196
|
+
const now = getMicrosecondTimestamp2();
|
|
1036
1197
|
const startUs = options?.startUs ?? now - 24 * 60 * 60 * 1000 * 1000;
|
|
1037
1198
|
const endUs = options?.endUs ?? now;
|
|
1038
1199
|
const allStepEvents = [];
|
|
@@ -1211,7 +1372,7 @@ class FileSystemBackend extends Backend {
|
|
|
1211
1372
|
}
|
|
1212
1373
|
async getThroughput(options) {
|
|
1213
1374
|
const { stepEvents, workflowEvents } = await this.loadEventsForAnalytics(options);
|
|
1214
|
-
const now =
|
|
1375
|
+
const now = getMicrosecondTimestamp2();
|
|
1215
1376
|
const startUs = options?.startUs ?? now - 24 * 60 * 60 * 1000 * 1000;
|
|
1216
1377
|
const endUs = options?.endUs ?? now;
|
|
1217
1378
|
const timeRangeUs = endUs - startUs;
|
|
@@ -1329,7 +1490,7 @@ class FileSystemBackend extends Backend {
|
|
|
1329
1490
|
return computeSuccessRate(stepEvents, workflowEvents, options?.workflowSlug, options?.stepId);
|
|
1330
1491
|
}
|
|
1331
1492
|
async getAnalyticsSummary(options) {
|
|
1332
|
-
const now =
|
|
1493
|
+
const now = getMicrosecondTimestamp2();
|
|
1333
1494
|
const startUs = options?.startUs ?? now - 24 * 60 * 60 * 1000 * 1000;
|
|
1334
1495
|
const endUs = options?.endUs ?? now;
|
|
1335
1496
|
const [
|
|
@@ -1378,4 +1539,4 @@ export {
|
|
|
1378
1539
|
FileSystemBackend
|
|
1379
1540
|
};
|
|
1380
1541
|
|
|
1381
|
-
//# debugId=
|
|
1542
|
+
//# debugId=7E7E3DE80AE950DB64756E2164756E21
|