@bratsos/workflow-engine 0.0.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +986 -0
- package/package.json +112 -0
- package/skills/workflow-engine/SKILL.md +539 -0
- package/skills/workflow-engine/references/01-stage-definitions.md +535 -0
- package/skills/workflow-engine/references/02-workflow-builder.md +421 -0
- package/skills/workflow-engine/references/03-runtime-setup.md +497 -0
- package/skills/workflow-engine/references/04-ai-integration.md +587 -0
- package/skills/workflow-engine/references/05-persistence-setup.md +614 -0
- package/skills/workflow-engine/references/06-async-batch-stages.md +514 -0
- package/skills/workflow-engine/references/07-testing-patterns.md +546 -0
- package/skills/workflow-engine/references/08-common-patterns.md +503 -0
|
@@ -0,0 +1,614 @@
|
|
|
1
|
+
# Persistence Setup
|
|
2
|
+
|
|
3
|
+
Complete guide for setting up workflow persistence with Prisma.
|
|
4
|
+
|
|
5
|
+
## ⚠️ Pre-Setup Checklist
|
|
6
|
+
|
|
7
|
+
Before creating persistence instances, verify your Prisma schema:
|
|
8
|
+
|
|
9
|
+
- [ ] `WorkflowRun` model exists with `duration` field (not `durationMs`)
|
|
10
|
+
- [ ] `WorkflowStage` model exists with `duration` field (not `durationMs`)
|
|
11
|
+
- [ ] `WorkflowLog` model exists (required for `ctx.log()`)
|
|
12
|
+
- [ ] `WorkflowArtifact` model exists (required for stage outputs)
|
|
13
|
+
- [ ] `JobQueue` model exists (required for job processing)
|
|
14
|
+
- [ ] `Status` enum exists with all values: PENDING, RUNNING, SUSPENDED, COMPLETED, FAILED, CANCELLED, SKIPPED
|
|
15
|
+
- [ ] `LogLevel` enum exists: DEBUG, INFO, WARN, ERROR
|
|
16
|
+
- [ ] `ArtifactType` enum exists: STAGE_OUTPUT, ARTIFACT, METADATA
|
|
17
|
+
- [ ] Table names use `@@map` (e.g., `@@map("workflow_runs")`)
|
|
18
|
+
- [ ] Run `prisma db push` or `prisma migrate dev` after schema changes
|
|
19
|
+
|
|
20
|
+
**Common Errors:**
|
|
21
|
+
| Error | Cause | Fix |
|
|
22
|
+
|-------|-------|-----|
|
|
23
|
+
| `Cannot read properties of undefined (reading 'create')` | Missing `WorkflowLog` model | Add `WorkflowLog` to schema |
|
|
24
|
+
| `Cannot read properties of undefined (reading 'upsert')` | Missing `WorkflowArtifact` model | Add `WorkflowArtifact` to schema |
|
|
25
|
+
| `Unknown argument 'duration'. Did you mean 'durationMs'?` | Wrong field name | Rename `durationMs` to `duration` |
|
|
26
|
+
| `near "FOR": syntax error` | Using SQLite without `databaseType: "sqlite"` | Pass `{ databaseType: "sqlite" }` to factory functions |
|
|
27
|
+
|
|
28
|
+
## Interfaces Overview
|
|
29
|
+
|
|
30
|
+
The workflow engine uses three persistence interfaces:
|
|
31
|
+
|
|
32
|
+
| Interface | Purpose |
|
|
33
|
+
|-----------|---------|
|
|
34
|
+
| `WorkflowPersistence` | Workflow runs, stages, logs, artifacts |
|
|
35
|
+
| `JobQueue` | Job scheduling and processing |
|
|
36
|
+
| `AICallLogger` | AI call tracking and stats |
|
|
37
|
+
|
|
38
|
+
## WorkflowPersistence Interface
|
|
39
|
+
|
|
40
|
+
```typescript
|
|
41
|
+
interface WorkflowPersistence {
|
|
42
|
+
// WorkflowRun operations
|
|
43
|
+
createRun(data: CreateRunInput): Promise<WorkflowRunRecord>;
|
|
44
|
+
updateRun(id: string, data: UpdateRunInput): Promise<void>;
|
|
45
|
+
getRun(id: string): Promise<WorkflowRunRecord | null>;
|
|
46
|
+
getRunStatus(id: string): Promise<WorkflowStatus | null>;
|
|
47
|
+
getRunsByStatus(status: WorkflowStatus): Promise<WorkflowRunRecord[]>;
|
|
48
|
+
|
|
49
|
+
// WorkflowStage operations
|
|
50
|
+
createStage(data: CreateStageInput): Promise<WorkflowStageRecord>;
|
|
51
|
+
upsertStage(data: UpsertStageInput): Promise<WorkflowStageRecord>;
|
|
52
|
+
updateStage(id: string, data: UpdateStageInput): Promise<void>;
|
|
53
|
+
updateStageByRunAndStageId(runId, stageId, data): Promise<void>;
|
|
54
|
+
getStage(runId, stageId): Promise<WorkflowStageRecord | null>;
|
|
55
|
+
getStageById(id): Promise<WorkflowStageRecord | null>;
|
|
56
|
+
getStagesByRun(runId, options?): Promise<WorkflowStageRecord[]>;
|
|
57
|
+
getSuspendedStages(beforeDate): Promise<WorkflowStageRecord[]>;
|
|
58
|
+
getFirstSuspendedStageReadyToResume(runId): Promise<WorkflowStageRecord | null>;
|
|
59
|
+
getFirstFailedStage(runId): Promise<WorkflowStageRecord | null>;
|
|
60
|
+
getLastCompletedStage(runId): Promise<WorkflowStageRecord | null>;
|
|
61
|
+
getLastCompletedStageBefore(runId, executionGroup): Promise<WorkflowStageRecord | null>;
|
|
62
|
+
deleteStage(id): Promise<void>;
|
|
63
|
+
|
|
64
|
+
// WorkflowLog operations
|
|
65
|
+
createLog(data: CreateLogInput): Promise<void>;
|
|
66
|
+
|
|
67
|
+
// WorkflowArtifact operations
|
|
68
|
+
saveArtifact(data: SaveArtifactInput): Promise<void>;
|
|
69
|
+
loadArtifact(runId, key): Promise<unknown>;
|
|
70
|
+
hasArtifact(runId, key): Promise<boolean>;
|
|
71
|
+
deleteArtifact(runId, key): Promise<void>;
|
|
72
|
+
listArtifacts(runId): Promise<WorkflowArtifactRecord[]>;
|
|
73
|
+
getStageIdForArtifact(runId, stageId): Promise<string | null>;
|
|
74
|
+
|
|
75
|
+
// Stage output convenience
|
|
76
|
+
saveStageOutput(runId, workflowType, stageId, output): Promise<string>;
|
|
77
|
+
}
|
|
78
|
+
```
|
|
79
|
+
|
|
80
|
+
## JobQueue Interface
|
|
81
|
+
|
|
82
|
+
```typescript
|
|
83
|
+
interface JobQueue {
|
|
84
|
+
enqueue(options: EnqueueJobInput): Promise<string>;
|
|
85
|
+
enqueueParallel(jobs: EnqueueJobInput[]): Promise<string[]>;
|
|
86
|
+
dequeue(): Promise<DequeueResult | null>;
|
|
87
|
+
complete(jobId: string): Promise<void>;
|
|
88
|
+
suspend(jobId: string, nextPollAt: Date): Promise<void>;
|
|
89
|
+
fail(jobId: string, error: string, shouldRetry?: boolean): Promise<void>;
|
|
90
|
+
getSuspendedJobsReadyToPoll(): Promise<Array<{ jobId, stageId, workflowRunId }>>;
|
|
91
|
+
releaseStaleJobs(staleThresholdMs?: number): Promise<number>;
|
|
92
|
+
}
|
|
93
|
+
```
|
|
94
|
+
|
|
95
|
+
## AICallLogger Interface
|
|
96
|
+
|
|
97
|
+
```typescript
|
|
98
|
+
interface AICallLogger {
|
|
99
|
+
logCall(call: CreateAICallInput): void;
|
|
100
|
+
logBatchResults(batchId: string, results: CreateAICallInput[]): Promise<void>;
|
|
101
|
+
getStats(topicPrefix: string): Promise<AIHelperStats>;
|
|
102
|
+
isRecorded(batchId: string): Promise<boolean>;
|
|
103
|
+
}
|
|
104
|
+
```
|
|
105
|
+
|
|
106
|
+
## Prisma Schema
|
|
107
|
+
|
|
108
|
+
### Required Enums
|
|
109
|
+
|
|
110
|
+
```prisma
|
|
111
|
+
// Unified status enum for workflows, stages, and jobs
|
|
112
|
+
enum Status {
|
|
113
|
+
PENDING
|
|
114
|
+
RUNNING
|
|
115
|
+
SUSPENDED
|
|
116
|
+
COMPLETED
|
|
117
|
+
FAILED
|
|
118
|
+
CANCELLED
|
|
119
|
+
SKIPPED
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
enum LogLevel {
|
|
123
|
+
DEBUG
|
|
124
|
+
INFO
|
|
125
|
+
WARN
|
|
126
|
+
ERROR
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
enum ArtifactType {
|
|
130
|
+
STAGE_OUTPUT
|
|
131
|
+
ARTIFACT
|
|
132
|
+
METADATA
|
|
133
|
+
}
|
|
134
|
+
```
|
|
135
|
+
|
|
136
|
+
### WorkflowRun Model
|
|
137
|
+
|
|
138
|
+
```prisma
|
|
139
|
+
model WorkflowRun {
|
|
140
|
+
id String @id @default(cuid())
|
|
141
|
+
createdAt DateTime @default(now())
|
|
142
|
+
updatedAt DateTime @updatedAt
|
|
143
|
+
workflowId String
|
|
144
|
+
workflowName String
|
|
145
|
+
workflowType String
|
|
146
|
+
status Status @default(PENDING)
|
|
147
|
+
startedAt DateTime?
|
|
148
|
+
completedAt DateTime?
|
|
149
|
+
duration Int?
|
|
150
|
+
input Json
|
|
151
|
+
output Json?
|
|
152
|
+
config Json @default("{}")
|
|
153
|
+
totalCost Float @default(0)
|
|
154
|
+
totalTokens Int @default(0)
|
|
155
|
+
priority Int @default(5)
|
|
156
|
+
|
|
157
|
+
// Relations
|
|
158
|
+
stages WorkflowStage[]
|
|
159
|
+
logs WorkflowLog[]
|
|
160
|
+
artifacts WorkflowArtifact[]
|
|
161
|
+
|
|
162
|
+
// Indexes for common queries
|
|
163
|
+
@@index([status])
|
|
164
|
+
@@index([workflowType])
|
|
165
|
+
@@index([createdAt])
|
|
166
|
+
|
|
167
|
+
@@map("workflow_runs")
|
|
168
|
+
}
|
|
169
|
+
```
|
|
170
|
+
|
|
171
|
+
### WorkflowStage Model
|
|
172
|
+
|
|
173
|
+
```prisma
|
|
174
|
+
model WorkflowStage {
|
|
175
|
+
id String @id @default(cuid())
|
|
176
|
+
createdAt DateTime @default(now())
|
|
177
|
+
updatedAt DateTime @updatedAt
|
|
178
|
+
workflowRunId String
|
|
179
|
+
stageId String
|
|
180
|
+
stageName String
|
|
181
|
+
stageNumber Int
|
|
182
|
+
executionGroup Int
|
|
183
|
+
status Status @default(PENDING)
|
|
184
|
+
startedAt DateTime?
|
|
185
|
+
completedAt DateTime?
|
|
186
|
+
duration Int?
|
|
187
|
+
inputData Json?
|
|
188
|
+
outputData Json?
|
|
189
|
+
config Json?
|
|
190
|
+
suspendedState Json?
|
|
191
|
+
resumeData Json?
|
|
192
|
+
nextPollAt DateTime?
|
|
193
|
+
pollInterval Int?
|
|
194
|
+
maxWaitUntil DateTime?
|
|
195
|
+
metrics Json?
|
|
196
|
+
embeddingInfo Json?
|
|
197
|
+
errorMessage String?
|
|
198
|
+
|
|
199
|
+
// Relations
|
|
200
|
+
workflowRun WorkflowRun @relation(fields: [workflowRunId], references: [id], onDelete: Cascade)
|
|
201
|
+
logs WorkflowLog[]
|
|
202
|
+
artifacts WorkflowArtifact[]
|
|
203
|
+
|
|
204
|
+
// Unique constraint for stage lookup
|
|
205
|
+
@@unique([workflowRunId, stageId])
|
|
206
|
+
@@index([status])
|
|
207
|
+
@@index([nextPollAt])
|
|
208
|
+
|
|
209
|
+
@@map("workflow_stages")
|
|
210
|
+
}
|
|
211
|
+
```
|
|
212
|
+
|
|
213
|
+
### WorkflowLog Model
|
|
214
|
+
|
|
215
|
+
```prisma
|
|
216
|
+
model WorkflowLog {
|
|
217
|
+
id String @id @default(cuid())
|
|
218
|
+
createdAt DateTime @default(now())
|
|
219
|
+
workflowRunId String?
|
|
220
|
+
workflowStageId String?
|
|
221
|
+
level LogLevel
|
|
222
|
+
message String
|
|
223
|
+
metadata Json?
|
|
224
|
+
|
|
225
|
+
// Relations
|
|
226
|
+
workflowRun WorkflowRun? @relation(fields: [workflowRunId], references: [id], onDelete: Cascade)
|
|
227
|
+
workflowStage WorkflowStage? @relation(fields: [workflowStageId], references: [id], onDelete: Cascade)
|
|
228
|
+
|
|
229
|
+
@@index([workflowRunId])
|
|
230
|
+
@@index([createdAt])
|
|
231
|
+
|
|
232
|
+
@@map("workflow_logs")
|
|
233
|
+
}
|
|
234
|
+
```
|
|
235
|
+
|
|
236
|
+
### WorkflowArtifact Model
|
|
237
|
+
|
|
238
|
+
```prisma
|
|
239
|
+
model WorkflowArtifact {
|
|
240
|
+
id String @id @default(cuid())
|
|
241
|
+
createdAt DateTime @default(now())
|
|
242
|
+
updatedAt DateTime @updatedAt
|
|
243
|
+
workflowRunId String
|
|
244
|
+
workflowStageId String?
|
|
245
|
+
key String
|
|
246
|
+
type ArtifactType
|
|
247
|
+
data Json
|
|
248
|
+
size Int
|
|
249
|
+
metadata Json?
|
|
250
|
+
|
|
251
|
+
// Relations
|
|
252
|
+
workflowRun WorkflowRun @relation(fields: [workflowRunId], references: [id], onDelete: Cascade)
|
|
253
|
+
workflowStage WorkflowStage? @relation(fields: [workflowStageId], references: [id], onDelete: Cascade)
|
|
254
|
+
|
|
255
|
+
// Unique constraint for artifact lookup
|
|
256
|
+
@@unique([workflowRunId, key])
|
|
257
|
+
@@index([type])
|
|
258
|
+
|
|
259
|
+
@@map("workflow_artifacts")
|
|
260
|
+
}
|
|
261
|
+
```
|
|
262
|
+
|
|
263
|
+
### JobQueue Model
|
|
264
|
+
|
|
265
|
+
```prisma
|
|
266
|
+
model JobQueue {
|
|
267
|
+
id String @id @default(cuid())
|
|
268
|
+
createdAt DateTime @default(now())
|
|
269
|
+
updatedAt DateTime @updatedAt
|
|
270
|
+
workflowRunId String
|
|
271
|
+
stageId String
|
|
272
|
+
status Status @default(PENDING)
|
|
273
|
+
priority Int @default(5)
|
|
274
|
+
workerId String?
|
|
275
|
+
lockedAt DateTime?
|
|
276
|
+
startedAt DateTime?
|
|
277
|
+
completedAt DateTime?
|
|
278
|
+
attempt Int @default(0)
|
|
279
|
+
maxAttempts Int @default(3)
|
|
280
|
+
lastError String?
|
|
281
|
+
nextPollAt DateTime?
|
|
282
|
+
payload Json @default("{}")
|
|
283
|
+
|
|
284
|
+
@@index([status, nextPollAt])
|
|
285
|
+
@@index([workflowRunId])
|
|
286
|
+
|
|
287
|
+
@@map("job_queue")
|
|
288
|
+
}
|
|
289
|
+
```
|
|
290
|
+
|
|
291
|
+
### AICall Model
|
|
292
|
+
|
|
293
|
+
```prisma
|
|
294
|
+
model AICall {
|
|
295
|
+
id String @id @default(cuid())
|
|
296
|
+
createdAt DateTime @default(now())
|
|
297
|
+
topic String
|
|
298
|
+
callType String
|
|
299
|
+
modelKey String
|
|
300
|
+
modelId String
|
|
301
|
+
// NOTE: @db.Text is a PostgreSQL/MySQL native type attribute.
// Omit it when using the SQLite provider — Prisma rejects native type attributes for SQLite.
prompt String @db.Text
response String @db.Text
|
|
303
|
+
inputTokens Int
|
|
304
|
+
outputTokens Int
|
|
305
|
+
cost Float
|
|
306
|
+
metadata Json?
|
|
307
|
+
|
|
308
|
+
@@index([topic])
|
|
309
|
+
@@index([createdAt])
|
|
310
|
+
@@index([modelKey])
|
|
311
|
+
|
|
312
|
+
@@map("ai_calls")
|
|
313
|
+
}
|
|
314
|
+
```
|
|
315
|
+
|
|
316
|
+
## Creating Persistence Implementations
|
|
317
|
+
|
|
318
|
+
```typescript
|
|
319
|
+
import {
|
|
320
|
+
createPrismaWorkflowPersistence,
|
|
321
|
+
createPrismaJobQueue,
|
|
322
|
+
createPrismaAICallLogger,
|
|
323
|
+
} from "@bratsos/workflow-engine/persistence/prisma";
|
|
324
|
+
import { PrismaClient } from "@prisma/client";
|
|
325
|
+
|
|
326
|
+
const prisma = new PrismaClient();
|
|
327
|
+
|
|
328
|
+
// PostgreSQL (default)
|
|
329
|
+
const persistence = createPrismaWorkflowPersistence(prisma);
|
|
330
|
+
const jobQueue = createPrismaJobQueue(prisma, { workerId: "my-worker-id" });
|
|
331
|
+
const aiCallLogger = createPrismaAICallLogger(prisma);
|
|
332
|
+
|
|
333
|
+
// SQLite - uses optimistic locking instead of FOR UPDATE SKIP LOCKED
|
|
334
|
+
const persistence = createPrismaWorkflowPersistence(prisma, {
|
|
335
|
+
databaseType: "sqlite"
|
|
336
|
+
});
|
|
337
|
+
const jobQueue = createPrismaJobQueue(prisma, {
|
|
338
|
+
databaseType: "sqlite",
|
|
339
|
+
workerId: "my-worker-id" // optional
|
|
340
|
+
});
|
|
341
|
+
```
|
|
342
|
+
|
|
343
|
+
## Database Type Options
|
|
344
|
+
|
|
345
|
+
The Prisma implementations support both PostgreSQL and SQLite:
|
|
346
|
+
|
|
347
|
+
| Database | Locking Strategy | Use Case |
|
|
348
|
+
|----------|-----------------|----------|
|
|
349
|
+
| `postgresql` (default) | `FOR UPDATE SKIP LOCKED` | Production, multi-worker |
|
|
350
|
+
| `sqlite` | Optimistic locking with retry | Development, single-worker |
|
|
351
|
+
|
|
352
|
+
```typescript
|
|
353
|
+
type DatabaseType = "postgresql" | "sqlite";
|
|
354
|
+
|
|
355
|
+
interface PrismaWorkflowPersistenceOptions {
|
|
356
|
+
databaseType?: DatabaseType; // Default: "postgresql"
|
|
357
|
+
}
|
|
358
|
+
|
|
359
|
+
interface PrismaJobQueueOptions {
|
|
360
|
+
workerId?: string; // Default: auto-generated
|
|
361
|
+
databaseType?: DatabaseType; // Default: "postgresql"
|
|
362
|
+
}
|
|
363
|
+
```
|
|
364
|
+
|
|
365
|
+
**Important:** When using SQLite, pass `{ databaseType: "sqlite" }` to both `createPrismaWorkflowPersistence` and `createPrismaJobQueue`. Otherwise, you'll get SQL syntax errors from PostgreSQL-specific queries.
|
|
366
|
+
|
|
367
|
+
## Prisma Version Compatibility
|
|
368
|
+
|
|
369
|
+
The library supports both Prisma 6.x and 7.x:
|
|
370
|
+
|
|
371
|
+
**Prisma 6.x**: Status values are strings
|
|
372
|
+
```typescript
|
|
373
|
+
status: "PENDING"
|
|
374
|
+
```
|
|
375
|
+
|
|
376
|
+
**Prisma 7.x**: Status values are typed enums
|
|
377
|
+
```typescript
|
|
378
|
+
status: prisma.$Enums.Status.PENDING
|
|
379
|
+
```
|
|
380
|
+
|
|
381
|
+
The library automatically detects your Prisma version and handles this difference internally via the enum compatibility layer.
|
|
382
|
+
|
|
383
|
+
## Status Type
|
|
384
|
+
|
|
385
|
+
```typescript
|
|
386
|
+
// Unified status for all entities
|
|
387
|
+
type Status =
|
|
388
|
+
| "PENDING" // Not started
|
|
389
|
+
| "RUNNING" // Executing
|
|
390
|
+
| "SUSPENDED" // Paused (waiting for external event)
|
|
391
|
+
| "COMPLETED" // Finished successfully
|
|
392
|
+
| "FAILED" // Finished with error
|
|
393
|
+
| "CANCELLED" // Manually stopped
|
|
394
|
+
| "SKIPPED"; // Bypassed (stage-specific)
|
|
395
|
+
|
|
396
|
+
// Deprecated aliases (use Status instead)
|
|
397
|
+
type WorkflowStatus = Status;
|
|
398
|
+
type WorkflowStageStatus = Status;
|
|
399
|
+
type JobStatus = Status;
|
|
400
|
+
```
|
|
401
|
+
|
|
402
|
+
## Record Types
|
|
403
|
+
|
|
404
|
+
```typescript
|
|
405
|
+
interface WorkflowRunRecord {
|
|
406
|
+
id: string;
|
|
407
|
+
createdAt: Date;
|
|
408
|
+
updatedAt: Date;
|
|
409
|
+
workflowId: string;
|
|
410
|
+
workflowName: string;
|
|
411
|
+
workflowType: string;
|
|
412
|
+
status: WorkflowStatus;
|
|
413
|
+
startedAt: Date | null;
|
|
414
|
+
completedAt: Date | null;
|
|
415
|
+
duration: number | null;
|
|
416
|
+
input: unknown;
|
|
417
|
+
output: unknown | null;
|
|
418
|
+
config: unknown;
|
|
419
|
+
totalCost: number;
|
|
420
|
+
totalTokens: number;
|
|
421
|
+
priority: number;
|
|
422
|
+
}
|
|
423
|
+
|
|
424
|
+
interface WorkflowStageRecord {
|
|
425
|
+
id: string;
|
|
426
|
+
createdAt: Date;
|
|
427
|
+
updatedAt: Date;
|
|
428
|
+
workflowRunId: string;
|
|
429
|
+
stageId: string;
|
|
430
|
+
stageName: string;
|
|
431
|
+
stageNumber: number;
|
|
432
|
+
executionGroup: number;
|
|
433
|
+
status: WorkflowStageStatus;
|
|
434
|
+
startedAt: Date | null;
|
|
435
|
+
completedAt: Date | null;
|
|
436
|
+
duration: number | null;
|
|
437
|
+
inputData: unknown | null;
|
|
438
|
+
outputData: unknown | null;
|
|
439
|
+
config: unknown | null;
|
|
440
|
+
suspendedState: unknown | null;
|
|
441
|
+
resumeData: unknown | null;
|
|
442
|
+
nextPollAt: Date | null;
|
|
443
|
+
pollInterval: number | null;
|
|
444
|
+
maxWaitUntil: Date | null;
|
|
445
|
+
metrics: unknown | null;
|
|
446
|
+
embeddingInfo: unknown | null;
|
|
447
|
+
errorMessage: string | null;
|
|
448
|
+
}
|
|
449
|
+
```
|
|
450
|
+
|
|
451
|
+
## Input Types
|
|
452
|
+
|
|
453
|
+
```typescript
|
|
454
|
+
interface CreateRunInput {
|
|
455
|
+
id?: string;
|
|
456
|
+
workflowId: string;
|
|
457
|
+
workflowName: string;
|
|
458
|
+
workflowType: string;
|
|
459
|
+
input: unknown;
|
|
460
|
+
config?: unknown;
|
|
461
|
+
priority?: number;
|
|
462
|
+
metadata?: Record<string, unknown>; // Domain-specific fields
|
|
463
|
+
}
|
|
464
|
+
|
|
465
|
+
interface UpdateRunInput {
|
|
466
|
+
status?: WorkflowStatus;
|
|
467
|
+
startedAt?: Date;
|
|
468
|
+
completedAt?: Date;
|
|
469
|
+
duration?: number;
|
|
470
|
+
output?: unknown;
|
|
471
|
+
totalCost?: number;
|
|
472
|
+
totalTokens?: number;
|
|
473
|
+
}
|
|
474
|
+
|
|
475
|
+
interface CreateStageInput {
|
|
476
|
+
workflowRunId: string;
|
|
477
|
+
stageId: string;
|
|
478
|
+
stageName: string;
|
|
479
|
+
stageNumber: number;
|
|
480
|
+
executionGroup: number;
|
|
481
|
+
status?: WorkflowStageStatus;
|
|
482
|
+
startedAt?: Date;
|
|
483
|
+
config?: unknown;
|
|
484
|
+
inputData?: unknown;
|
|
485
|
+
}
|
|
486
|
+
|
|
487
|
+
interface UpdateStageInput {
|
|
488
|
+
status?: WorkflowStageStatus;
|
|
489
|
+
startedAt?: Date;
|
|
490
|
+
completedAt?: Date;
|
|
491
|
+
duration?: number;
|
|
492
|
+
outputData?: unknown;
|
|
493
|
+
config?: unknown;
|
|
494
|
+
suspendedState?: unknown;
|
|
495
|
+
resumeData?: unknown;
|
|
496
|
+
nextPollAt?: Date | null;
|
|
497
|
+
pollInterval?: number;
|
|
498
|
+
maxWaitUntil?: Date;
|
|
499
|
+
metrics?: unknown;
|
|
500
|
+
embeddingInfo?: unknown;
|
|
501
|
+
errorMessage?: string;
|
|
502
|
+
}
|
|
503
|
+
```
|
|
504
|
+
|
|
505
|
+
## Custom Persistence Implementation
|
|
506
|
+
|
|
507
|
+
For non-Prisma databases or testing:
|
|
508
|
+
|
|
509
|
+
```typescript
|
|
510
|
+
import type { WorkflowPersistence } from "@bratsos/workflow-engine";
|
|
511
|
+
|
|
512
|
+
class CustomPersistence implements WorkflowPersistence {
|
|
513
|
+
private runs = new Map<string, WorkflowRunRecord>();
|
|
514
|
+
private stages = new Map<string, WorkflowStageRecord>();
|
|
515
|
+
|
|
516
|
+
async createRun(data: CreateRunInput): Promise<WorkflowRunRecord> {
|
|
517
|
+
const run: WorkflowRunRecord = {
|
|
518
|
+
id: data.id ?? crypto.randomUUID(),
|
|
519
|
+
createdAt: new Date(),
|
|
520
|
+
updatedAt: new Date(),
|
|
521
|
+
workflowId: data.workflowId,
|
|
522
|
+
workflowName: data.workflowName,
|
|
523
|
+
workflowType: data.workflowType,
|
|
524
|
+
status: "PENDING",
|
|
525
|
+
startedAt: null,
|
|
526
|
+
completedAt: null,
|
|
527
|
+
duration: null,
|
|
528
|
+
input: data.input,
|
|
529
|
+
output: null,
|
|
530
|
+
config: data.config ?? {},
|
|
531
|
+
totalCost: 0,
|
|
532
|
+
totalTokens: 0,
|
|
533
|
+
priority: data.priority ?? 5,
|
|
534
|
+
};
|
|
535
|
+
this.runs.set(run.id, run);
|
|
536
|
+
return run;
|
|
537
|
+
}
|
|
538
|
+
|
|
539
|
+
async updateRun(id: string, data: UpdateRunInput): Promise<void> {
|
|
540
|
+
const run = this.runs.get(id);
|
|
541
|
+
if (!run) throw new Error(`Run ${id} not found`);
|
|
542
|
+
Object.assign(run, data, { updatedAt: new Date() });
|
|
543
|
+
}
|
|
544
|
+
|
|
545
|
+
async getRun(id: string): Promise<WorkflowRunRecord | null> {
|
|
546
|
+
return this.runs.get(id) ?? null;
|
|
547
|
+
}
|
|
548
|
+
|
|
549
|
+
// ... implement other methods
|
|
550
|
+
}
|
|
551
|
+
```
|
|
552
|
+
|
|
553
|
+
## Database Migrations
|
|
554
|
+
|
|
555
|
+
After adding the Prisma schema:
|
|
556
|
+
|
|
557
|
+
```bash
|
|
558
|
+
# Generate migration
|
|
559
|
+
npx prisma migrate dev --name add_workflow_tables
|
|
560
|
+
|
|
561
|
+
# Apply to production
|
|
562
|
+
npx prisma migrate deploy
|
|
563
|
+
|
|
564
|
+
# Generate Prisma client
|
|
565
|
+
npx prisma generate
|
|
566
|
+
```
|
|
567
|
+
|
|
568
|
+
## Performance Considerations
|
|
569
|
+
|
|
570
|
+
### Indexes
|
|
571
|
+
|
|
572
|
+
The schema includes indexes for common query patterns:
|
|
573
|
+
- `status` - for polling pending/running workflows
|
|
574
|
+
- `nextPollAt` - for suspended stage polling
|
|
575
|
+
- `workflowRunId` - for stage/log lookups
|
|
576
|
+
- `createdAt` - for ordering
|
|
577
|
+
|
|
578
|
+
### Job Queue Atomicity
|
|
579
|
+
|
|
580
|
+
**PostgreSQL** uses `FOR UPDATE SKIP LOCKED` for atomic dequeue:
|
|
581
|
+
|
|
582
|
+
```sql
|
|
583
|
+
UPDATE job_queue
|
|
584
|
+
SET status = 'RUNNING', workerId = $1, lockedAt = NOW()
|
|
585
|
+
WHERE id = (
|
|
586
|
+
SELECT id FROM job_queue
|
|
587
|
+
WHERE status = 'PENDING'
|
|
588
|
+
AND (nextPollAt IS NULL OR nextPollAt <= NOW())
|
|
589
|
+
ORDER BY priority DESC, createdAt ASC
|
|
590
|
+
LIMIT 1
|
|
591
|
+
FOR UPDATE SKIP LOCKED
|
|
592
|
+
)
|
|
593
|
+
RETURNING *;
|
|
594
|
+
```
|
|
595
|
+
|
|
596
|
+
**SQLite** uses optimistic locking with retry:
|
|
597
|
+
|
|
598
|
+
```typescript
|
|
599
|
+
// 1. Find the next claimable PENDING job (same ordering and poll condition as the PostgreSQL query)
const job = await prisma.jobQueue.findFirst({
  where: {
    status: "PENDING",
    OR: [{ nextPollAt: null }, { nextPollAt: { lte: new Date() } }],
  },
  orderBy: [{ priority: "desc" }, { createdAt: "asc" }],
});
if (!job) return null; // queue is empty

// 2. Atomically claim it (updateMany matches zero rows if another worker already claimed it)
const result = await prisma.jobQueue.updateMany({
  where: { id: job.id, status: "PENDING" },
  data: { status: "RUNNING", workerId, lockedAt: new Date() },
});

// 3. If result.count === 0, another worker claimed it → retry from step 1
|
|
612
|
+
```
|
|
613
|
+
|
|
614
|
+
Both approaches ensure each job is processed by exactly one worker, even with multiple workers competing.
|