@spfn/workflow 0.1.0-alpha.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +702 -0
- package/dist/entities/workflow-execution.d.ts +169 -0
- package/dist/entities/workflow-execution.js +48 -0
- package/dist/entities/workflow-execution.js.map +1 -0
- package/dist/entities/workflow-step-execution.d.ts +203 -0
- package/dist/entities/workflow-step-execution.js +94 -0
- package/dist/entities/workflow-step-execution.js.map +1 -0
- package/dist/index.d.ts +545 -0
- package/dist/index.js +824 -0
- package/dist/index.js.map +1 -0
- package/dist/status-JJY5KGcN.d.ts +10 -0
- package/migrations/0000_even_thunderbolt_ross.sql +36 -0
- package/migrations/meta/0000_snapshot.json +308 -0
- package/migrations/meta/_journal.json +13 -0
- package/package.json +76 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 INFLIKE Inc.
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,702 @@
|
|
|
1
|
+
# @spfn/workflow
|
|
2
|
+
|
|
3
|
+
Lightweight workflow engine - Pipeline orchestration based on `@spfn/core` Jobs
|
|
4
|
+
|
|
5
|
+
## Overview
|
|
6
|
+
|
|
7
|
+
`@spfn/workflow` is a workflow engine that defines and executes complex business processes by chaining multiple Jobs together.
|
|
8
|
+
|
|
9
|
+
```
|
|
10
|
+
┌─────────┐ data ┌─────────┐ data ┌─────────┐
|
|
11
|
+
│ Job A │ ────────▶ │ Job B │ ────────▶ │ Job C │
|
|
12
|
+
└─────────┘ └─────────┘ └─────────┘
|
|
13
|
+
│ │ │
|
|
14
|
+
└─────────────────────┴─────────────────────┘
|
|
15
|
+
Workflow
|
|
16
|
+
```
|
|
17
|
+
|
|
18
|
+
## Installation
|
|
19
|
+
|
|
20
|
+
```bash
|
|
21
|
+
pnpm add @spfn/workflow
|
|
22
|
+
```
|
|
23
|
+
|
|
24
|
+
## Prerequisites
|
|
25
|
+
|
|
26
|
+
- `@spfn/core` server with database enabled (PostgreSQL)
|
|
27
|
+
- Workflow tables (`spfn_workflow.executions`, `spfn_workflow.step_executions`) are created automatically via Drizzle migration
|
|
28
|
+
|
|
29
|
+
## Core Concepts
|
|
30
|
+
|
|
31
|
+
| Concept | Description |
|
|
32
|
+
|---------|-------------|
|
|
33
|
+
| **Job** | Independent unit of work (defined in `@spfn/core`) |
|
|
34
|
+
| **Workflow** | Pipeline that chains Jobs together |
|
|
35
|
+
| **Step** | Individual execution unit within a Workflow |
|
|
36
|
+
| **Engine** | Workflow execution and state management |
|
|
37
|
+
|
|
38
|
+
## Quick Start
|
|
39
|
+
|
|
40
|
+
### 1. Define Jobs (@spfn/core)
|
|
41
|
+
|
|
42
|
+
```typescript
|
|
43
|
+
import { job } from '@spfn/core/job';
|
|
44
|
+
import { Type } from '@sinclair/typebox';
|
|
45
|
+
|
|
46
|
+
const createRepo = job('create-repo')
|
|
47
|
+
.input(Type.Object({
|
|
48
|
+
tenantId: Type.String(),
|
|
49
|
+
}))
|
|
50
|
+
.output(Type.Object({
|
|
51
|
+
repoId: Type.String(),
|
|
52
|
+
repoUrl: Type.String(),
|
|
53
|
+
}))
|
|
54
|
+
.compensate(async (input, output) => {
|
|
55
|
+
// Compensation logic executed during rollback
|
|
56
|
+
await gitea.deleteRepo(output.repoId);
|
|
57
|
+
})
|
|
58
|
+
.handler(async (input) => {
|
|
59
|
+
const repo = await gitea.create(input.tenantId);
|
|
60
|
+
return { repoId: repo.id, repoUrl: repo.url };
|
|
61
|
+
});
|
|
62
|
+
```
|
|
63
|
+
|
|
64
|
+
### 2. Define Workflow
|
|
65
|
+
|
|
66
|
+
```typescript
|
|
67
|
+
import { workflow } from '@spfn/workflow';
|
|
68
|
+
|
|
69
|
+
export const provisionTenant = workflow('provision-tenant')
|
|
70
|
+
.input(Type.Object({
|
|
71
|
+
tenantId: Type.String(),
|
|
72
|
+
plan: Type.String(),
|
|
73
|
+
}))
|
|
74
|
+
.resumable(true) // Resume from failure point
|
|
75
|
+
.rollback(true) // Enable rollback on failure
|
|
76
|
+
|
|
77
|
+
// Sequential execution
|
|
78
|
+
.pipe(createPodIdentity, (ctx) => ({
|
|
79
|
+
tenantId: ctx.input.tenantId,
|
|
80
|
+
plan: ctx.input.plan,
|
|
81
|
+
}))
|
|
82
|
+
|
|
83
|
+
// Parallel execution
|
|
84
|
+
.parallel({
|
|
85
|
+
appRepo: [createAppRepo, (ctx) => ({
|
|
86
|
+
tenantId: ctx.input.tenantId,
|
|
87
|
+
})],
|
|
88
|
+
gitopsRepo: [createGitopsRepo, (ctx) => ({
|
|
89
|
+
tenantId: ctx.input.tenantId,
|
|
90
|
+
})],
|
|
91
|
+
})
|
|
92
|
+
|
|
93
|
+
// Reference previous results (type-inferred)
|
|
94
|
+
.pipe(notifyComplete, (ctx) => ({
|
|
95
|
+
tenantId: ctx.input.tenantId,
|
|
96
|
+
appRepoUrl: ctx.results.appRepo.repoUrl,
|
|
97
|
+
gitopsRepoUrl: ctx.results.gitopsRepo.repoUrl,
|
|
98
|
+
}))
|
|
99
|
+
|
|
100
|
+
.build();
|
|
101
|
+
```
|
|
102
|
+
|
|
103
|
+
### 3. Define Workflow Router
|
|
104
|
+
|
|
105
|
+
```typescript
|
|
106
|
+
// workflow.router.ts
|
|
107
|
+
import { defineWorkflowRouter } from '@spfn/workflow';
|
|
108
|
+
|
|
109
|
+
export const workflowRouter = defineWorkflowRouter([
|
|
110
|
+
provisionTenant,
|
|
111
|
+
deprovisionTenant,
|
|
112
|
+
]);
|
|
113
|
+
```
|
|
114
|
+
|
|
115
|
+
### 4. Register in Server Config
|
|
116
|
+
|
|
117
|
+
```typescript
|
|
118
|
+
// server.config.ts
|
|
119
|
+
import { defineServerConfig } from '@spfn/core/server';
|
|
120
|
+
import { workflowRouter } from './workflow.router';
|
|
121
|
+
|
|
122
|
+
export default defineServerConfig()
|
|
123
|
+
.routes(appRouter)
|
|
124
|
+
.workflows(workflowRouter)
|
|
125
|
+
.build();
|
|
126
|
+
```
|
|
127
|
+
|
|
128
|
+
Server starts → DB initialized → Workflow engine auto-initialized. After this, `workflowRouter.engine` is ready to use.
|
|
129
|
+
|
|
130
|
+
### 5. Execute Workflow
|
|
131
|
+
|
|
132
|
+
Use `workflowRouter.engine` in route handlers or services after the server has started.
|
|
133
|
+
|
|
134
|
+
```typescript
|
|
135
|
+
// In a route handler or service
|
|
136
|
+
import { workflowRouter } from './workflow.router';
|
|
137
|
+
|
|
138
|
+
// Execute (async)
|
|
139
|
+
const execution = await workflowRouter.engine.start('provision-tenant', {
|
|
140
|
+
tenantId: 'abc',
|
|
141
|
+
plan: 'pro',
|
|
142
|
+
});
|
|
143
|
+
|
|
144
|
+
// Check status
|
|
145
|
+
const status = await workflowRouter.engine.get(execution.id);
|
|
146
|
+
|
|
147
|
+
// Get step output
|
|
148
|
+
const output = await workflowRouter.engine.getStepOutput(execution.id, 'appRepo');
|
|
149
|
+
```
|
|
150
|
+
|
|
151
|
+
## API Reference
|
|
152
|
+
|
|
153
|
+
### Workflow Builder
|
|
154
|
+
|
|
155
|
+
#### `workflow(name)`
|
|
156
|
+
|
|
157
|
+
Creates a new workflow builder.
|
|
158
|
+
|
|
159
|
+
```typescript
|
|
160
|
+
const wf = workflow('my-workflow');
|
|
161
|
+
```
|
|
162
|
+
|
|
163
|
+
#### `.input(schema)`
|
|
164
|
+
|
|
165
|
+
Defines the input schema (TypeBox).
|
|
166
|
+
|
|
167
|
+
```typescript
|
|
168
|
+
.input(Type.Object({
|
|
169
|
+
tenantId: Type.String(),
|
|
170
|
+
plan: Type.Union([Type.Literal('free'), Type.Literal('pro')]),
|
|
171
|
+
}))
|
|
172
|
+
```
|
|
173
|
+
|
|
174
|
+
#### `.pipe(job, mapper)`
|
|
175
|
+
|
|
176
|
+
Adds a sequential execution step. The mapper receives a `WorkflowContext` with:
|
|
177
|
+
|
|
178
|
+
- `ctx.input` — Original workflow input
|
|
179
|
+
- `ctx.results` — Results from previous steps (type-inferred)
|
|
180
|
+
- `ctx.execution` — Execution metadata (`id`, `workflowName`, `startedAt`)
|
|
181
|
+
|
|
182
|
+
```typescript
|
|
183
|
+
.pipe(createRepo, (ctx) => ({
|
|
184
|
+
tenantId: ctx.input.tenantId,
|
|
185
|
+
}))
|
|
186
|
+
```
|
|
187
|
+
|
|
188
|
+
#### `.parallel(steps)`
|
|
189
|
+
|
|
190
|
+
Adds parallel execution steps.
|
|
191
|
+
|
|
192
|
+
```typescript
|
|
193
|
+
.parallel({
|
|
194
|
+
appRepo: [createAppRepo, (ctx) => ({ tenantId: ctx.input.tenantId })],
|
|
195
|
+
gitopsRepo: [createGitopsRepo, (ctx) => ({ tenantId: ctx.input.tenantId })],
|
|
196
|
+
})
|
|
197
|
+
```
|
|
198
|
+
|
|
199
|
+
#### `.resumable(enabled)`
|
|
200
|
+
|
|
201
|
+
Sets whether the workflow can resume from the failure point.
|
|
202
|
+
|
|
203
|
+
- `true`: Skip completed steps and restart from the failure point
|
|
204
|
+
- `false` (default): Restart from beginning
|
|
205
|
+
|
|
206
|
+
#### `.rollback(enabled)`
|
|
207
|
+
|
|
208
|
+
Sets whether rollback is enabled on failure.
|
|
209
|
+
|
|
210
|
+
- `true` (default): Execute `compensate` functions of completed steps in reverse order
|
|
211
|
+
- `false`: No rollback
|
|
212
|
+
|
|
213
|
+
#### `.notify(config)`
|
|
214
|
+
|
|
215
|
+
Adds notification configuration.
|
|
216
|
+
|
|
217
|
+
```typescript
|
|
218
|
+
.notify({
|
|
219
|
+
on: ['failed', 'completed'],
|
|
220
|
+
when: (event) => event.input.plan === 'pro', // Conditional (optional)
|
|
221
|
+
providers: [consoleProvider, slackProvider],
|
|
222
|
+
})
|
|
223
|
+
```
|
|
224
|
+
|
|
225
|
+
#### `.build()`
|
|
226
|
+
|
|
227
|
+
Completes the workflow definition.
|
|
228
|
+
|
|
229
|
+
---
|
|
230
|
+
|
|
231
|
+
### Workflow Router
|
|
232
|
+
|
|
233
|
+
#### `defineWorkflowRouter(workflows)`
|
|
234
|
+
|
|
235
|
+
Defines a workflow router for server registration. The engine is lazily initialized when the server starts.
|
|
236
|
+
|
|
237
|
+
```typescript
|
|
238
|
+
import { defineWorkflowRouter } from '@spfn/workflow';
|
|
239
|
+
|
|
240
|
+
const router = defineWorkflowRouter([provisionTenant, deprovisionTenant]);
|
|
241
|
+
```
|
|
242
|
+
|
|
243
|
+
#### `router.engine`
|
|
244
|
+
|
|
245
|
+
Access the workflow engine instance. Throws if the server has not been started yet.
|
|
246
|
+
|
|
247
|
+
```typescript
|
|
248
|
+
const execution = await router.engine.start('provision-tenant', {
|
|
249
|
+
tenantId: 'abc',
|
|
250
|
+
plan: 'pro',
|
|
251
|
+
});
|
|
252
|
+
```
|
|
253
|
+
|
|
254
|
+
#### `router.isInitialized`
|
|
255
|
+
|
|
256
|
+
Check if the workflow engine has been initialized.
|
|
257
|
+
|
|
258
|
+
#### `isWorkflowRouter(value)`
|
|
259
|
+
|
|
260
|
+
Type guard to check if a value is a `WorkflowRouter`.
|
|
261
|
+
|
|
262
|
+
```typescript
|
|
263
|
+
import { isWorkflowRouter } from '@spfn/workflow';
|
|
264
|
+
|
|
265
|
+
if (isWorkflowRouter(value))
|
|
266
|
+
{
|
|
267
|
+
value.engine.start(...);
|
|
268
|
+
}
|
|
269
|
+
```
|
|
270
|
+
|
|
271
|
+
---
|
|
272
|
+
|
|
273
|
+
### Workflow Engine
|
|
274
|
+
|
|
275
|
+
#### `createWorkflowEngine(options)`
|
|
276
|
+
|
|
277
|
+
Creates a workflow engine directly (low-level API). Prefer `defineWorkflowRouter` for typical usage.
|
|
278
|
+
|
|
279
|
+
```typescript
|
|
280
|
+
const engine = createWorkflowEngine({
|
|
281
|
+
workflows: [provisionTenant],
|
|
282
|
+
db: database,
|
|
283
|
+
storage: s3Storage, // Optional
|
|
284
|
+
largeOutputThreshold: 1024 * 1024, // 1MB (optional)
|
|
285
|
+
logger: customLogger, // Optional (defaults to console)
|
|
286
|
+
validateInput: true, // Optional (default: true)
|
|
287
|
+
});
|
|
288
|
+
```
|
|
289
|
+
|
|
290
|
+
| Option | Type | Default | Description |
|
|
291
|
+
|--------|------|---------|-------------|
|
|
292
|
+
| `workflows` | `WorkflowDef[]` | (required) | Workflow definitions |
|
|
293
|
+
| `db` | `unknown` | (required) | Drizzle database instance |
|
|
294
|
+
| `storage` | `OutputStorage` | - | External storage for large outputs |
|
|
295
|
+
| `largeOutputThreshold` | `number` | `1048576` (1MB) | Byte threshold for external storage |
|
|
296
|
+
| `logger` | `WorkflowLogger` | `defaultLogger` | Custom logger |
|
|
297
|
+
| `validateInput` | `boolean` | `true` | Validate input against schema |
|
|
298
|
+
|
|
299
|
+
#### `engine.start(name, input)`
|
|
300
|
+
|
|
301
|
+
Executes a workflow asynchronously.
|
|
302
|
+
|
|
303
|
+
```typescript
|
|
304
|
+
const execution = await engine.start('provision-tenant', {
|
|
305
|
+
tenantId: 'abc',
|
|
306
|
+
plan: 'pro',
|
|
307
|
+
});
|
|
308
|
+
// { id: 'exec-123', workflowName: 'provision-tenant', status: 'pending' }
|
|
309
|
+
```
|
|
310
|
+
|
|
311
|
+
#### `engine.get(executionId)`
|
|
312
|
+
|
|
313
|
+
Gets execution status.
|
|
314
|
+
|
|
315
|
+
```typescript
|
|
316
|
+
const status = await engine.get('exec-123');
|
|
317
|
+
// { id, workflowName, status, input, steps: [...], createdAt, ... }
|
|
318
|
+
```
|
|
319
|
+
|
|
320
|
+
#### `engine.getStepOutput(executionId, stepName)`
|
|
321
|
+
|
|
322
|
+
Gets the output of a specific step.
|
|
323
|
+
|
|
324
|
+
```typescript
|
|
325
|
+
const output = await engine.getStepOutput('exec-123', 'appRepo');
|
|
326
|
+
// { repoId: 'repo-abc', repoUrl: 'https://...' }
|
|
327
|
+
```
|
|
328
|
+
|
|
329
|
+
#### `engine.list(options?)`
|
|
330
|
+
|
|
331
|
+
Lists executions.
|
|
332
|
+
|
|
333
|
+
```typescript
|
|
334
|
+
const list = await engine.list({
|
|
335
|
+
workflowName: 'provision-tenant',
|
|
336
|
+
status: 'failed',
|
|
337
|
+
limit: 10,
|
|
338
|
+
offset: 0,
|
|
339
|
+
});
|
|
340
|
+
```
|
|
341
|
+
|
|
342
|
+
#### `engine.retry(executionId)`
|
|
343
|
+
|
|
344
|
+
Retries a failed workflow.
|
|
345
|
+
|
|
346
|
+
- `resumable: true`: Resume from the failure point
|
|
347
|
+
- `resumable: false`: Restart from beginning
|
|
348
|
+
|
|
349
|
+
```typescript
|
|
350
|
+
await engine.retry('exec-123');
|
|
351
|
+
```
|
|
352
|
+
|
|
353
|
+
#### `engine.cancel(executionId, options?)`
|
|
354
|
+
|
|
355
|
+
Cancels a running workflow.
|
|
356
|
+
|
|
357
|
+
```typescript
|
|
358
|
+
await engine.cancel('exec-123');
|
|
359
|
+
await engine.cancel('exec-123', { rollback: true }); // With rollback
|
|
360
|
+
```
|
|
361
|
+
|
|
362
|
+
#### `engine.subscribe(executionId, callback)`
|
|
363
|
+
|
|
364
|
+
Subscribes to execution events.
|
|
365
|
+
|
|
366
|
+
```typescript
|
|
367
|
+
const unsubscribe = engine.subscribe('exec-123', (event) => {
|
|
368
|
+
console.log(event.type, event.stepName);
|
|
369
|
+
});
|
|
370
|
+
|
|
371
|
+
// Unsubscribe
|
|
372
|
+
unsubscribe();
|
|
373
|
+
```
|
|
374
|
+
|
|
375
|
+
---
|
|
376
|
+
|
|
377
|
+
### Notification Providers
|
|
378
|
+
|
|
379
|
+
Only `consoleProvider` is provided by default. For email, SMS, Slack, etc., implement your own provider using `@spfn/notification`.
|
|
380
|
+
|
|
381
|
+
#### `consoleProvider`
|
|
382
|
+
|
|
383
|
+
Logs events to console (built-in).
|
|
384
|
+
|
|
385
|
+
```typescript
|
|
386
|
+
import { consoleProvider } from '@spfn/workflow';
|
|
387
|
+
|
|
388
|
+
.notify({
|
|
389
|
+
on: ['failed'],
|
|
390
|
+
providers: [consoleProvider],
|
|
391
|
+
})
|
|
392
|
+
```
|
|
393
|
+
|
|
394
|
+
#### `formatEventAsText(event)`
|
|
395
|
+
|
|
396
|
+
Helper function to format workflow events as plain text. Useful when implementing custom providers.
|
|
397
|
+
|
|
398
|
+
```typescript
|
|
399
|
+
import { formatEventAsText } from '@spfn/workflow';
|
|
400
|
+
|
|
401
|
+
const text = formatEventAsText(event);
|
|
402
|
+
// Output:
|
|
403
|
+
// Workflow: provision-tenant
|
|
404
|
+
// Event: failed
|
|
405
|
+
// Execution ID: exec-123
|
|
406
|
+
// Timestamp: 2024-01-01T00:00:00.000Z
|
|
407
|
+
// Error: Connection timeout
|
|
408
|
+
```
|
|
409
|
+
|
|
410
|
+
### Custom Providers with @spfn/notification
|
|
411
|
+
|
|
412
|
+
Implement custom notification providers using `@spfn/notification` for email, SMS, Slack, etc.
|
|
413
|
+
|
|
414
|
+
#### Email Provider
|
|
415
|
+
|
|
416
|
+
```typescript
|
|
417
|
+
import { formatEventAsText } from '@spfn/workflow';
|
|
418
|
+
import { sendEmail } from '@spfn/notification/server';
|
|
419
|
+
import type { NotificationProvider } from '@spfn/workflow';
|
|
420
|
+
|
|
421
|
+
const emailProvider: NotificationProvider = {
|
|
422
|
+
name: 'email',
|
|
423
|
+
async notify(event)
|
|
424
|
+
{
|
|
425
|
+
await sendEmail({
|
|
426
|
+
to: 'admin@example.com',
|
|
427
|
+
subject: `[Workflow] ${event.workflowName}: ${event.type}`,
|
|
428
|
+
text: formatEventAsText(event),
|
|
429
|
+
});
|
|
430
|
+
},
|
|
431
|
+
};
|
|
432
|
+
|
|
433
|
+
// Use in workflow
|
|
434
|
+
workflow('provision-tenant')
|
|
435
|
+
.pipe(...)
|
|
436
|
+
.notify({
|
|
437
|
+
on: ['failed', 'completed'],
|
|
438
|
+
providers: [emailProvider],
|
|
439
|
+
})
|
|
440
|
+
.build();
|
|
441
|
+
```
|
|
442
|
+
|
|
443
|
+
#### SMS Provider
|
|
444
|
+
|
|
445
|
+
```typescript
|
|
446
|
+
import { sendSMS } from '@spfn/notification/server';
|
|
447
|
+
import type { NotificationProvider } from '@spfn/workflow';
|
|
448
|
+
|
|
449
|
+
const smsProvider: NotificationProvider = {
|
|
450
|
+
name: 'sms',
|
|
451
|
+
async notify(event)
|
|
452
|
+
{
|
|
453
|
+
const message = `[${event.workflowName}] ${event.type}` +
|
|
454
|
+
(event.error ? `: ${event.error}` : '');
|
|
455
|
+
|
|
456
|
+
await sendSMS({
|
|
457
|
+
to: '+821012345678',
|
|
458
|
+
message,
|
|
459
|
+
});
|
|
460
|
+
},
|
|
461
|
+
};
|
|
462
|
+
```
|
|
463
|
+
|
|
464
|
+
#### Slack Provider
|
|
465
|
+
|
|
466
|
+
```typescript
|
|
467
|
+
import type { NotificationProvider } from '@spfn/workflow';
|
|
468
|
+
|
|
469
|
+
const slackProvider: NotificationProvider = {
|
|
470
|
+
name: 'slack',
|
|
471
|
+
async notify(event)
|
|
472
|
+
{
|
|
473
|
+
await fetch(process.env.SLACK_WEBHOOK_URL!, {
|
|
474
|
+
method: 'POST',
|
|
475
|
+
headers: { 'Content-Type': 'application/json' },
|
|
476
|
+
body: JSON.stringify({
|
|
477
|
+
text: `[${event.workflowName}] ${event.type}`,
|
|
478
|
+
attachments: [{
|
|
479
|
+
color: event.type === 'failed' ? 'danger' : 'good',
|
|
480
|
+
fields: [
|
|
481
|
+
{ title: 'Execution ID', value: event.executionId, short: true },
|
|
482
|
+
{ title: 'Event', value: event.type, short: true },
|
|
483
|
+
],
|
|
484
|
+
}],
|
|
485
|
+
}),
|
|
486
|
+
});
|
|
487
|
+
},
|
|
488
|
+
};
|
|
489
|
+
```
|
|
490
|
+
|
|
491
|
+
#### Combined Notifications
|
|
492
|
+
|
|
493
|
+
`.notify()` accepts a single configuration. To handle multiple event types with different providers, include all providers in one call and use the `when` condition for filtering:
|
|
494
|
+
|
|
495
|
+
```typescript
|
|
496
|
+
import { consoleProvider } from '@spfn/workflow';
|
|
497
|
+
|
|
498
|
+
workflow('provision-tenant')
|
|
499
|
+
.pipe(...)
|
|
500
|
+
.notify({
|
|
501
|
+
on: ['started', 'completed', 'failed'],
|
|
502
|
+
providers: [consoleProvider, emailProvider, slackProvider],
|
|
503
|
+
})
|
|
504
|
+
.build();
|
|
505
|
+
```
|
|
506
|
+
|
|
507
|
+
---
|
|
508
|
+
|
|
509
|
+
## Events
|
|
510
|
+
|
|
511
|
+
Events emitted during workflow execution:
|
|
512
|
+
|
|
513
|
+
| Event | Description |
|
|
514
|
+
|-------|-------------|
|
|
515
|
+
| `started` | Workflow execution started |
|
|
516
|
+
| `step.started` | Step execution started |
|
|
517
|
+
| `step.completed` | Step execution completed |
|
|
518
|
+
| `step.failed` | Step execution failed |
|
|
519
|
+
| `completed` | Entire workflow completed |
|
|
520
|
+
| `failed` | Workflow failed |
|
|
521
|
+
| `cancelled` | Workflow cancelled |
|
|
522
|
+
|
|
523
|
+
```typescript
|
|
524
|
+
interface WorkflowEvent {
|
|
525
|
+
type: 'started' | 'step.started' | 'step.completed' | ...;
|
|
526
|
+
workflowName: string;
|
|
527
|
+
executionId: string;
|
|
528
|
+
stepName?: string;
|
|
529
|
+
stepIndex?: number;
|
|
530
|
+
input?: unknown;
|
|
531
|
+
output?: unknown;
|
|
532
|
+
error?: string;
|
|
533
|
+
timestamp: Date;
|
|
534
|
+
}
|
|
535
|
+
```
|
|
536
|
+
|
|
537
|
+
---
|
|
538
|
+
|
|
539
|
+
## Status Flow
|
|
540
|
+
|
|
541
|
+
### Workflow Status
|
|
542
|
+
|
|
543
|
+
```
|
|
544
|
+
pending → running → completed
|
|
545
|
+
→ failed → compensating → compensated
|
|
546
|
+
→ cancelled
|
|
547
|
+
```
|
|
548
|
+
|
|
549
|
+
| Status | Description |
|
|
550
|
+
|--------|-------------|
|
|
551
|
+
| `pending` | Waiting for execution |
|
|
552
|
+
| `running` | Currently executing |
|
|
553
|
+
| `completed` | Successfully completed |
|
|
554
|
+
| `failed` | Failed |
|
|
555
|
+
| `compensating` | Rollback in progress |
|
|
556
|
+
| `compensated` | Rollback completed |
|
|
557
|
+
| `cancelled` | Cancelled |
|
|
558
|
+
|
|
559
|
+
### Step Status
|
|
560
|
+
|
|
561
|
+
| Status | Description |
|
|
562
|
+
|--------|-------------|
|
|
563
|
+
| `pending` | Waiting for execution |
|
|
564
|
+
| `running` | Currently executing |
|
|
565
|
+
| `completed` | Completed |
|
|
566
|
+
| `failed` | Failed |
|
|
567
|
+
| `skipped` | Skipped |
|
|
568
|
+
| `compensated` | Compensation completed |
|
|
569
|
+
|
|
570
|
+
---
|
|
571
|
+
|
|
572
|
+
## Rollback Strategy
|
|
573
|
+
|
|
574
|
+
```
|
|
575
|
+
Execution: Step1 ✓ → Step2 ✓ → Step3 ✗
|
|
576
|
+
|
|
577
|
+
Rollback: Step2.compensate() → Step1.compensate()
|
|
578
|
+
(Step3 doesn't need compensation as it failed)
|
|
579
|
+
(Continue even if compensation fails)
|
|
580
|
+
```
|
|
581
|
+
|
|
582
|
+
- Rollback only executes for Jobs with `compensate` function defined
|
|
583
|
+
- Continues with other compensations even if one fails
|
|
584
|
+
- Executes in reverse order (from last completed step)
|
|
585
|
+
|
|
586
|
+
---
|
|
587
|
+
|
|
588
|
+
## Database Entities
|
|
589
|
+
|
|
590
|
+
All tables are created in the `spfn_workflow` schema.
|
|
591
|
+
|
|
592
|
+
### spfn_workflow.executions
|
|
593
|
+
|
|
594
|
+
```typescript
|
|
595
|
+
{
|
|
596
|
+
id: string;
|
|
597
|
+
workflowName: string;
|
|
598
|
+
status: WorkflowStatus;
|
|
599
|
+
input: unknown; // jsonb
|
|
600
|
+
currentStep: number;
|
|
601
|
+
error?: string;
|
|
602
|
+
createdAt: Date;
|
|
603
|
+
updatedAt: Date;
|
|
604
|
+
completedAt?: Date;
|
|
605
|
+
}
|
|
606
|
+
```
|
|
607
|
+
|
|
608
|
+
### spfn_workflow.step_executions
|
|
609
|
+
|
|
610
|
+
```typescript
|
|
611
|
+
{
|
|
612
|
+
id: string;
|
|
613
|
+
executionId: string; // FK (cascade delete)
|
|
614
|
+
stepName: string;
|
|
615
|
+
stepIndex: number;
|
|
616
|
+
status: WorkflowStepStatus;
|
|
617
|
+
output?: unknown; // jsonb or { $ref: url }
|
|
618
|
+
error?: string;
|
|
619
|
+
startedAt?: Date;
|
|
620
|
+
completedAt?: Date;
|
|
621
|
+
createdAt: Date;
|
|
622
|
+
updatedAt: Date;
|
|
623
|
+
}
|
|
624
|
+
```
|
|
625
|
+
|
|
626
|
+
---
|
|
627
|
+
|
|
628
|
+
## Large Output Handling
|
|
629
|
+
|
|
630
|
+
When a step's output exceeds the threshold (default 1MB), it is stored in external storage.
|
|
631
|
+
|
|
632
|
+
```typescript
|
|
633
|
+
// Storage interface
|
|
634
|
+
interface OutputStorage {
|
|
635
|
+
upload(data: unknown): Promise<string>; // Returns URL
|
|
636
|
+
download(url: string): Promise<unknown>;
|
|
637
|
+
}
|
|
638
|
+
|
|
639
|
+
// Engine configuration
|
|
640
|
+
const engine = createWorkflowEngine({
|
|
641
|
+
workflows: [...],
|
|
642
|
+
db: database,
|
|
643
|
+
storage: {
|
|
644
|
+
async upload(data) {
|
|
645
|
+
const key = `outputs/${crypto.randomUUID()}.json`;
|
|
646
|
+
await s3.putObject({ Key: key, Body: JSON.stringify(data) });
|
|
647
|
+
return `s3://${bucket}/${key}`;
|
|
648
|
+
},
|
|
649
|
+
async download(url) {
|
|
650
|
+
const key = url.replace(`s3://${bucket}/`, '');
|
|
651
|
+
const { Body } = await s3.getObject({ Key: key });
|
|
652
|
+
return JSON.parse(await Body.transformToString());
|
|
653
|
+
},
|
|
654
|
+
},
|
|
655
|
+
largeOutputThreshold: 1024 * 1024, // 1MB
|
|
656
|
+
});
|
|
657
|
+
```
|
|
658
|
+
|
|
659
|
+
---
|
|
660
|
+
|
|
661
|
+
## Project Structure
|
|
662
|
+
|
|
663
|
+
```
|
|
664
|
+
src/
|
|
665
|
+
├── index.ts # Public exports
|
|
666
|
+
├── builder/ # Workflow definition API
|
|
667
|
+
│ ├── workflow-builder.ts # Builder implementation
|
|
668
|
+
│ └── types.ts # Type definitions
|
|
669
|
+
├── engine/ # Execution engine
|
|
670
|
+
│ ├── workflow-engine.ts # Engine implementation
|
|
671
|
+
│ └── types.ts # Type definitions
|
|
672
|
+
├── entities/ # DB entities
|
|
673
|
+
│ ├── schema.ts # PostgreSQL schema definition
|
|
674
|
+
│ ├── workflow-execution.entity.ts
|
|
675
|
+
│ └── workflow-step-execution.entity.ts
|
|
676
|
+
├── notification/ # Notification system
|
|
677
|
+
│ ├── providers.ts # Built-in providers
|
|
678
|
+
│ └── types.ts # Type definitions
|
|
679
|
+
├── config/ # Configuration utilities
|
|
680
|
+
│ ├── workflow-router.ts # defineWorkflowRouter, isWorkflowRouter
|
|
681
|
+
│ └── types.ts # WorkflowRouter, WorkflowRouterConfig
|
|
682
|
+
└── types/ # Common types
|
|
683
|
+
└── status.ts # Status types
|
|
684
|
+
```
|
|
685
|
+
|
|
686
|
+
---
|
|
687
|
+
|
|
688
|
+
## Design Principles
|
|
689
|
+
|
|
690
|
+
| Principle | Description |
|
|
691
|
+
|-----------|-------------|
|
|
692
|
+
| **Step Independence** | Steps (Jobs) are unaware of other steps |
|
|
693
|
+
| **Step Reusability** | A single step can participate in multiple workflows |
|
|
694
|
+
| **Composition** | New workflows = combination of existing steps |
|
|
695
|
+
| **Explicit Mapping** | Data transfer between steps is explicitly defined |
|
|
696
|
+
| **Type Safety** | Only registered workflows can be executed, input/output types are inferred |
|
|
697
|
+
|
|
698
|
+
---
|
|
699
|
+
|
|
700
|
+
## License
|
|
701
|
+
|
|
702
|
+
MIT
|