@gravito/flux 1.0.0-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +295 -0
- package/dist/builder/WorkflowBuilder.d.ts +96 -0
- package/dist/builder/WorkflowBuilder.d.ts.map +1 -0
- package/dist/builder/index.d.ts +2 -0
- package/dist/builder/index.d.ts.map +1 -0
- package/dist/bun.d.ts +9 -0
- package/dist/bun.d.ts.map +1 -0
- package/dist/bun.js +7 -0
- package/dist/chunk-qjdtqchy.js +145 -0
- package/dist/core/ContextManager.d.ts +40 -0
- package/dist/core/ContextManager.d.ts.map +1 -0
- package/dist/core/StateMachine.d.ts +43 -0
- package/dist/core/StateMachine.d.ts.map +1 -0
- package/dist/core/StepExecutor.d.ts +34 -0
- package/dist/core/StepExecutor.d.ts.map +1 -0
- package/dist/core/index.d.ts +4 -0
- package/dist/core/index.d.ts.map +1 -0
- package/dist/engine/FluxEngine.d.ts +66 -0
- package/dist/engine/FluxEngine.d.ts.map +1 -0
- package/dist/engine/index.d.ts +2 -0
- package/dist/engine/index.d.ts.map +1 -0
- package/dist/index.d.ts +38 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +479 -0
- package/dist/index.node.d.ts +18 -0
- package/dist/index.node.d.ts.map +1 -0
- package/dist/logger/FluxLogger.d.ts +40 -0
- package/dist/logger/FluxLogger.d.ts.map +1 -0
- package/dist/logger/index.d.ts +2 -0
- package/dist/logger/index.d.ts.map +1 -0
- package/dist/node/index.cjs +651 -0
- package/dist/node/index.mjs +619 -0
- package/dist/orbit/OrbitFlux.d.ts +107 -0
- package/dist/orbit/OrbitFlux.d.ts.map +1 -0
- package/dist/orbit/index.d.ts +2 -0
- package/dist/orbit/index.d.ts.map +1 -0
- package/dist/storage/BunSQLiteStorage.d.ts +73 -0
- package/dist/storage/BunSQLiteStorage.d.ts.map +1 -0
- package/dist/storage/MemoryStorage.d.ts +28 -0
- package/dist/storage/MemoryStorage.d.ts.map +1 -0
- package/dist/storage/index.d.ts +3 -0
- package/dist/storage/index.d.ts.map +1 -0
- package/dist/types.d.ts +194 -0
- package/dist/types.d.ts.map +1 -0
- package/package.json +68 -0
package/README.md
ADDED
|
@@ -0,0 +1,295 @@
|
|
|
1
|
+
# @gravito/flux
|
|
2
|
+
|
|
3
|
+
> ⚡ Platform-agnostic, high-performance workflow engine for Gravito
|
|
4
|
+
|
|
5
|
+
## Features
|
|
6
|
+
|
|
7
|
+
- **Pure State Machine** - No runtime dependencies, Web Standard APIs only
|
|
8
|
+
- **Fluent Builder API** - Type-safe, chainable workflow definitions
|
|
9
|
+
- **Storage Adapters** - Memory, SQLite (Bun), PostgreSQL (coming soon)
|
|
10
|
+
- **Retry & Timeout** - Automatic retry with exponential backoff
|
|
11
|
+
- **Event Hooks** - Subscribe to workflow/step lifecycle events
|
|
12
|
+
- **Dual Platform** - Works with both Bun and Node.js
|
|
13
|
+
|
|
14
|
+
## Installation
|
|
15
|
+
|
|
16
|
+
```bash
|
|
17
|
+
# Bun
|
|
18
|
+
bun add @gravito/flux
|
|
19
|
+
|
|
20
|
+
# npm
|
|
21
|
+
npm install @gravito/flux
|
|
22
|
+
```
|
|
23
|
+
|
|
24
|
+
## Quick Start
|
|
25
|
+
|
|
26
|
+
```typescript
|
|
27
|
+
import { FluxEngine, createWorkflow } from '@gravito/flux'
|
|
28
|
+
|
|
29
|
+
// 1. Define workflow
|
|
30
|
+
const orderFlow = createWorkflow('order-process')
|
|
31
|
+
.input<{ orderId: string }>()
|
|
32
|
+
.step('fetch', async (ctx) => {
|
|
33
|
+
ctx.data.order = await db.orders.find(ctx.input.orderId)
|
|
34
|
+
})
|
|
35
|
+
.step('validate', async (ctx) => {
|
|
36
|
+
if (!ctx.data.order.isPaid) throw new Error('Unpaid order')
|
|
37
|
+
})
|
|
38
|
+
.commit('fulfill', async (ctx) => {
|
|
39
|
+
await fulfillment.ship(ctx.data.order)
|
|
40
|
+
})
|
|
41
|
+
|
|
42
|
+
// 2. Execute
|
|
43
|
+
const engine = new FluxEngine()
|
|
44
|
+
const result = await engine.execute(orderFlow, { orderId: '123' })
|
|
45
|
+
|
|
46
|
+
if (result.status === 'completed') {
|
|
47
|
+
console.log('Order processed:', result.data)
|
|
48
|
+
}
|
|
49
|
+
```
|
|
50
|
+
|
|
51
|
+
## Examples
|
|
52
|
+
|
|
53
|
+
### 📦 Order Fulfillment
|
|
54
|
+
|
|
55
|
+
```typescript
|
|
56
|
+
const orderWorkflow = createWorkflow('order-fulfillment')
|
|
57
|
+
.input<{ orderId: string; items: Item[]; userId: string }>()
|
|
58
|
+
.step('validate', async (ctx) => {
|
|
59
|
+
for (const item of ctx.input.items) {
|
|
60
|
+
const stock = await db.products.getStock(item.productId)
|
|
61
|
+
if (stock < item.qty) throw new Error(`Out of stock: ${item.productId}`)
|
|
62
|
+
}
|
|
63
|
+
})
|
|
64
|
+
.step('reserve', async (ctx) => {
|
|
65
|
+
ctx.data.reservationIds = await inventory.reserve(ctx.input.items)
|
|
66
|
+
})
|
|
67
|
+
.step('payment', async (ctx) => {
|
|
68
|
+
ctx.data.payment = await payment.charge(ctx.input.orderId)
|
|
69
|
+
}, { retries: 3, timeout: 30000 })
|
|
70
|
+
.commit('deduct', async (ctx) => {
|
|
71
|
+
await inventory.deduct(ctx.data.reservationIds)
|
|
72
|
+
})
|
|
73
|
+
.commit('notify', async (ctx) => {
|
|
74
|
+
await email.send(ctx.input.userId, 'order-confirmed', ctx.data)
|
|
75
|
+
})
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
### 🖼️ Image Processing
|
|
79
|
+
|
|
80
|
+
```typescript
|
|
81
|
+
const uploadWorkflow = createWorkflow('image-processing')
|
|
82
|
+
.input<{ fileBuffer: Buffer; fileName: string; userId: string }>()
|
|
83
|
+
.step('validate', async (ctx) => {
|
|
84
|
+
if (ctx.input.fileBuffer.length > 10 * 1024 * 1024) {
|
|
85
|
+
throw new Error('File size exceeds 10MB')
|
|
86
|
+
}
|
|
87
|
+
ctx.data.mimeType = await detectMimeType(ctx.input.fileBuffer)
|
|
88
|
+
})
|
|
89
|
+
.step('scan', async (ctx) => {
|
|
90
|
+
const result = await virusScanner.scan(ctx.input.fileBuffer)
|
|
91
|
+
if (!result.clean) throw new Error('Malicious file detected')
|
|
92
|
+
})
|
|
93
|
+
.step('resize', async (ctx) => {
|
|
94
|
+
ctx.data.thumbnail = await sharp(ctx.input.fileBuffer).resize(200).toBuffer()
|
|
95
|
+
})
|
|
96
|
+
.commit('upload', async (ctx) => {
|
|
97
|
+
ctx.data.url = await s3.upload(ctx.input.fileName, ctx.data.thumbnail)
|
|
98
|
+
})
|
|
99
|
+
```
|
|
100
|
+
|
|
101
|
+
### 👤 User Signup
|
|
102
|
+
|
|
103
|
+
```typescript
|
|
104
|
+
const signupWorkflow = createWorkflow('user-signup')
|
|
105
|
+
.input<{ email: string; password: string; name: string }>()
|
|
106
|
+
.step('validate', async (ctx) => {
|
|
107
|
+
const exists = await db.users.findByEmail(ctx.input.email)
|
|
108
|
+
if (exists) throw new Error('Email already in use')
|
|
109
|
+
})
|
|
110
|
+
.step('hash', async (ctx) => {
|
|
111
|
+
ctx.data.hashedPassword = await bcrypt.hash(ctx.input.password, 12)
|
|
112
|
+
})
|
|
113
|
+
.commit('create', async (ctx) => {
|
|
114
|
+
ctx.data.user = await db.users.create({
|
|
115
|
+
email: ctx.input.email,
|
|
116
|
+
password: ctx.data.hashedPassword,
|
|
117
|
+
name: ctx.input.name,
|
|
118
|
+
})
|
|
119
|
+
})
|
|
120
|
+
.commit('sendVerification', async (ctx) => {
|
|
121
|
+
const token = await generateToken(ctx.data.user.id)
|
|
122
|
+
await email.send(ctx.input.email, 'verify-email', { token })
|
|
123
|
+
})
|
|
124
|
+
```
|
|
125
|
+
|
|
126
|
+
### 📈 Report Generation
|
|
127
|
+
|
|
128
|
+
```typescript
|
|
129
|
+
const reportWorkflow = createWorkflow('generate-report')
|
|
130
|
+
.input<{ reportType: string; dateRange: DateRange; requestedBy: string }>()
|
|
131
|
+
.step('fetch-data', async (ctx) => {
|
|
132
|
+
ctx.data.sales = await db.orders.aggregate(ctx.input.dateRange)
|
|
133
|
+
ctx.data.users = await db.users.getMetrics(ctx.input.dateRange)
|
|
134
|
+
}, { timeout: 60000 })
|
|
135
|
+
.step('calculate', async (ctx) => {
|
|
136
|
+
ctx.data.metrics = {
|
|
137
|
+
revenue: ctx.data.sales.reduce((sum, s) => sum + s.total, 0),
|
|
138
|
+
orders: ctx.data.sales.length,
|
|
139
|
+
}
|
|
140
|
+
})
|
|
141
|
+
.step('generate-pdf', async (ctx) => {
|
|
142
|
+
ctx.data.pdf = await pdfGenerator.create(ctx.data.metrics)
|
|
143
|
+
})
|
|
144
|
+
.commit('upload', async (ctx) => {
|
|
145
|
+
ctx.data.url = await s3.upload(`reports/${ctx.id}.pdf`, ctx.data.pdf)
|
|
146
|
+
})
|
|
147
|
+
.commit('notify', async (ctx) => {
|
|
148
|
+
await email.send(ctx.input.requestedBy, 'report-ready', { url: ctx.data.url })
|
|
149
|
+
})
|
|
150
|
+
```
|
|
151
|
+
|
|
152
|
+
## API
|
|
153
|
+
|
|
154
|
+
### `createWorkflow(name)`
|
|
155
|
+
|
|
156
|
+
Create a workflow builder.
|
|
157
|
+
|
|
158
|
+
```typescript
|
|
159
|
+
const flow = createWorkflow('my-workflow')
|
|
160
|
+
.input<{ value: number }>() // Define input type
|
|
161
|
+
.step('step1', handler) // Add step
|
|
162
|
+
.step('step2', handler, opts) // With options
|
|
163
|
+
.commit('save', handler) // Commit step (always runs)
|
|
164
|
+
```
|
|
165
|
+
|
|
166
|
+
### `FluxEngine`
|
|
167
|
+
|
|
168
|
+
Execute workflows.
|
|
169
|
+
|
|
170
|
+
```typescript
|
|
171
|
+
const engine = new FluxEngine({
|
|
172
|
+
storage: new MemoryStorage(), // Default
|
|
173
|
+
defaultRetries: 3, // Default retry count
|
|
174
|
+
defaultTimeout: 30000, // Default 30s timeout
|
|
175
|
+
logger: new FluxConsoleLogger(),
|
|
176
|
+
on: {
|
|
177
|
+
stepStart: (step, ctx) => {},
|
|
178
|
+
stepComplete: (step, ctx, result) => {},
|
|
179
|
+
stepError: (step, ctx, error) => {},
|
|
180
|
+
workflowComplete: (ctx) => {},
|
|
181
|
+
workflowError: (ctx, error) => {},
|
|
182
|
+
}
|
|
183
|
+
})
|
|
184
|
+
|
|
185
|
+
const result = await engine.execute(workflow, input)
|
|
186
|
+
```
|
|
187
|
+
|
|
188
|
+
### Step Options
|
|
189
|
+
|
|
190
|
+
```typescript
|
|
191
|
+
.step('name', handler, {
|
|
192
|
+
retries: 5, // Override retry count
|
|
193
|
+
timeout: 60000, // Override timeout (ms)
|
|
194
|
+
when: (ctx) => ctx.data.x > 0, // Conditional execution
|
|
195
|
+
})
|
|
196
|
+
```
|
|
197
|
+
|
|
198
|
+
### Commit Steps
|
|
199
|
+
|
|
200
|
+
Commit steps are marked to always execute, even on workflow replay:
|
|
201
|
+
|
|
202
|
+
```typescript
|
|
203
|
+
.commit('save-to-db', async (ctx) => {
|
|
204
|
+
await db.insert(ctx.data) // Side effect
|
|
205
|
+
})
|
|
206
|
+
```
|
|
207
|
+
|
|
208
|
+
## Storage Adapters
|
|
209
|
+
|
|
210
|
+
### MemoryStorage (Default)
|
|
211
|
+
|
|
212
|
+
In-memory, for development:
|
|
213
|
+
|
|
214
|
+
```typescript
|
|
215
|
+
import { MemoryStorage } from '@gravito/flux'
|
|
216
|
+
const engine = new FluxEngine({ storage: new MemoryStorage() })
|
|
217
|
+
```
|
|
218
|
+
|
|
219
|
+
### BunSQLiteStorage (Bun only)
|
|
220
|
+
|
|
221
|
+
High-performance SQLite:
|
|
222
|
+
|
|
223
|
+
```typescript
|
|
224
|
+
import { FluxEngine } from '@gravito/flux'
|
|
225
|
+
import { BunSQLiteStorage } from '@gravito/flux/bun'
|
|
226
|
+
|
|
227
|
+
const engine = new FluxEngine({
|
|
228
|
+
storage: new BunSQLiteStorage({ path: './data/workflows.db' })
|
|
229
|
+
})
|
|
230
|
+
```
|
|
231
|
+
|
|
232
|
+
### Custom Storage
|
|
233
|
+
|
|
234
|
+
Implement `WorkflowStorage` interface:
|
|
235
|
+
|
|
236
|
+
```typescript
|
|
237
|
+
interface WorkflowStorage {
|
|
238
|
+
save(state: WorkflowState): Promise<void>
|
|
239
|
+
load(id: string): Promise<WorkflowState | null>
|
|
240
|
+
list(filter?: WorkflowFilter): Promise<WorkflowState[]>
|
|
241
|
+
delete(id: string): Promise<void>
|
|
242
|
+
init?(): Promise<void>
|
|
243
|
+
close?(): Promise<void>
|
|
244
|
+
}
|
|
245
|
+
```
|
|
246
|
+
|
|
247
|
+
## Gravito Integration
|
|
248
|
+
|
|
249
|
+
```typescript
|
|
250
|
+
import { OrbitFlux } from '@gravito/flux'
|
|
251
|
+
|
|
252
|
+
const core = await PlanetCore.boot({
|
|
253
|
+
orbits: [
|
|
254
|
+
new OrbitFlux({
|
|
255
|
+
storage: 'sqlite',
|
|
256
|
+
dbPath: './data/workflows.db',
|
|
257
|
+
})
|
|
258
|
+
]
|
|
259
|
+
})
|
|
260
|
+
|
|
261
|
+
// Access via services
|
|
262
|
+
const flux = core.services.get<FluxEngine>('flux')
|
|
263
|
+
await flux.execute(myWorkflow, input)
|
|
264
|
+
```
|
|
265
|
+
|
|
266
|
+
## Platform Support
|
|
267
|
+
|
|
268
|
+
| Feature | Bun | Node.js |
|
|
269
|
+
|---------|-----|---------|
|
|
270
|
+
| FluxEngine | ✅ | ✅ |
|
|
271
|
+
| MemoryStorage | ✅ | ✅ |
|
|
272
|
+
| BunSQLiteStorage | ✅ | ❌ |
|
|
273
|
+
| OrbitFlux | ✅ | ✅ |
|
|
274
|
+
|
|
275
|
+
## Run Examples
|
|
276
|
+
|
|
277
|
+
```bash
|
|
278
|
+
cd packages/flux
|
|
279
|
+
|
|
280
|
+
# Order fulfillment
|
|
281
|
+
bun run examples/order-fulfillment.ts
|
|
282
|
+
|
|
283
|
+
# Image processing
|
|
284
|
+
bun run examples/image-processing.ts
|
|
285
|
+
|
|
286
|
+
# User signup
|
|
287
|
+
bun run examples/user-signup.ts
|
|
288
|
+
|
|
289
|
+
# Report generation
|
|
290
|
+
bun run examples/report-generation.ts
|
|
291
|
+
```
|
|
292
|
+
|
|
293
|
+
## License
|
|
294
|
+
|
|
295
|
+
MIT
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Workflow Builder - Fluent API for defining workflows
|
|
3
|
+
*
|
|
4
|
+
* Type-safe, chainable workflow definition.
|
|
5
|
+
*
|
|
6
|
+
* @module @gravito/flux/builder
|
|
7
|
+
*/
|
|
8
|
+
import type { WorkflowContext, WorkflowDefinition } from '../types';
|
|
9
|
+
/**
|
|
10
|
+
* Step options
|
|
11
|
+
*/
|
|
12
|
+
interface StepOptions {
|
|
13
|
+
retries?: number;
|
|
14
|
+
timeout?: number;
|
|
15
|
+
when?: (ctx: WorkflowContext) => boolean;
|
|
16
|
+
}
|
|
17
|
+
/**
|
|
18
|
+
* Workflow Builder
|
|
19
|
+
*
|
|
20
|
+
* Provides fluent API for defining workflows with type inference.
|
|
21
|
+
*
|
|
22
|
+
* @example
|
|
23
|
+
* ```typescript
|
|
24
|
+
* const workflow = createWorkflow('order-process')
|
|
25
|
+
* .input<{ orderId: string }>()
|
|
26
|
+
* .step('validate', async (ctx) => {
|
|
27
|
+
* ctx.data.order = await fetchOrder(ctx.input.orderId)
|
|
28
|
+
* })
|
|
29
|
+
* .step('process', async (ctx) => {
|
|
30
|
+
* await processOrder(ctx.data.order)
|
|
31
|
+
* })
|
|
32
|
+
* .commit('notify', async (ctx) => {
|
|
33
|
+
* await sendEmail(ctx.data.order.email)
|
|
34
|
+
* })
|
|
35
|
+
* ```
|
|
36
|
+
*/
|
|
37
|
+
export declare class WorkflowBuilder<TInput = unknown> {
|
|
38
|
+
private _name;
|
|
39
|
+
private _steps;
|
|
40
|
+
private _validateInput?;
|
|
41
|
+
constructor(name: string);
|
|
42
|
+
/**
|
|
43
|
+
* Define input type
|
|
44
|
+
*
|
|
45
|
+
* This method is used for TypeScript type inference.
|
|
46
|
+
*/
|
|
47
|
+
input<T>(): WorkflowBuilder<T>;
|
|
48
|
+
/**
|
|
49
|
+
* Add input validator
|
|
50
|
+
*/
|
|
51
|
+
validate(validator: (input: unknown) => input is TInput): this;
|
|
52
|
+
/**
|
|
53
|
+
* Add a step to the workflow
|
|
54
|
+
*/
|
|
55
|
+
step(name: string, handler: (ctx: WorkflowContext<TInput>) => Promise<void> | void, options?: StepOptions): this;
|
|
56
|
+
/**
|
|
57
|
+
* Add a commit step (always executes, even on replay)
|
|
58
|
+
*
|
|
59
|
+
* Commit steps are for side effects that should not be skipped,
|
|
60
|
+
* such as database writes or external API calls.
|
|
61
|
+
*/
|
|
62
|
+
commit(name: string, handler: (ctx: WorkflowContext<TInput>) => Promise<void> | void, options?: StepOptions): this;
|
|
63
|
+
/**
|
|
64
|
+
* Build the workflow definition
|
|
65
|
+
*/
|
|
66
|
+
build(): WorkflowDefinition<TInput>;
|
|
67
|
+
/**
|
|
68
|
+
* Get workflow name
|
|
69
|
+
*/
|
|
70
|
+
get name(): string;
|
|
71
|
+
/**
|
|
72
|
+
* Get step count
|
|
73
|
+
*/
|
|
74
|
+
get stepCount(): number;
|
|
75
|
+
}
|
|
76
|
+
/**
|
|
77
|
+
* Create a new workflow builder
|
|
78
|
+
*
|
|
79
|
+
* @param name - Unique workflow name
|
|
80
|
+
* @returns WorkflowBuilder instance
|
|
81
|
+
*
|
|
82
|
+
* @example
|
|
83
|
+
* ```typescript
|
|
84
|
+
* const uploadFlow = createWorkflow('image-upload')
|
|
85
|
+
* .input<{ file: Buffer }>()
|
|
86
|
+
* .step('resize', async (ctx) => {
|
|
87
|
+
* ctx.data.resized = await sharp(ctx.input.file).resize(200).toBuffer()
|
|
88
|
+
* })
|
|
89
|
+
* .commit('save', async (ctx) => {
|
|
90
|
+
* await storage.put(ctx.data.resized)
|
|
91
|
+
* })
|
|
92
|
+
* ```
|
|
93
|
+
*/
|
|
94
|
+
export declare function createWorkflow(name: string): WorkflowBuilder;
|
|
95
|
+
export {};
|
|
96
|
+
//# sourceMappingURL=WorkflowBuilder.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"WorkflowBuilder.d.ts","sourceRoot":"","sources":["../../src/builder/WorkflowBuilder.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,EAAkB,eAAe,EAAE,kBAAkB,EAAE,MAAM,UAAU,CAAA;AAEnF;;GAEG;AACH,UAAU,WAAW;IACnB,OAAO,CAAC,EAAE,MAAM,CAAA;IAChB,OAAO,CAAC,EAAE,MAAM,CAAA;IAChB,IAAI,CAAC,EAAE,CAAC,GAAG,EAAE,eAAe,KAAK,OAAO,CAAA;CACzC;AAED;;;;;;;;;;;;;;;;;;;GAmBG;AACH,qBAAa,eAAe,CAAC,MAAM,GAAG,OAAO;IAC3C,OAAO,CAAC,KAAK,CAAQ;IACrB,OAAO,CAAC,MAAM,CAAuB;IACrC,OAAO,CAAC,cAAc,CAAC,CAAqC;gBAEhD,IAAI,EAAE,MAAM;IAIxB;;;;OAIG;IACH,KAAK,CAAC,CAAC,KAAK,eAAe,CAAC,CAAC,CAAC;IAI9B;;OAEG;IACH,QAAQ,CAAC,SAAS,EAAE,CAAC,KAAK,EAAE,OAAO,KAAK,KAAK,IAAI,MAAM,GAAG,IAAI;IAK9D;;OAEG;IACH,IAAI,CACF,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,CAAC,GAAG,EAAE,eAAe,CAAC,MAAM,CAAC,KAAK,OAAO,CAAC,IAAI,CAAC,GAAG,IAAI,EAC/D,OAAO,CAAC,EAAE,WAAW,GACpB,IAAI;IAYP;;;;;OAKG;IACH,MAAM,CACJ,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,CAAC,GAAG,EAAE,eAAe,CAAC,MAAM,CAAC,KAAK,OAAO,CAAC,IAAI,CAAC,GAAG,IAAI,EAC/D,OAAO,CAAC,EAAE,WAAW,GACpB,IAAI;IAYP;;OAEG;IACH,KAAK,IAAI,kBAAkB,CAAC,MAAM,CAAC;IAYnC;;OAEG;IACH,IAAI,IAAI,IAAI,MAAM,CAEjB;IAED;;OAEG;IACH,IAAI,SAAS,IAAI,MAAM,CAEtB;CACF;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,wBAAgB,cAAc,CAAC,IAAI,EAAE,MAAM,GAAG,eAAe,CAE5D"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/builder/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAA"}
|
package/dist/bun.d.ts
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Bun-specific exports
|
|
3
|
+
*
|
|
4
|
+
* This entry exports Bun-optimized components.
|
|
5
|
+
*
|
|
6
|
+
* @module @gravito/flux/bun
|
|
7
|
+
*/
|
|
8
|
+
export { BunSQLiteStorage, type BunSQLiteStorageOptions } from './storage/BunSQLiteStorage';
|
|
9
|
+
//# sourceMappingURL=bun.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"bun.d.ts","sourceRoot":"","sources":["../src/bun.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,EAAE,gBAAgB,EAAE,KAAK,uBAAuB,EAAE,MAAM,4BAA4B,CAAA"}
|
package/dist/bun.js
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
// @bun
|
|
2
|
+
// src/storage/BunSQLiteStorage.ts
|
|
3
|
+
import { Database } from "bun:sqlite";
|
|
4
|
+
|
|
5
|
+
class BunSQLiteStorage {
|
|
6
|
+
db;
|
|
7
|
+
tableName;
|
|
8
|
+
initialized = false;
|
|
9
|
+
constructor(options = {}) {
|
|
10
|
+
this.db = new Database(options.path ?? ":memory:");
|
|
11
|
+
this.tableName = options.tableName ?? "flux_workflows";
|
|
12
|
+
}
|
|
13
|
+
async init() {
|
|
14
|
+
if (this.initialized) {
|
|
15
|
+
return;
|
|
16
|
+
}
|
|
17
|
+
this.db.run(`
|
|
18
|
+
CREATE TABLE IF NOT EXISTS ${this.tableName} (
|
|
19
|
+
id TEXT PRIMARY KEY,
|
|
20
|
+
name TEXT NOT NULL,
|
|
21
|
+
status TEXT NOT NULL,
|
|
22
|
+
input TEXT NOT NULL,
|
|
23
|
+
data TEXT NOT NULL,
|
|
24
|
+
current_step INTEGER NOT NULL,
|
|
25
|
+
history TEXT NOT NULL,
|
|
26
|
+
error TEXT,
|
|
27
|
+
created_at TEXT NOT NULL,
|
|
28
|
+
updated_at TEXT NOT NULL,
|
|
29
|
+
completed_at TEXT
|
|
30
|
+
)
|
|
31
|
+
`);
|
|
32
|
+
this.db.run(`
|
|
33
|
+
CREATE INDEX IF NOT EXISTS idx_${this.tableName}_name
|
|
34
|
+
ON ${this.tableName}(name)
|
|
35
|
+
`);
|
|
36
|
+
this.db.run(`
|
|
37
|
+
CREATE INDEX IF NOT EXISTS idx_${this.tableName}_status
|
|
38
|
+
ON ${this.tableName}(status)
|
|
39
|
+
`);
|
|
40
|
+
this.db.run(`
|
|
41
|
+
CREATE INDEX IF NOT EXISTS idx_${this.tableName}_created
|
|
42
|
+
ON ${this.tableName}(created_at DESC)
|
|
43
|
+
`);
|
|
44
|
+
this.initialized = true;
|
|
45
|
+
}
|
|
46
|
+
async save(state) {
|
|
47
|
+
await this.init();
|
|
48
|
+
const stmt = this.db.prepare(`
|
|
49
|
+
INSERT OR REPLACE INTO ${this.tableName}
|
|
50
|
+
(id, name, status, input, data, current_step, history, error, created_at, updated_at, completed_at)
|
|
51
|
+
VALUES ($id, $name, $status, $input, $data, $currentStep, $history, $error, $createdAt, $updatedAt, $completedAt)
|
|
52
|
+
`);
|
|
53
|
+
stmt.run({
|
|
54
|
+
$id: state.id,
|
|
55
|
+
$name: state.name,
|
|
56
|
+
$status: state.status,
|
|
57
|
+
$input: JSON.stringify(state.input),
|
|
58
|
+
$data: JSON.stringify(state.data),
|
|
59
|
+
$currentStep: state.currentStep,
|
|
60
|
+
$history: JSON.stringify(state.history),
|
|
61
|
+
$error: state.error ?? null,
|
|
62
|
+
$createdAt: state.createdAt.toISOString(),
|
|
63
|
+
$updatedAt: state.updatedAt.toISOString(),
|
|
64
|
+
$completedAt: state.completedAt?.toISOString() ?? null
|
|
65
|
+
});
|
|
66
|
+
}
|
|
67
|
+
async load(id) {
|
|
68
|
+
await this.init();
|
|
69
|
+
const stmt = this.db.prepare(`
|
|
70
|
+
SELECT * FROM ${this.tableName} WHERE id = $id
|
|
71
|
+
`);
|
|
72
|
+
const row = stmt.get({ $id: id });
|
|
73
|
+
if (!row) {
|
|
74
|
+
return null;
|
|
75
|
+
}
|
|
76
|
+
return this.rowToState(row);
|
|
77
|
+
}
|
|
78
|
+
async list(filter) {
|
|
79
|
+
await this.init();
|
|
80
|
+
let query = `SELECT * FROM ${this.tableName} WHERE 1=1`;
|
|
81
|
+
const params = {};
|
|
82
|
+
if (filter?.name) {
|
|
83
|
+
query += " AND name = $name";
|
|
84
|
+
params.$name = filter.name;
|
|
85
|
+
}
|
|
86
|
+
if (filter?.status) {
|
|
87
|
+
if (Array.isArray(filter.status)) {
|
|
88
|
+
const placeholders = filter.status.map((_, i) => `$status${i}`).join(", ");
|
|
89
|
+
query += ` AND status IN (${placeholders})`;
|
|
90
|
+
filter.status.forEach((s, i) => {
|
|
91
|
+
params[`$status${i}`] = s;
|
|
92
|
+
});
|
|
93
|
+
} else {
|
|
94
|
+
query += " AND status = $status";
|
|
95
|
+
params.$status = filter.status;
|
|
96
|
+
}
|
|
97
|
+
}
|
|
98
|
+
query += " ORDER BY created_at DESC";
|
|
99
|
+
if (filter?.limit) {
|
|
100
|
+
query += " LIMIT $limit";
|
|
101
|
+
params.$limit = filter.limit;
|
|
102
|
+
}
|
|
103
|
+
if (filter?.offset) {
|
|
104
|
+
query += " OFFSET $offset";
|
|
105
|
+
params.$offset = filter.offset;
|
|
106
|
+
}
|
|
107
|
+
const stmt = this.db.prepare(query);
|
|
108
|
+
const rows = stmt.all(params);
|
|
109
|
+
return rows.map((row) => this.rowToState(row));
|
|
110
|
+
}
|
|
111
|
+
async delete(id) {
|
|
112
|
+
await this.init();
|
|
113
|
+
const stmt = this.db.prepare(`
|
|
114
|
+
DELETE FROM ${this.tableName} WHERE id = $id
|
|
115
|
+
`);
|
|
116
|
+
stmt.run({ $id: id });
|
|
117
|
+
}
|
|
118
|
+
async close() {
|
|
119
|
+
this.db.close();
|
|
120
|
+
this.initialized = false;
|
|
121
|
+
}
|
|
122
|
+
rowToState(row) {
|
|
123
|
+
return {
|
|
124
|
+
id: row.id,
|
|
125
|
+
name: row.name,
|
|
126
|
+
status: row.status,
|
|
127
|
+
input: JSON.parse(row.input),
|
|
128
|
+
data: JSON.parse(row.data),
|
|
129
|
+
currentStep: row.current_step,
|
|
130
|
+
history: JSON.parse(row.history),
|
|
131
|
+
error: row.error ?? undefined,
|
|
132
|
+
createdAt: new Date(row.created_at),
|
|
133
|
+
updatedAt: new Date(row.updated_at),
|
|
134
|
+
completedAt: row.completed_at ? new Date(row.completed_at) : undefined
|
|
135
|
+
};
|
|
136
|
+
}
|
|
137
|
+
getDatabase() {
|
|
138
|
+
return this.db;
|
|
139
|
+
}
|
|
140
|
+
vacuum() {
|
|
141
|
+
this.db.run("VACUUM");
|
|
142
|
+
}
|
|
143
|
+
}
|
|
144
|
+
|
|
145
|
+
export { BunSQLiteStorage };
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Context Manager for workflow execution
|
|
3
|
+
*
|
|
4
|
+
* Manages workflow context lifecycle and state snapshots.
|
|
5
|
+
*
|
|
6
|
+
* @module @gravito/flux/core
|
|
7
|
+
*/
|
|
8
|
+
import type { WorkflowContext, WorkflowState, WorkflowStatus } from '../types';
|
|
9
|
+
/**
|
|
10
|
+
* Context Manager
|
|
11
|
+
*
|
|
12
|
+
* Creates and manages workflow execution contexts.
|
|
13
|
+
*/
|
|
14
|
+
export declare class ContextManager {
|
|
15
|
+
/**
|
|
16
|
+
* Create a new workflow context
|
|
17
|
+
*/
|
|
18
|
+
create<TInput>(name: string, input: TInput, stepCount: number): WorkflowContext<TInput>;
|
|
19
|
+
/**
|
|
20
|
+
* Restore context from saved state
|
|
21
|
+
*/
|
|
22
|
+
restore<TInput>(state: WorkflowState): WorkflowContext<TInput>;
|
|
23
|
+
/**
|
|
24
|
+
* Convert context to serializable state
|
|
25
|
+
*/
|
|
26
|
+
toState(ctx: WorkflowContext): WorkflowState;
|
|
27
|
+
/**
|
|
28
|
+
* Update context status (returns new context for immutability)
|
|
29
|
+
*/
|
|
30
|
+
updateStatus(ctx: WorkflowContext, status: WorkflowStatus): WorkflowContext;
|
|
31
|
+
/**
|
|
32
|
+
* Advance to next step
|
|
33
|
+
*/
|
|
34
|
+
advanceStep(ctx: WorkflowContext): WorkflowContext;
|
|
35
|
+
/**
|
|
36
|
+
* Update step name in history
|
|
37
|
+
*/
|
|
38
|
+
setStepName(ctx: WorkflowContext, index: number, name: string): void;
|
|
39
|
+
}
|
|
40
|
+
//# sourceMappingURL=ContextManager.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"ContextManager.d.ts","sourceRoot":"","sources":["../../src/core/ContextManager.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,EAAiB,eAAe,EAAE,aAAa,EAAE,cAAc,EAAE,MAAM,UAAU,CAAA;AAS7F;;;;GAIG;AACH,qBAAa,cAAc;IACzB;;OAEG;IACH,MAAM,CAAC,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,eAAe,CAAC,MAAM,CAAC;IAkBvF;;OAEG;IACH,OAAO,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,GAAG,eAAe,CAAC,MAAM,CAAC;IAY9D;;OAEG;IACH,OAAO,CAAC,GAAG,EAAE,eAAe,GAAG,aAAa;IAc5C;;OAEG;IACH,YAAY,CAAC,GAAG,EAAE,eAAe,EAAE,MAAM,EAAE,cAAc,GAAG,eAAe;IAO3E;;OAEG;IACH,WAAW,CAAC,GAAG,EAAE,eAAe,GAAG,eAAe;IAOlD;;OAEG;IACH,WAAW,CAAC,GAAG,EAAE,eAAe,EAAE,KAAK,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,GAAG,IAAI;CAKrE"}
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview State Machine for workflow status transitions
|
|
3
|
+
*
|
|
4
|
+
* Pure state machine with no runtime dependencies.
|
|
5
|
+
*
|
|
6
|
+
* @module @gravito/flux/core
|
|
7
|
+
*/
|
|
8
|
+
import type { WorkflowStatus } from '../types';
|
|
9
|
+
/**
|
|
10
|
+
* State Machine for workflow status management
|
|
11
|
+
*
|
|
12
|
+
* Provides validated state transitions using EventTarget for events.
|
|
13
|
+
*/
|
|
14
|
+
export declare class StateMachine extends EventTarget {
|
|
15
|
+
private _status;
|
|
16
|
+
/**
|
|
17
|
+
* Current status
|
|
18
|
+
*/
|
|
19
|
+
get status(): WorkflowStatus;
|
|
20
|
+
/**
|
|
21
|
+
* Check if transition to target status is allowed
|
|
22
|
+
*/
|
|
23
|
+
canTransition(to: WorkflowStatus): boolean;
|
|
24
|
+
/**
|
|
25
|
+
* Transition to a new status
|
|
26
|
+
*
|
|
27
|
+
* @throws {Error} If transition is not allowed
|
|
28
|
+
*/
|
|
29
|
+
transition(to: WorkflowStatus): void;
|
|
30
|
+
/**
|
|
31
|
+
* Force set status (for replay/restore)
|
|
32
|
+
*/
|
|
33
|
+
forceStatus(status: WorkflowStatus): void;
|
|
34
|
+
/**
|
|
35
|
+
* Check if workflow is in terminal state
|
|
36
|
+
*/
|
|
37
|
+
isTerminal(): boolean;
|
|
38
|
+
/**
|
|
39
|
+
* Check if workflow can be executed
|
|
40
|
+
*/
|
|
41
|
+
canExecute(): boolean;
|
|
42
|
+
}
|
|
43
|
+
//# sourceMappingURL=StateMachine.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"StateMachine.d.ts","sourceRoot":"","sources":["../../src/core/StateMachine.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,UAAU,CAAA;AAa9C;;;;GAIG;AACH,qBAAa,YAAa,SAAQ,WAAW;IAC3C,OAAO,CAAC,OAAO,CAA4B;IAE3C;;OAEG;IACH,IAAI,MAAM,IAAI,cAAc,CAE3B;IAED;;OAEG;IACH,aAAa,CAAC,EAAE,EAAE,cAAc,GAAG,OAAO;IAI1C;;;;OAIG;IACH,UAAU,CAAC,EAAE,EAAE,cAAc,GAAG,IAAI;IAgBpC;;OAEG;IACH,WAAW,CAAC,MAAM,EAAE,cAAc,GAAG,IAAI;IAIzC;;OAEG;IACH,UAAU,IAAI,OAAO;IAIrB;;OAEG;IACH,UAAU,IAAI,OAAO;CAGtB"}
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Step Executor for workflow steps
|
|
3
|
+
*
|
|
4
|
+
* Handles step execution with retry and timeout support.
|
|
5
|
+
*
|
|
6
|
+
* @module @gravito/flux/core
|
|
7
|
+
*/
|
|
8
|
+
import type { StepDefinition, StepExecution, StepResult, WorkflowContext } from '../types';
|
|
9
|
+
/**
|
|
10
|
+
* Step Executor
|
|
11
|
+
*
|
|
12
|
+
* Executes individual workflow steps with retry and timeout support.
|
|
13
|
+
*/
|
|
14
|
+
export declare class StepExecutor {
|
|
15
|
+
private defaultRetries;
|
|
16
|
+
private defaultTimeout;
|
|
17
|
+
constructor(options?: {
|
|
18
|
+
defaultRetries?: number;
|
|
19
|
+
defaultTimeout?: number;
|
|
20
|
+
});
|
|
21
|
+
/**
|
|
22
|
+
* Execute a step with retry and timeout
|
|
23
|
+
*/
|
|
24
|
+
execute(step: StepDefinition, ctx: WorkflowContext, execution: StepExecution): Promise<StepResult>;
|
|
25
|
+
/**
|
|
26
|
+
* Execute handler with timeout
|
|
27
|
+
*/
|
|
28
|
+
private executeWithTimeout;
|
|
29
|
+
/**
|
|
30
|
+
* Sleep helper
|
|
31
|
+
*/
|
|
32
|
+
private sleep;
|
|
33
|
+
}
|
|
34
|
+
//# sourceMappingURL=StepExecutor.d.ts.map
|