@bobtail.software/b-durable 1.0.6 → 1.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -207
- package/dist/compiler/cli.mjs +42 -37
- package/dist/index.d.mts +96 -66
- package/dist/index.mjs +91 -1
- package/package.json +2 -1
package/README.md
CHANGED
|
@@ -1,207 +1 @@
|
|
|
1
|
-
#
|
|
2
|
-
|
|
3
|
-

|
|
4
|
-

|
|
5
|
-
|
|
6
|
-
`b-durable` is a production-ready system that transforms standard `async` functions into **composable, interactive, durable, and resilient workflows**. It lets you write long-running business logic—spanning hours, days, or months—as simple, linear `async/await` code. The system handles state persistence, orchestration, crash recovery, strict versioning, and observability.
|
|
7
|
-
|
|
8
|
-
## The Problem
|
|
9
|
-
|
|
10
|
-
Standard `async/await` is great for short-lived operations, but it breaks down for complex, long-running processes:
|
|
11
|
-
|
|
12
|
-
1. **Fragility**: If your server restarts mid-execution, all in-memory state is lost.
|
|
13
|
-
2. **Inefficiency**: An operation like `await bSleep('7 days')` is impossible standard Node.js.
|
|
14
|
-
3. **Operational Blindness**: It's hard to know the state of a multi-step process running across distributed services.
|
|
15
|
-
4. **Deployment Risks**: Deploying new code while old processes are running can corrupt memory/state.
|
|
16
|
-
|
|
17
|
-
## The `b-durable` Solution
|
|
18
|
-
|
|
19
|
-
`b-durable` allows you to express this complexity as a single, readable `async` function. The system automatically persists the workflow's state after each `await` step in Redis.
|
|
20
|
-
|
|
21
|
-
### Key Capabilities
|
|
22
|
-
|
|
23
|
-
- **🛡️ Strict Versioning**: Prevents state corruption by ensuring running workflows only execute code compatible with their version.
|
|
24
|
-
- **💀 The Reaper (Reliability)**: Automatically detects crashed workers and recovers "lost" tasks, ensuring zero data loss.
|
|
25
|
-
- **👁️ Observability First**: Injectable Loggers, `getState` inspection API, and detailed tracking.
|
|
26
|
-
- **♻️ Dead Letter Queue (DLQ)**: Automatic retries for failed tasks; moves persistently failing tasks to a DLQ for manual inspection.
|
|
27
|
-
- **🛑 Cancellation**: Gracefully cancel running workflows with support for cleanup logic (`try/catch/finally`).
|
|
28
|
-
- **🧹 Auto-Retention**: Automatically expire completed/failed workflows from Redis to manage storage costs.
|
|
29
|
-
|
|
30
|
-
## Example: E-Commerce Order
|
|
31
|
-
|
|
32
|
-
```typescript
|
|
33
|
-
import { bDurable } from '@bobtail.software/b-durable';
|
|
34
|
-
|
|
35
|
-
// Define contracts
|
|
36
|
-
interface OrderEvents { 'order.approved': { approverId: string }; }
|
|
37
|
-
interface OrderSignals { 'status.update': { status: string }; }
|
|
38
|
-
|
|
39
|
-
export const orderProcessingWorkflow = bDurable<
|
|
40
|
-
{ orderId: string; amount: number },
|
|
41
|
-
{ status: 'completed' | 'failed' },
|
|
42
|
-
OrderEvents,
|
|
43
|
-
OrderSignals
|
|
44
|
-
>({
|
|
45
|
-
// VERSIONING IS MANDATORY
|
|
46
|
-
version: '1.0',
|
|
47
|
-
workflow: async (input, context) => {
|
|
48
|
-
try {
|
|
49
|
-
// 1. Execute sub-workflow
|
|
50
|
-
const payment = await context.bExecute(paymentWorkflow, { amount: input.amount });
|
|
51
|
-
|
|
52
|
-
// 2. Emit non-blocking signal
|
|
53
|
-
await context.bSignal('status.update', { status: 'paid' });
|
|
54
|
-
|
|
55
|
-
// 3. Wait for external event (human approval)
|
|
56
|
-
const approval = await context.bWaitForEvent('order.approved');
|
|
57
|
-
context.log(`Order approved by ${approval.approverId}`);
|
|
58
|
-
|
|
59
|
-
// 4. Schedule shipping
|
|
60
|
-
await shipOrder(input.orderId);
|
|
61
|
-
|
|
62
|
-
return { status: 'completed' };
|
|
63
|
-
} catch (error) {
|
|
64
|
-
// 5. Handle errors (and cancellations!) durably
|
|
65
|
-
if (error.isCancellation) {
|
|
66
|
-
await releaseInventory(input.orderId); // Cleanup
|
|
67
|
-
throw error;
|
|
68
|
-
}
|
|
69
|
-
await notifyFailure(input.orderId);
|
|
70
|
-
return { status: 'failed' };
|
|
71
|
-
}
|
|
72
|
-
},
|
|
73
|
-
});
|
|
74
|
-
```
|
|
75
|
-
|
|
76
|
-
## Getting Started
|
|
77
|
-
|
|
78
|
-
### 1. Installation
|
|
79
|
-
|
|
80
|
-
```bash
|
|
81
|
-
pnpm add @bobtail.software/b-durable ioredis ms
|
|
82
|
-
pnpm add -D @bobtail.software/eslint-plugin-b-durable
|
|
83
|
-
```
|
|
84
|
-
|
|
85
|
-
### 2. Define a Workflow
|
|
86
|
-
|
|
87
|
-
Create a `.workflow.ts` file. Note the mandatory `version` field.
|
|
88
|
-
|
|
89
|
-
```typescript
|
|
90
|
-
// src/workflows/user.workflow.ts
|
|
91
|
-
import { bDurable } from '@bobtail.software/b-durable';
|
|
92
|
-
import { sendEmail } from '../services';
|
|
93
|
-
|
|
94
|
-
export const userOnboarding = bDurable({
|
|
95
|
-
name: 'userOnboarding',
|
|
96
|
-
version: '1.0', // Required for safety
|
|
97
|
-
workflow: async (input: { email: string }, context) => {
|
|
98
|
-
await context.bSleep('1 day');
|
|
99
|
-
await sendEmail(input.email, 'Welcome!');
|
|
100
|
-
return 'sent';
|
|
101
|
-
},
|
|
102
|
-
});
|
|
103
|
-
```
|
|
104
|
-
|
|
105
|
-
### 3. Compile
|
|
106
|
-
|
|
107
|
-
Add to `package.json`:
|
|
108
|
-
```json
|
|
109
|
-
"scripts": {
|
|
110
|
-
"compile-workflows": "b-durable-compiler --in src/workflows --out src/generated"
|
|
111
|
-
}
|
|
112
|
-
```
|
|
113
|
-
Run `pnpm compile-workflows`.
|
|
114
|
-
|
|
115
|
-
### 4. Initialize the Runtime
|
|
116
|
-
|
|
117
|
-
Initialize the system with Redis connections and configuration options.
|
|
118
|
-
|
|
119
|
-
```typescript
|
|
120
|
-
// src/main.ts
|
|
121
|
-
import { bDurableInitialize } from '@bobtail.software/b-durable';
|
|
122
|
-
import Redis from 'ioredis';
|
|
123
|
-
import durableFunctions from './generated';
|
|
124
|
-
import { myLogger } from './logger'; // Your Winston/Pino logger
|
|
125
|
-
|
|
126
|
-
const durableSystem = bDurableInitialize({
|
|
127
|
-
durableFunctions,
|
|
128
|
-
sourceRoot: process.cwd(),
|
|
129
|
-
redisClient: new Redis(),
|
|
130
|
-
blockingRedisClient: new Redis(), // Dedicated connection for queues
|
|
131
|
-
|
|
132
|
-
// --- Production Configuration ---
|
|
133
|
-
retention: '7 days', // Auto-delete finished workflows after 7 days
|
|
134
|
-
pollingInterval: 5000, // Scheduler/Heartbeat frequency (default: 5000ms)
|
|
135
|
-
logger: { // Inject your logger for better observability
|
|
136
|
-
info: (msg, meta) => myLogger.info(msg, meta),
|
|
137
|
-
error: (msg, meta) => myLogger.error(msg, meta),
|
|
138
|
-
warn: (msg, meta) => myLogger.warn(msg, meta),
|
|
139
|
-
debug: (msg, meta) => myLogger.debug(msg, meta),
|
|
140
|
-
}
|
|
141
|
-
});
|
|
142
|
-
|
|
143
|
-
// Start a workflow
|
|
144
|
-
const { workflowId } = await durableSystem.start(userOnboarding, {
|
|
145
|
-
input: { email: 'test@example.com' }
|
|
146
|
-
});
|
|
147
|
-
|
|
148
|
-
// Inspect state in real-time
|
|
149
|
-
const state = await durableSystem.getState(workflowId);
|
|
150
|
-
console.log(`Current step: ${state.step}, Status: ${state.status}`);
|
|
151
|
-
```
|
|
152
|
-
|
|
153
|
-
## Advanced Features
|
|
154
|
-
|
|
155
|
-
### Strict Versioning & Deployment
|
|
156
|
-
|
|
157
|
-
When you modify a workflow, you **must** increment the `version` string (e.g., `'1.0'` -> `'1.1'`).
|
|
158
|
-
|
|
159
|
-
* **Runtime Check**: Before executing a step, the worker checks if the database version matches the code version.
|
|
160
|
-
* **Mismatch**: If versions differ, the workflow halts with status `VERSION_MISMATCH`. This prevents "Frankenstein" workflows where step 2 of version 1 tries to run step 3 of version 2.
|
|
161
|
-
* **Strategy**: Run new workers alongside old workers (Blue/Green) or drain queues before deploying breaking changes.
|
|
162
|
-
|
|
163
|
-
### Reliability & The Reaper
|
|
164
|
-
|
|
165
|
-
* **Heartbeats**: Every worker sends a heartbeat to Redis every few seconds.
|
|
166
|
-
* **The Reaper**: If a worker crashes (OOM, power failure) while holding a task, the Reaper detects the missing heartbeat and automatically re-queues the task for another worker. No manual intervention required.
|
|
167
|
-
|
|
168
|
-
### Error Handling & Dead Letter Queue (DLQ)
|
|
169
|
-
|
|
170
|
-
* **Retries**: Tasks are automatically retried 3 times on failure with backoff.
|
|
171
|
-
* **DLQ**: After 3 failures, the task payload and error stack are moved to the Redis list `queue:dead`.
|
|
172
|
-
* **Sub-workflows**: Failures in sub-workflows bubble up to the parent as standard JavaScript exceptions, catchable with `try/catch`.
|
|
173
|
-
|
|
174
|
-
### Cancellation
|
|
175
|
-
|
|
176
|
-
You can cancel a running workflow at any time.
|
|
177
|
-
|
|
178
|
-
```typescript
|
|
179
|
-
await durableSystem.cancel(workflowId, 'User requested cancellation');
|
|
180
|
-
```
|
|
181
|
-
|
|
182
|
-
Inside the workflow, this throws a `WorkflowCancellationError`. You can catch this to perform cleanup (e.g., reverting a payment) before re-throwing or returning.
|
|
183
|
-
|
|
184
|
-
```typescript
|
|
185
|
-
try {
|
|
186
|
-
await context.bWaitForEvent('approval');
|
|
187
|
-
} catch (e) {
|
|
188
|
-
if (e.isCancellation) {
|
|
189
|
-
await refundPayment();
|
|
190
|
-
// Workflow ends here as CANCELLED
|
|
191
|
-
}
|
|
192
|
-
throw e;
|
|
193
|
-
}
|
|
194
|
-
```
|
|
195
|
-
|
|
196
|
-
## Architecture
|
|
197
|
-
|
|
198
|
-
1. **Compiler**: Analyzes `await` points and transforms code into a deterministic state machine.
|
|
199
|
-
2. **Redis**: Stores state, task queues, locks, and signals.
|
|
200
|
-
3. **Runtime**:
|
|
201
|
-
* **Dispatcher**: Routes tasks to service functions.
|
|
202
|
-
* **Scheduler**: Manages timers (`bSleep`) and the Reaper.
|
|
203
|
-
* **Signal Bus**: Uses Redis Pub/Sub for real-time communication.
|
|
204
|
-
|
|
205
|
-
## License
|
|
206
|
-
|
|
207
|
-
GPL-3.0
|
|
1
|
+
# Soon™
|
package/dist/compiler/cli.mjs
CHANGED
|
@@ -1,59 +1,64 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
import
|
|
3
|
-
Procesando archivo: ${u.getBaseName()}`);let
|
|
4
|
-
`),
|
|
5
|
-
`),
|
|
6
|
-
`),
|
|
7
|
-
-> Archivo de \xEDndice generado: ${
|
|
8
|
-
Formateando archivos generados con Prettier...`);for(let u of
|
|
9
|
-
Compilaci\xF3n completada exitosamente.`)}function
|
|
10
|
-
`),
|
|
2
|
+
import H from"path";import{existsSync as X,mkdirSync as Y,rmSync as J}from"fs";import T from"path";import*as M from"prettier";import{Node as i,Project as Q,SyntaxKind as b,ts as _,VariableDeclarationKind as z}from"ts-morph";var Z="bDurable",V=_.TypeFormatFlags.UseAliasDefinedOutsideCurrentScope|_.TypeFormatFlags.NoTruncation;async function ee(e){let n=e.getFilePath(),o=e.getFullText(),s=await M.resolveConfig(n),a=await M.format(o,{...s,parser:"typescript"});e.replaceWithText(a)}async function U(e){console.log("Iniciando compilador de workflows duraderos...");let{inputDir:n,outputDir:o,packageName:s}=e,a=new Q({tsConfigFilePath:T.resolve(process.cwd(),"tsconfig.json")}),c=a.addSourceFilesAtPaths(`${n}/**/*.ts`);X(o)&&(console.log(`Limpiando directorio de salida: ${o}`),J(o,{recursive:!0,force:!0})),Y(o,{recursive:!0});let g=a.createDirectory(o);console.log(`Encontrados ${c.length} archivos de workflow para procesar.`);let r=[],l=[];for(let u of c){console.log(`
|
|
3
|
+
Procesando archivo: ${u.getBaseName()}`);let t=u.getDescendantsOfKind(b.CallExpression).filter(m=>m.getExpression().getText()===Z);if(t.length!==0)for(let m of t){let p=m.getParentIfKind(b.VariableDeclaration);if(!p)continue;let d=p.getName();console.log(` -> Transformando workflow: ${d}`);let[f]=m.getArguments();if(!i.isObjectLiteralExpression(f))continue;let S=f.getProperty("workflow");if(!S||!i.isPropertyAssignment(S))continue;let x=S.getInitializer();if(!x||!i.isArrowFunction(x))continue;let D=u.getBaseName().replace(/\.ts$/,".compiled.mts"),I=T.join(g.getPath(),D),w=a.createSourceFile(I,"",{overwrite:!0});l.push(w),te(d,x,m,w,s),console.log(` -> Archivo generado: ${T.relative(process.cwd(),I)}`);let E=D;r.push({name:d,importPath:`./${E}`})}}if(r.length>0){let u=T.join(g.getPath(),"index.mts"),t=a.createSourceFile(u,"",{overwrite:!0});l.push(t),t.addStatements(`// Este archivo fue generado autom\xE1ticamente. NO EDITAR MANUALMENTE.
|
|
4
|
+
`),t.addImportDeclaration({isTypeOnly:!0,moduleSpecifier:s,namedImports:["DurableFunction"]});for(let p of r)t.addImportDeclaration({moduleSpecifier:p.importPath,namedImports:[p.name]});t.addExportDeclaration({namedExports:r.map(p=>p.name)}),t.addStatements(`
|
|
5
|
+
`),t.addVariableStatement({declarationKind:z.Const,declarations:[{name:"durableFunctions",type:"Map<string, DurableFunction<any, any, any, any>>",initializer:"new Map()"}]});let m=r.map(p=>`durableFunctions.set(${p.name}.name, ${p.name});`);t.addStatements(m),t.addStatements(`
|
|
6
|
+
`),t.addExportAssignment({isExportEquals:!1,expression:"durableFunctions"}),console.log(`
|
|
7
|
+
-> Archivo de \xEDndice generado: ${T.basename(u)}`)}console.log(`
|
|
8
|
+
Formateando archivos generados con Prettier...`);for(let u of l)await ee(u);await a.save(),console.log(`
|
|
9
|
+
Compilaci\xF3n completada exitosamente.`)}function te(e,n,o,s,a){let c=n.getBody();if(!i.isBlock(c))throw new Error(`El cuerpo del workflow '${e}' debe ser un bloque {}.`);let[g]=o.getArguments();if(!i.isObjectLiteralExpression(g))throw new Error("El argumento de bDurable debe ser un objeto.");let r=g.getProperty("version");if(!r||!i.isPropertyAssignment(r))throw new Error(`El workflow '${e}' debe tener una propiedad 'version'.`);let l=r.getInitializer();if(!l||!i.isStringLiteral(l))throw new Error(`La versi\xF3n del workflow '${e}' debe ser un string literal.`);let u=g.getProperty("retryOptions"),t="undefined";u&&i.isPropertyAssignment(u)&&(t=u.getInitializer()?.getText()||"undefined");let{clauses:m}=$(c.getStatements(),{step:0,persistedVariables:new Map}),p=n.getReturnType();p.getSymbol()?.getName()==="Promise"&&p.isObject()&&(p=p.getTypeArguments()[0]||p);let d=p.getText(void 0,V),f=new Set,S=n.getSourceFile(),x=o.getTypeArguments(),N=x.length>0?x[0].getText():"unknown",D=x.length>2?x[2].getText():"Record<string, never>",I=x.length>3?x[3].getText():"Record<string, never>";S.getImportDeclarations().forEach(y=>{if(y.getModuleSpecifierValue()===a)return;let h=y.getModuleSpecifierValue();if(h.includes(".workflow")){let A=T.parse(h),C=T.join(A.dir,A.base+".compiled.mts");!C.startsWith(".")&&!T.isAbsolute(C)&&(C="./"+C),h=C.replace(/\\/g,"/")}else h.startsWith(".")&&T.extname(h)===""&&(h+=".mjs");let R=[],B=[];y.getNamedImports().forEach(A=>{let C=A.getName(),j=A.getAliasNode()?.getText(),W=j?`${C} as ${j}`:C,K=(A.getNameNode().getSymbol()?.getAliasedSymbol()??A.getNameNode().getSymbol())?.getDeclarations()??[],G=K.some(O=>i.isEnumDeclaration(O));A.isTypeOnly()||!G&&K.every(O=>i.isInterfaceDeclaration(O)||i.isTypeAliasDeclaration(O))?B.push(W):R.push(W)}),R.length>0&&s.addImportDeclaration({moduleSpecifier:h,namedImports:R}),B.length>0&&s.addImportDeclaration({isTypeOnly:!0,moduleSpecifier:h,namedImports:B});let 
L=y.getDefaultImport();L&&s.addImportDeclaration({moduleSpecifier:h,defaultImport:L.getText()})}),S.getInterfaces().forEach(y=>{f.add(y.getText().startsWith("export")?y.getText():`export ${y.getText()}`)}),S.getTypeAliases().forEach(y=>{f.add(y.getText().startsWith("export")?y.getText():`export ${y.getText()}`)});let[w]=n.getParameters(),E="";if(w){let y=w.getNameNode().getText();y!=="input"&&(E=`const ${y} = input;`)}s.addImportDeclaration({isTypeOnly:!0,moduleSpecifier:a,namedImports:["DurableFunction","WorkflowContext","Instruction"]}),f.size>0&&(s.addStatements(`
|
|
10
|
+
`),s.addStatements(Array.from(f))),s.addStatements(`
|
|
11
11
|
// Este archivo fue generado autom\xE1ticamente. NO EDITAR MANUALMENTE.
|
|
12
|
-
`);let
|
|
12
|
+
`);let P=l.getLiteralValue(),F=`{
|
|
13
13
|
__isDurable: true,
|
|
14
14
|
name: '${e}',
|
|
15
|
-
version: '${
|
|
16
|
-
|
|
15
|
+
version: '${P}',
|
|
16
|
+
retryOptions: ${t},
|
|
17
|
+
async execute(context: WorkflowContext<${N}>): Promise<Instruction<${d}>> {
|
|
17
18
|
const { input, state, result, log, workflowId } = context;
|
|
18
|
-
${
|
|
19
|
+
${E}
|
|
19
20
|
while (true) {
|
|
20
21
|
switch (context.step) {
|
|
21
|
-
${
|
|
22
|
+
${m.join(`
|
|
22
23
|
`)}
|
|
23
24
|
default:
|
|
24
25
|
throw new Error(\`Paso desconocido: \${context.step}\`);
|
|
25
26
|
}
|
|
26
27
|
}
|
|
27
28
|
}
|
|
28
|
-
}`;
|
|
29
|
-
${
|
|
29
|
+
}`;s.addVariableStatement({isExported:!0,declarationKind:z.Const,declarations:[{name:e,type:`DurableFunction<${N}, ${d}, ${D}, ${I}>`,initializer:F}]}),s.organizeImports()}function $(e,n){if(e.length===0){let f=[];if(n.pendingStateAssignment){let S=`case ${n.step}: {
|
|
30
|
+
${n.pendingStateAssignment}
|
|
30
31
|
return { type: 'COMPLETE', result: undefined };
|
|
31
|
-
}`;
|
|
32
|
-
`),x=
|
|
33
|
-
return { type: 'COMPLETE', result: undefined };`;return{clauses:[`case ${
|
|
34
|
-
${
|
|
35
|
-
}`],nextStep:
|
|
36
|
-
`),
|
|
37
|
-
${
|
|
38
|
-
}`,
|
|
39
|
-
`),d=
|
|
40
|
-
case ${
|
|
41
|
-
${
|
|
42
|
-
if (${
|
|
43
|
-
context.step = ${
|
|
32
|
+
}`;f.push(S)}return{clauses:f,nextStep:n.step+1}}let{syncBlock:o,durableStatement:s,nextStatements:a}=le(e),{rewrittenSyncStatements:c,newlyPersistedVariables:g}=re(o,s?[s,...a]:[],n.persistedVariables);n.pendingStateAssignment&&c.unshift(n.pendingStateAssignment);let r=new Map([...n.persistedVariables,...g]);if(!s){let f=c.join(`
|
|
33
|
+
`),x=o.length>0&&i.isReturnStatement(o[o.length-1])?"":`
|
|
34
|
+
return { type: 'COMPLETE', result: undefined };`;return{clauses:[`case ${n.step}: {
|
|
35
|
+
${f}${x}
|
|
36
|
+
}`],nextStep:n.step+1}}if(i.isIfStatement(s))return ne(s,a,{...n,persistedVariables:r},c);if(i.isTryStatement(s))return se(s,a,{...n,persistedVariables:r},c);let{instruction:l,nextPendingStateAssignment:u}=ae(s,r);c.push(l);let t=c.join(`
|
|
37
|
+
`),m=`case ${n.step}: {
|
|
38
|
+
${t}
|
|
39
|
+
}`,p={step:n.step+1,persistedVariables:r,pendingStateAssignment:u},d=$(a,p);return{clauses:[m,...d.clauses],nextStep:d.nextStep}}function ne(e,n,o,s){let a=k(e.getExpression(),o.persistedVariables),c=e.getThenStatement(),g=i.isBlock(c)?c.getStatements():[c],r=$(g,{step:o.step+1,persistedVariables:new Map(o.persistedVariables)}),l,u=e.getElseStatement();if(u){let S=i.isBlock(u)?u.getStatements():[u];l=$(S,{step:r.nextStep,persistedVariables:new Map(o.persistedVariables)})}let t=l?l.nextStep:r.nextStep,m=$(n,{step:t,persistedVariables:o.persistedVariables}),p=s.join(`
|
|
40
|
+
`),d=r.nextStep;return{clauses:[`
|
|
41
|
+
case ${o.step}: {
|
|
42
|
+
${p}
|
|
43
|
+
if (${a}) {
|
|
44
|
+
context.step = ${o.step+1};
|
|
44
45
|
} else {
|
|
45
|
-
${u?`context.step = ${d};`:`context.step = ${
|
|
46
|
+
${u?`context.step = ${d};`:`context.step = ${t};`}
|
|
46
47
|
}
|
|
47
48
|
break;
|
|
48
49
|
}
|
|
49
|
-
`,...
|
|
50
|
-
case ${
|
|
51
|
-
${
|
|
50
|
+
`,...r.clauses,...l?l.clauses:[],...m.clauses],nextStep:m.nextStep}}function se(e,n,o,s){let{step:a,persistedVariables:c}=o,g=e.getTryBlock(),r=e.getCatchClause(),l=e.getFinallyBlock(),u=$(g.getStatements(),{step:a+1,persistedVariables:new Map(c)}),t,m,p=u.nextStep;if(r){let E=r.getBlock(),P=r.getVariableDeclaration();P&&(m=P.getName()),t=$(E.getStatements(),{step:p,persistedVariables:new Map(c)})}let d,f=t?t.nextStep:p;l&&(d=$(l.getStatements(),{step:f,persistedVariables:new Map(c)}));let S=d?d.nextStep:f,x=$(n,{step:S,persistedVariables:c}),N=`{ catchStep: ${r?p:"undefined"}, finallyStep: ${l?f:"undefined"} }`,D=`
|
|
51
|
+
case ${a}: {
|
|
52
|
+
${s.join(`
|
|
52
53
|
`)}
|
|
53
54
|
state.tryCatchStack = state.tryCatchStack || [];
|
|
54
|
-
state.tryCatchStack.push(${
|
|
55
|
-
context.step = ${
|
|
55
|
+
state.tryCatchStack.push(${N});
|
|
56
|
+
context.step = ${a+1}; // Salta al inicio del bloque try
|
|
56
57
|
break;
|
|
57
58
|
}
|
|
58
|
-
`,
|
|
59
|
-
const ${
|
|
59
|
+
`,I=u.clauses.pop()||"",w=l?f:S;if(u.clauses.push(I.replace(/return { type: 'COMPLETE'.* };/,`context.step = ${w}; break;`)),t){if(m){let P=t.clauses[0]||`case ${p}: {}`;t.clauses[0]=P.replace("{",`{
|
|
60
|
+
const ${m} = result as unknown;`)}let E=t.clauses.pop()||"";t.clauses.push(E.replace(/return { type: 'COMPLETE'.* };/,`context.step = ${w}; break;`))}if(d){let E=d.clauses.pop()||"";d.clauses.push(E.replace(/return { type: 'COMPLETE'.* };/,`state.tryCatchStack?.pop(); context.step = ${S}; break;`))}return{clauses:[D,...u.clauses,...t?t.clauses:[],...d?d.clauses:[],...x.clauses],nextStep:x.nextStep}}function re(e,n,o){let s=[],a=new Map,c=ie(n),g=new Map(o);for(let r of e){if(i.isVariableStatement(r))for(let u of r.getDeclarations()){let t=u.getInitializer();if(!t)continue;let m=oe(u),p=m.filter(d=>c.has(d.name));if(p.length>0){let d=k(t,o);for(let{name:f,type:S}of p){a.set(f,{type:S}),g.set(f,{type:S});let x=m.length>1?`${d}.${f}`:d;s.push(`state.${f} = ${x};`)}}}s.push(k(r,g))}return{rewrittenSyncStatements:s,newlyPersistedVariables:a}}function oe(e){let n=e.getNameNode(),o=[];if(i.isIdentifier(n)){let s=e.getType().getText(e,V);o.push({name:n.getText(),type:s})}else if(i.isObjectBindingPattern(n))for(let s of n.getElements()){let a=s.getName(),c=s.getType().getText(s,V);o.push({name:a,type:c})}return o}function ae(e,n){if(i.isReturnStatement(e))return{instruction:`return { type: 'COMPLETE', result: ${e.getExpression()?k(e.getExpressionOrThrow(),n):"undefined"} };`,nextPendingStateAssignment:void 0};if(e.getDescendantsOfKind(b.AwaitExpression).length>1)throw new Error(`[b-durable Compiler Error] Multiple 'await' expressions found in a single statement at line ${e.getStartLineNumber()}.
|
|
61
|
+
Please split them into separate lines/variables to ensure safe state persistence.`);let s,a=e.getFirstDescendantByKind(b.VariableDeclaration);if(a){let g=a.getName(),r=a.getType().getText(a,V);n.set(g,{type:r}),s=`state.${g} = result;`}let c=e.getFirstDescendantByKind(b.AwaitExpression);if(c){let g=c.getParent();if(!(i.isExpressionStatement(g)||i.isVariableDeclaration(g)||i.isCallExpression(g)&&i.isExpressionStatement(g.getParent()))&&a&&a.getInitializer()!==c)throw new Error(`[b-durable Compiler Error] Complex 'await' usage detected at line ${e.getStartLineNumber()}.
|
|
62
|
+
The 'await' keyword must be the direct value of the assignment.
|
|
63
|
+
Invalid: const x = 1 + await foo();
|
|
64
|
+
Valid: const temp = await foo(); const x = 1 + temp;`);let l=c.getExpression();if(i.isCallExpression(l))return{instruction:`return ${ce(l,n)};`,nextPendingStateAssignment:s}}return{instruction:k(e,n),nextPendingStateAssignment:s}}function ie(e){let n=new Set;for(let o of e)o.getDescendantsOfKind(b.Identifier).forEach(s=>{n.add(s.getText())});return n}function k(e,n){let o=e.getText(),s=e.getStart(),a=[],c=e.getDescendantsOfKind(b.Identifier);i.isIdentifier(e)&&c.push(e),c.forEach(r=>{let l=r.getText();if(!n.has(l))return;let u=r.getSymbol();if(u&&u.getDeclarations().some(y=>y.getStart()>=e.getStart()&&y.getEnd()<=e.getEnd()))return;let t=r.getParent(),m=i.isVariableDeclaration(t)&&t.getNameNode()===r,p=i.isPropertyAccessExpression(t)&&t.getNameNode()===r||i.isPropertyAssignment(t)&&t.getNameNode()===r,d=i.isBindingElement(t)&&t.getNameNode()===r,f=i.isShorthandPropertyAssignment(t)&&t.getNameNode()===r,S=i.isParameterDeclaration(t)&&t.getNameNode()===r,x=i.isFunctionDeclaration(t)&&t.getNameNode()===r,N=i.isClassDeclaration(t)&&t.getNameNode()===r,D=i.isInterfaceDeclaration(t)&&t.getNameNode()===r,I=i.isTypeAliasDeclaration(t)&&t.getNameNode()===r,w=i.isEnumDeclaration(t)&&t.getNameNode()===r,E=i.isMethodDeclaration(t)&&t.getNameNode()===r;if(!m&&!p&&!d&&!S&&!x&&!N&&!D&&!I&&!w&&!E){let P=n.get(l),F=r.getStart()-s,y=r.getEnd()-s,h=`(state.${l} as ${P.type})`;f&&(h=`${l}: ${h}`),a.push({start:F,end:y,text:h})}}),a.sort((r,l)=>l.start-r.start);let g=o;for(let{start:r,end:l,text:u}of a)g=g.substring(0,r)+u+g.substring(l);return g}function ce(e,n){let o=e.getExpression(),s,a=!1;i.isPropertyAccessExpression(o)?(o.getExpression().getText()==="context"&&(a=!0),s=o.getName()):s=o.getText();let c=e.getArguments().map(t=>k(t,n)).join(", ");if(a)switch(s){case"bSleep":return`{ type: 'SCHEDULE_SLEEP', duration: ${c} }`;case"bWaitForEvent":return`{ type: 'WAIT_FOR_SIGNAL', signalName: ${c} }`;case"bExecute":{let[t,m]=e.getArguments(),p=t.getText(),d=m?k(m,n):"undefined";return`{ 
type: 'EXECUTE_SUBWORKFLOW', workflowName: ${p}.name, input: ${d} }`}case"bSignal":{let[t,m]=e.getArguments().map(p=>k(p,n));return`{ type: 'EMIT_EVENT', eventName: ${t}, payload: ${m} }`}default:throw new Error(`Funci\xF3n de contexto durable desconocida: '${s}'.`)}let g=o.getSymbol();if(!g)throw new Error(`S\xEDmbolo no encontrado para '${s}'.`);let r=g.getDeclarations()[0]?.asKind(b.ImportSpecifier);if(!r)throw new Error(`'${s}' debe ser importada.`);let l=r.getImportDeclaration().getModuleSpecifierSourceFileOrThrow();return`{ type: 'SCHEDULE_TASK', modulePath: '${T.relative(process.cwd(),l.getFilePath()).replace(/\\/g,"/")}', exportName: '${s}', args: [${c}] }`}function v(e){for(let n of e.getDescendantsOfKind(b.AwaitExpression)){let o=n.getExpressionIfKind(b.CallExpression);if(o){let s=o.getExpression();if(i.isPropertyAccessExpression(s)){let a=s.getName();if(s.getExpression().getText()==="context"&&(a==="bSleep"||a==="bWaitForEvent"||a==="bExecute"||a==="bSignal")||s.getSymbol()?.getDeclarations()[0]?.isKind(b.ImportSpecifier))return!0}else if(s.getSymbol()?.getDeclarations()[0]?.isKind(b.ImportSpecifier))return!0}}if(i.isTryStatement(e)&&(v(e.getTryBlock())||e.getCatchClause()&&v(e.getCatchClause().getBlock())||e.getFinallyBlock()&&v(e.getFinallyBlock())))return!0;if(i.isIfStatement(e)){let n=v(e.getThenStatement()),o=e.getElseStatement()?v(e.getElseStatement()):!1;return n||o}return i.isBlock(e)?e.getStatements().some(v):!1}function le(e){for(let n=0;n<e.length;n++){let o=e[n];if(i.isReturnStatement(o)||v(o)||i.isTryStatement(o))return{syncBlock:e.slice(0,n),durableStatement:o,nextStatements:e.slice(n+1)}}return{syncBlock:e,durableStatement:null,nextStatements:[]}}var q=e=>{let n=process.argv.indexOf(e);if(n!==-1&&process.argv.length>n+1)return process.argv[n+1]};async function pe(){let e=q("--in"),n=q("--out");(!e||!n)&&(console.error("Uso: b-durable-compiler --in <directorio_entrada> --out <directorio_salida>"),process.exit(1));let 
o=H.resolve(process.cwd(),e),s=H.resolve(process.cwd(),n);await U({inputDir:o,outputDir:s,packageName:"@bobtail.software/b-durable"})}pe().catch(e=>{console.error("Error durante la compilaci\xF3n:",e),process.exit(1)});
|
package/dist/index.d.mts
CHANGED
|
@@ -24,22 +24,6 @@ interface WorkflowStateInfo<TInput = unknown, TOutput = unknown> {
|
|
|
24
24
|
attempts: number;
|
|
25
25
|
};
|
|
26
26
|
}
|
|
27
|
-
interface StartOptions<TInput, TSignals, TOutput> {
|
|
28
|
-
input: TInput;
|
|
29
|
-
workflowId?: string;
|
|
30
|
-
/**
|
|
31
|
-
* Un callback opcional para recibir señales emitidas por el workflow en tiempo real.
|
|
32
|
-
* Se garantiza que la suscripción está activa antes de que se ejecute el primer paso.
|
|
33
|
-
*/
|
|
34
|
-
subscribe?: (signal: WorkflowSignal<TSignals, TOutput>) => void;
|
|
35
|
-
}
|
|
36
|
-
interface StartedWorkflowHandle {
|
|
37
|
-
workflowId: string;
|
|
38
|
-
/**
|
|
39
|
-
* Cierra la suscripción a las señales creada en `start`.
|
|
40
|
-
*/
|
|
41
|
-
unsubscribe: () => Promise<void>;
|
|
42
|
-
}
|
|
43
27
|
interface WorkflowState {
|
|
44
28
|
tryCatchStack?: {
|
|
45
29
|
catchStep?: number;
|
|
@@ -55,6 +39,16 @@ interface WorkflowContext<TInput = unknown> {
|
|
|
55
39
|
result?: unknown;
|
|
56
40
|
log: (message: string, meta?: Record<string, unknown>) => void;
|
|
57
41
|
}
|
|
42
|
+
interface RetryOptions {
|
|
43
|
+
/** Número máximo de intentos. Infinity para reintentar siempre. Por defecto: 3 */
|
|
44
|
+
maxAttempts?: number;
|
|
45
|
+
/** Tiempo de espera antes del primer reintento (ej: '1s'). Por defecto: '1s' */
|
|
46
|
+
initialInterval?: ms.StringValue;
|
|
47
|
+
/** Coeficiente de multiplicación para backoff exponencial. Por defecto: 2 */
|
|
48
|
+
backoffCoefficient?: number;
|
|
49
|
+
/** Tiempo máximo de espera entre reintentos (ej: '1h'). Por defecto: '1h' */
|
|
50
|
+
maxInterval?: ms.StringValue;
|
|
51
|
+
}
|
|
58
52
|
type Instruction<TOutput = unknown> = {
|
|
59
53
|
type: 'SCHEDULE_TASK';
|
|
60
54
|
modulePath: string;
|
|
@@ -64,31 +58,47 @@ type Instruction<TOutput = unknown> = {
|
|
|
64
58
|
type: 'SCHEDULE_SLEEP';
|
|
65
59
|
duration: ms.StringValue;
|
|
66
60
|
} | {
|
|
67
|
-
type: '
|
|
68
|
-
|
|
61
|
+
type: 'WAIT_FOR_SIGNAL';
|
|
62
|
+
signalName: string;
|
|
69
63
|
} | {
|
|
70
64
|
type: 'EXECUTE_SUBWORKFLOW';
|
|
71
65
|
workflowName: string;
|
|
72
66
|
input: unknown;
|
|
73
67
|
} | {
|
|
74
|
-
type: '
|
|
75
|
-
|
|
68
|
+
type: 'EMIT_EVENT';
|
|
69
|
+
eventName: string;
|
|
76
70
|
payload: unknown;
|
|
77
71
|
} | {
|
|
78
72
|
type: 'COMPLETE';
|
|
79
73
|
result: TOutput;
|
|
80
74
|
};
|
|
75
|
+
interface DurableFunction<TInput = unknown, TOutput = unknown, TSignals = Record<string, never>, // INPUT: Señales que el workflow puede RECIBIR
|
|
76
|
+
TEvents = Record<string, never>> {
|
|
77
|
+
__isDurable: true;
|
|
78
|
+
name: string;
|
|
79
|
+
version: string;
|
|
80
|
+
retryOptions?: RetryOptions;
|
|
81
|
+
execute: (context: WorkflowContext<TInput>) => Promise<Instruction<TOutput>>;
|
|
82
|
+
_TSignals?: TSignals;
|
|
83
|
+
_TEvents?: TEvents;
|
|
84
|
+
}
|
|
85
|
+
/**
|
|
86
|
+
* Transforma un mapa de eventos en props de listeners.
|
|
87
|
+
* Ejemplo: { started: { id: string } } => { onStarted?: (payload: { id: string }) => void }
|
|
88
|
+
*/
|
|
89
|
+
type WorkflowEventListeners<TEvents> = {
|
|
90
|
+
[K in keyof TEvents as `on${Capitalize<string & K>}`]?: (payload: TEvents[K]) => void;
|
|
91
|
+
};
|
|
81
92
|
/**
|
|
82
|
-
* Representa
|
|
83
|
-
*
|
|
84
|
-
* Incluye las señales personalizadas (TSignals) y las señales del sistema.
|
|
93
|
+
* Representa cualquier evento que sale del workflow, ya sea definido por el usuario
|
|
94
|
+
* o por el sistema (completado/fallido).
|
|
85
95
|
*/
|
|
86
|
-
type
|
|
87
|
-
[K in keyof
|
|
96
|
+
type WorkflowEvent<TEvents, TOutput> = {
|
|
97
|
+
[K in keyof TEvents]: {
|
|
88
98
|
name: K;
|
|
89
|
-
payload:
|
|
99
|
+
payload: TEvents[K];
|
|
90
100
|
};
|
|
91
|
-
}[keyof
|
|
101
|
+
}[keyof TEvents] | {
|
|
92
102
|
name: 'workflow:completed';
|
|
93
103
|
payload: TOutput;
|
|
94
104
|
} | {
|
|
@@ -98,16 +108,48 @@ type WorkflowSignal<TSignals, TOutput> = {
|
|
|
98
108
|
};
|
|
99
109
|
};
|
|
100
110
|
/**
|
|
101
|
-
*
|
|
111
|
+
* Utilidad para "aplanar" intersecciones de tipos.
|
|
112
|
+
* Esto fuerza a TypeScript a computar el objeto final inmediatamente,
|
|
113
|
+
* permitiendo que la inferencia contextual de los argumentos funcione correctamente.
|
|
102
114
|
*/
|
|
103
|
-
|
|
115
|
+
type Compute<T> = {
|
|
116
|
+
[K in keyof T]: T[K];
|
|
117
|
+
} & unknown;
|
|
118
|
+
/**
|
|
119
|
+
* Opciones para iniciar el workflow. Combina input estándar + listeners dinámicos.
|
|
120
|
+
*/
|
|
121
|
+
type StartOptions<TInput, TEvents> = Compute<{
|
|
122
|
+
input: TInput;
|
|
123
|
+
workflowId?: string;
|
|
124
|
+
} & WorkflowEventListeners<TEvents>>;
|
|
125
|
+
interface StartedWorkflowHandle {
|
|
126
|
+
workflowId: string;
|
|
127
|
+
unsubscribe: () => Promise<void>;
|
|
128
|
+
}
|
|
129
|
+
/**
|
|
130
|
+
* Handle para interactuar con un workflow en ejecución.
|
|
131
|
+
*/
|
|
132
|
+
interface WorkflowHandle<TSignals, TEvents, TOutput> {
|
|
104
133
|
workflowId: string;
|
|
105
134
|
/**
|
|
106
|
-
*
|
|
107
|
-
* @param
|
|
108
|
-
* @
|
|
135
|
+
* Envía una señal (INPUT) al workflow.
|
|
136
|
+
* @param name Nombre de la señal (definido en TSignals).
|
|
137
|
+
* @param payload Datos de la señal.
|
|
109
138
|
*/
|
|
110
|
-
|
|
139
|
+
signal: <K extends keyof TSignals>(name: K, payload: TSignals[K]) => Promise<void>;
|
|
140
|
+
/**
|
|
141
|
+
* Suscribe un callback a un evento específico (OUTPUT).
|
|
142
|
+
* @param eventName Nombre del evento (definido en TEvents).
|
|
143
|
+
* @param handler Función que recibe el payload.
|
|
144
|
+
*/
|
|
145
|
+
on: <K extends keyof TEvents>(eventName: K, handler: (payload: TEvents[K]) => void) => Promise<{
|
|
146
|
+
unsubscribe: () => void;
|
|
147
|
+
}>;
|
|
148
|
+
/**
|
|
149
|
+
* Suscribe un callback a TODOS los eventos (incluyendo sistema).
|
|
150
|
+
* Útil para logging o debug.
|
|
151
|
+
*/
|
|
152
|
+
subscribe: (handler: (event: WorkflowEvent<TEvents, TOutput>) => void) => Promise<{
|
|
111
153
|
unsubscribe: () => void;
|
|
112
154
|
}>;
|
|
113
155
|
}
|
|
@@ -115,14 +157,6 @@ declare class WorkflowCancellationError extends Error {
|
|
|
115
157
|
readonly isCancellation = true;
|
|
116
158
|
constructor(message: string);
|
|
117
159
|
}
|
|
118
|
-
interface DurableFunction<TInput = unknown, TOutput = unknown, TEvents = Record<string, never>, TSignals = Record<string, never>> {
|
|
119
|
-
__isDurable: true;
|
|
120
|
-
name: string;
|
|
121
|
-
version: string;
|
|
122
|
-
execute: (context: WorkflowContext<TInput>) => Promise<Instruction<TOutput>>;
|
|
123
|
-
_TEvents?: TEvents;
|
|
124
|
-
_TSignals?: TSignals;
|
|
125
|
-
}
|
|
126
160
|
|
|
127
161
|
declare class DurableRuntime {
|
|
128
162
|
private durableFns;
|
|
@@ -142,22 +176,23 @@ declare class DurableRuntime {
|
|
|
142
176
|
logger?: Logger;
|
|
143
177
|
});
|
|
144
178
|
getState(workflowId: string): Promise<WorkflowStateInfo | null>;
|
|
145
|
-
start<TInput, TOutput,
|
|
179
|
+
start<TInput, TOutput, TSignals, TEvents>(durableFn: DurableFunction<TInput, TOutput, TSignals, TEvents>, options: StartOptions<TInput, TEvents>, parentId?: string): Promise<StartedWorkflowHandle>;
|
|
146
180
|
private scheduleExecution;
|
|
147
181
|
private _executeStep;
|
|
148
182
|
private handleInstruction;
|
|
149
183
|
private handleFailure;
|
|
150
184
|
private resumeParentWorkflow;
|
|
151
185
|
private propagateFailureToParent;
|
|
152
|
-
|
|
186
|
+
signal<T>(workflowId: string, signalName: string, payload: T): Promise<void>;
|
|
153
187
|
cancel(workflowId: string, reason: string): Promise<void>;
|
|
154
188
|
private startScheduler;
|
|
189
|
+
private checkDelayedTasks;
|
|
155
190
|
private checkSleepers;
|
|
156
191
|
private reapDeadWorkers;
|
|
157
192
|
private startHeartbeat;
|
|
158
193
|
private startWorker;
|
|
159
|
-
run(durableFns: Map<string, DurableFunction<unknown, unknown,
|
|
160
|
-
stop(): void
|
|
194
|
+
run(durableFns: Map<string, DurableFunction<unknown, unknown, unknown, unknown>>): void;
|
|
195
|
+
stop(): Promise<void>;
|
|
161
196
|
}
|
|
162
197
|
|
|
163
198
|
/**
|
|
@@ -202,6 +237,11 @@ interface DurableWorkflowDef<TInput, TOutput, TEvents = Record<string, never>, T
|
|
|
202
237
|
* Versión del workflow. Se utiliza para identificar versiones de un workflow.
|
|
203
238
|
*/
|
|
204
239
|
version: string;
|
|
240
|
+
/**
|
|
241
|
+
* Política de reintento para las tareas ejecutadas por este workflow.
|
|
242
|
+
* Si falla una tarea, se aplicará esta configuración.
|
|
243
|
+
*/
|
|
244
|
+
retryOptions?: RetryOptions;
|
|
205
245
|
}
|
|
206
246
|
/**
|
|
207
247
|
* Marcador para que el compilador identifique y transforme una función en un workflow durable.
|
|
@@ -210,41 +250,31 @@ interface DurableWorkflowDef<TInput, TOutput, TEvents = Record<string, never>, T
|
|
|
210
250
|
declare const bDurable: <TInput = any, TOutput = any, TEvents = Record<string, never>, TSignals = Record<string, never>>(def: DurableWorkflowDef<TInput, TOutput, TEvents, TSignals>) => DurableFunction<TInput, TOutput, TEvents, TSignals>;
|
|
211
251
|
|
|
212
252
|
interface BDurableAPI {
|
|
213
|
-
|
|
253
|
+
/**
|
|
254
|
+
* Inicia un workflow durable.
|
|
255
|
+
* Permite definir listeners de eventos inmediatamente en las opciones (ej: { onProgress: ... })
|
|
256
|
+
* para evitar condiciones de carrera.
|
|
257
|
+
*/
|
|
258
|
+
start: <TInput, TOutput, TSignals, TEvents>(durableFn: DurableFunction<TInput, TOutput, TSignals, TEvents>, options: StartOptions<TInput, TEvents>) => Promise<StartedWorkflowHandle>;
|
|
214
259
|
stop: () => void;
|
|
215
260
|
runtime: DurableRuntime;
|
|
216
261
|
cancel: (workflowId: string, reason: string) => Promise<void>;
|
|
217
262
|
getState: (workflowId: string) => Promise<WorkflowStateInfo | null>;
|
|
218
263
|
/**
|
|
219
|
-
*
|
|
220
|
-
*
|
|
221
|
-
* @param durableFn La definición del workflow al que se le enviará el evento. Se usa para la inferencia de tipos.
|
|
222
|
-
* @param workflowId El ID del workflow al que se le enviará el evento.
|
|
223
|
-
* @param eventName El nombre del evento. Será autocompletado por el editor.
|
|
224
|
-
* @param payload La carga útil del evento. El tipo debe coincidir con el definido para `eventName`.
|
|
264
|
+
* Obtiene un "handle" para una instancia de workflow existente.
|
|
265
|
+
* Permite enviar señales (Input) y escuchar eventos (Output).
|
|
225
266
|
*/
|
|
226
|
-
|
|
227
|
-
/**
|
|
228
|
-
* Obtiene un "handle" para una instancia de workflow existente, permitiendo suscribirse a sus señales.
|
|
229
|
-
* @param durableFn La definición del workflow. Se usa para la inferencia de tipos de las señales.
|
|
230
|
-
* @param workflowId El ID de la instancia del workflow.
|
|
231
|
-
* @returns Un WorkflowHandle para la instancia especificada.
|
|
232
|
-
*/
|
|
233
|
-
getWorkflowHandle: <TInput, TOutput, TEvents, TSignals>(durableFn: DurableFunction<TInput, TOutput, TEvents, TSignals>, workflowId: string) => WorkflowHandle<TSignals, TOutput>;
|
|
267
|
+
getHandle: <TInput, TOutput, TSignals, TEvents>(durableFn: DurableFunction<TInput, TOutput, TSignals, TEvents>, workflowId: string) => WorkflowHandle<TSignals, TEvents, TOutput>;
|
|
234
268
|
}
|
|
235
269
|
interface InitializeOptions {
|
|
236
|
-
durableFunctions: Map<string, DurableFunction<unknown, unknown,
|
|
270
|
+
durableFunctions: Map<string, DurableFunction<unknown, unknown, any, any>>;
|
|
237
271
|
sourceRoot: string;
|
|
238
272
|
redisClient: Redis;
|
|
239
273
|
blockingRedisClient: Redis;
|
|
240
|
-
/**
|
|
241
|
-
* NUEVO: Período de retención para workflows completados/fallidos en Redis.
|
|
242
|
-
* Ejemplo: '7 days', '2h'. Si no se especifica, los workflows se guardan para siempre.
|
|
243
|
-
*/
|
|
244
274
|
retention?: ms.StringValue;
|
|
245
275
|
pollingInterval?: number;
|
|
246
276
|
logger?: Logger;
|
|
247
277
|
}
|
|
248
278
|
declare function bDurableInitialize(options: InitializeOptions): BDurableAPI;
|
|
249
279
|
|
|
250
|
-
export { type BDurableAPI, type DurableFunction, type Instruction, type Logger, type StartOptions, type StartedWorkflowHandle, WorkflowCancellationError, type WorkflowContext, type
|
|
280
|
+
export { type BDurableAPI, type DurableFunction, type Instruction, type Logger, type RetryOptions, type StartOptions, type StartedWorkflowHandle, WorkflowCancellationError, type WorkflowContext, type WorkflowEvent, type WorkflowEventListeners, type WorkflowHandle, type WorkflowState, type WorkflowStateInfo, bDurable, bDurableInitialize };
|
package/dist/index.mjs
CHANGED
|
@@ -1 +1,91 @@
|
|
|
1
|
-
import P from"ioredis";var w="queue:tasks",m="durable:sleepers",v="worker:heartbeat:",W="queue:dead";function O(u){return`workflow:${u}`}var l={RUNNING:"RUNNING",SLEEPING:"SLEEPING",COMPLETED:"COMPLETED",FAILED:"FAILED",AWAITING_EVENT:"AWAITING_EVENT",AWAITING_SUBWORKFLOW:"AWAITING_SUBWORKFLOW",CANCELLING:"CANCELLING",CANCELLED:"CANCELLED",VERSION_MISMATCH:"VERSION_MISMATCH"};var o,E;function N(u){if(o||E){console.warn("[Persistence] Los clientes de Redis ya han sido configurados. Omitiendo.");return}o=u.commandClient,E=u.blockingClient}import{randomUUID as R}from"crypto";import L from"ms";import{resolve as D}from"path";var k=class extends Error{isCancellation=!0;constructor(t){super(t),this.name="WorkflowCancellationError"}};var x={info:(u,t)=>console.log(`[INFO] ${u}`,t||""),error:(u,t)=>console.error(`[ERROR] ${u}`,t||""),warn:(u,t)=>console.warn(`[WARN] ${u}`,t||""),debug:(u,t)=>console.debug(`[DEBUG] ${u}`,t||"")},I=class{constructor(t){this.retention=t}getKey(t){return`workflow:${t}`}getLockKey(t){return`workflow:${t}:lock`}async acquireLock(t,e=10){let s=this.getLockKey(t);return await o.set(s,"locked","EX",e,"NX")==="OK"}async releaseLock(t){await o.del(this.getLockKey(t))}async get(t){let e=await o.hgetall(this.getKey(t));return!e||Object.keys(e).length===0?null:{workflowId:e.workflowId,name:e.name,version:e.version,status:e.status,step:parseInt(e.step,10),input:JSON.parse(e.input),state:JSON.parse(e.state),result:e.result?JSON.parse(e.result):void 0,error:e.error,parentId:e.parentId,subWorkflowId:e.subWorkflowId,awaitingEvent:e.awaitingEvent,createdAt:e.createdAt?parseInt(e.createdAt,10):0,updatedAt:e.updatedAt?parseInt(e.updatedAt,10):0}}async create(t){let e=Date.now(),s={...t,step:0,state:{},createdAt:e,updatedAt:e},n={...s,input:JSON.stringify(s.input),state:JSON.stringify(s.state)};n.version===void 0&&delete n.version;let a=o.pipeline();a.hset(this.getKey(s.workflowId),n),await a.exec()}async updateState(t,e){await 
o.hset(this.getKey(t),{state:JSON.stringify(e),updatedAt:Date.now()})}async updateStatus(t,e,s={}){await o.hset(this.getKey(t),{status:e,...s,updatedAt:Date.now()})}async incrementStep(t){return o.hincrby(this.getKey(t),"step",1)}async applyRetention(t){if(this.retention){let e=L(this.retention)/1e3;e>0&&await o.expire(this.getKey(t),e)}}async complete(t,e){await o.hset(this.getKey(t),{status:l.COMPLETED,result:JSON.stringify(e??null)}),await this.applyRetention(t)}async fail(t,e,s=l.FAILED){await o.hset(this.getKey(t),{status:s,error:e.message}),await this.applyRetention(t)}async scheduleSleep(t,e){await this.updateStatus(t,l.SLEEPING),await o.zadd(m,e,t)}async getWorkflowsToWake(){let t=Date.now(),e=await o.zrangebyscore(m,0,t);return e.length>0&&await o.zrem(m,...e),e}async enqueueTask(t){await o.lpush(w,JSON.stringify(t))}async resumeForCatch(t,e,s){let n=this.getKey(t);await o.hset(n,{state:JSON.stringify(e),status:l.RUNNING,step:s.toString()})}async moveToDLQ(t,e){let s={...t,failedAt:Date.now(),error:e.message,stack:e.stack};await o.lpush(W,JSON.stringify(s))}},S=class{durableFns=new Map;repo;workerId=R();isRunning=!1;schedulerInterval=null;heartbeatInterval=null;sourceRoot;pollingInterval;logger;maxTaskRetries=3;constructor(t){this.sourceRoot=t.sourceRoot,this.repo=new I(t.retention),this.pollingInterval=t.pollingInterval||5e3,this.logger=t.logger||x}async getState(t){let e=await this.repo.get(t);return e?{workflowId:e.workflowId,name:e.name,version:e.version,status:e.status,step:e.step,input:e.input,output:e.result,state:e.state,error:e.error,createdAt:e.createdAt,updatedAt:e.updatedAt}:null}async start(t,e,s){if(e.workflowId){let r=await this.repo.get(e.workflowId);if(r&&r.status!==l.COMPLETED&&r.status!==l.FAILED)throw new Error(`Workflow with ID '${e.workflowId}' already exists and is in a running state (${r.status}).`)}let n=e.workflowId??R();this.logger.info(`[RUNTIME] Iniciando workflow '${t.name}' v${t.version} con ID: ${n}`),await 
this.repo.create({workflowId:n,name:t.name,version:t.version,status:l.RUNNING,input:e.input,parentId:s});let a=async()=>{};if(e.subscribe){let r=o.duplicate(),i=`signal:${n}`;r.on("message",(c,g)=>{if(c===i)try{let p=JSON.parse(g),f={name:p.signalName,payload:p.payload};e.subscribe?.(f)}catch(p){this.logger.error("Error al procesar se\xF1al",{error:p,workflowId:n})}}),await r.subscribe(i),a=async()=>{r.status==="ready"&&(r.unsubscribe(i).catch(()=>{}),r.quit().catch(()=>{}))}}return setImmediate(()=>{this._executeStep(n,t).catch(r=>{this.logger.error("Error fatal en ejecuci\xF3n inicial",{error:r,workflowId:n})})}),{workflowId:n,unsubscribe:a}}async scheduleExecution(t,e,s,n){setImmediate(()=>{this._executeStep(t,e,s,n).catch(a=>{this.logger.error("Error no manejado en scheduleExecution",{error:a,workflowId:t})})})}async _executeStep(t,e,s,n){if(await this.repo.acquireLock(t))try{if(n)throw n;let r=await this.repo.get(t);if(!r)return;if(r.status===l.CANCELLING)throw new k(r.error||"Workflow cancelled");if(r.status!==l.RUNNING)return;let i=r.version==="undefined"?void 0:r.version,c=e.version==="undefined"?void 0:e.version;if(String(i??"")!==String(c??"")){let d=new Error(`Version mismatch: DB=${i}, Code=${c}`);await this.repo.fail(t,d,l.VERSION_MISMATCH);return}let g={workflowId:t,step:r.step,input:r.input,state:r.state,result:s,log:(d,T)=>this.logger.info(d,{...T,workflowId:t,step:r.step})},p=await e.execute(g);await this.repo.updateState(t,g.state),await this.handleInstruction(p,g,r.name)&&(await this.repo.incrementStep(t),this.scheduleExecution(t,e,void 0))}catch(r){let i=r instanceof Error?r:new Error(String(r));this.logger.error("Error en workflow",{workflowId:t,error:i.message}),await this.handleFailure(t,i,e,!0)}finally{await this.repo.releaseLock(t)}}async handleInstruction(t,e,s){let{workflowId:n}=e;switch(t.type){case"SCHEDULE_TASK":return await this.repo.enqueueTask({workflowId:n,durableFunctionName:s,...t}),!1;case"SCHEDULE_SLEEP":{let 
a=L(t.duration);if(typeof a!="number")throw new Error(`Invalid time value provided to bSleep: "${t.duration}"`);let r=Date.now()+a;return await this.repo.scheduleSleep(n,r),!1}case"WAIT_FOR_EVENT":return await this.repo.updateStatus(n,l.AWAITING_EVENT,{awaitingEvent:t.eventName}),await o.sadd(`events:awaiting:${t.eventName}`,n),!1;case"EXECUTE_SUBWORKFLOW":{let a=this.durableFns.get(t.workflowName);if(!a)throw new Error(`Sub-workflow '${t.workflowName}' no encontrado.`);let{workflowId:r}=await this.start(a,{input:t.input},n);return await this.repo.updateStatus(n,l.AWAITING_SUBWORKFLOW,{subWorkflowId:r}),!1}case"SEND_SIGNAL":{let a=`signal:${n}`,r=JSON.stringify({signalName:t.signalName,payload:t.payload});return await o.publish(a,r),!0}case"COMPLETE":{let a=`signal:${n}`,r=JSON.stringify({signalName:"workflow:completed",payload:t.result});return await o.publish(a,r),await this.repo.complete(n,t.result),await this.resumeParentWorkflow(n),!1}}}async handleFailure(t,e,s,n=!1){if(!n&&!await this.repo.acquireLock(t,20)){this.logger.warn(`No se pudo adquirir lock para fallo en ${t}`);return}try{if(e instanceof k){await this.repo.fail(t,e,l.CANCELLED);let g=await this.repo.get(t);g?.subWorkflowId&&await this.cancel(g.subWorkflowId,`Parent workflow ${t} was cancelled`);return}let a=await this.repo.get(t);if(!a||a.status===l.FAILED||a.status===l.COMPLETED)return;let r=a.state.tryCatchStack;if(r&&r.length>0){let p=r.pop()?.catchStep;if(p!==void 0){this.logger.info(`Capturando error en step ${p}`,{workflowId:t}),await this.repo.resumeForCatch(t,a.state,p),this.scheduleExecution(t,s,{name:e.name,message:e.message,stack:e.stack});return}}let i=`signal:${t}`,c=JSON.stringify({signalName:"workflow:failed",payload:{message:e.message}});await o.publish(i,c),await this.repo.fail(t,e),await this.propagateFailureToParent(t,e)}finally{n||await this.repo.releaseLock(t)}}async resumeParentWorkflow(t){let e=await this.repo.get(t);if(!e?.parentId)return;let s=e.parentId,n=await 
this.repo.get(s);if(!n||n.status!==l.AWAITING_SUBWORKFLOW||n.subWorkflowId!==t)return;let a=this.durableFns.get(n.name);if(!a){await this.repo.fail(s,new Error(`Definici\xF3n del workflow '${n.name}' no encontrada.`));return}await this.repo.updateStatus(s,l.RUNNING,{subWorkflowId:""}),await this.repo.incrementStep(s),this.scheduleExecution(s,a,e.result)}async propagateFailureToParent(t,e){let s=await this.repo.get(t);if(!s?.parentId)return;let n=s.parentId,a=await this.repo.get(n);if(!a||a.status!==l.AWAITING_SUBWORKFLOW||a.subWorkflowId!==t)return;let r=this.durableFns.get(a.name);if(!r){await this.repo.fail(n,new Error(`Definici\xF3n del workflow '${a.name}' no encontrada al propagar fallo.`));return}await this.repo.updateStatus(n,l.RUNNING,{subWorkflowId:""});let i=new Error(`Sub-workflow '${s.name}' (${t}) fall\xF3: ${e.message}`);i.stack=e.stack,this.scheduleExecution(n,r,void 0,i)}async sendEvent(t,e,s){let n=!1;for(let a=0;a<3&&(n=await this.repo.acquireLock(t),!n);a++)await new Promise(r=>setTimeout(r,50));if(!n)return this.logger.warn("Lock timeout en sendEvent",{workflowId:t});try{let a=await this.repo.get(t);if(!a)return this.logger.warn("Evento para workflow inexistente",{workflowId:t});if(a.status!==l.AWAITING_EVENT||a.awaitingEvent!==e)return this.logger.warn("Workflow no esperaba este evento",{workflowId:t,expected:a.awaitingEvent,received:e});let r=this.durableFns.get(a.name);if(!r){await this.repo.fail(t,new Error(`Funci\xF3n durable '${a.name}' no encontrada.`));return}await this.repo.updateStatus(t,l.RUNNING,{awaitingEvent:""}),await o.srem(`events:awaiting:${e}`,t),await this.repo.incrementStep(t),this.scheduleExecution(t,r,s)}catch(a){let r=a instanceof Error?a:new Error(String(a)),i=(await this.repo.get(t))?.name||"",c=this.durableFns.get(i);await this.handleFailure(t,r,c,!0)}finally{await this.repo.releaseLock(t)}}async cancel(t,e){if(!await this.repo.acquireLock(t))return await new Promise(n=>setTimeout(n,100)),this.cancel(t,e);try{let 
n=await this.repo.get(t);if(!n||[l.COMPLETED,l.FAILED,l.CANCELLED].includes(n.status))return;if(await this.repo.updateStatus(t,l.CANCELLING,{error:e}),n.status===l.SLEEPING){await o.zrem(m,t);let a=this.durableFns.get(n.name);this.scheduleExecution(t,a)}if(n.status===l.AWAITING_EVENT){let a=this.durableFns.get(n.name);this.scheduleExecution(t,a)}}finally{await this.repo.releaseLock(t)}}startScheduler(){if(this.schedulerInterval)return;this.logger.info(`Scheduler iniciado (${this.pollingInterval}ms)`);let t=async()=>{await this.checkSleepers(),await this.reapDeadWorkers()};this.schedulerInterval=setInterval(t,this.pollingInterval)}async checkSleepers(){let t=await this.repo.getWorkflowsToWake();for(let e of t){let s=await this.repo.get(e);if(s){let n=this.durableFns.get(s.name);n&&(this.logger.info("Despertando workflow",{workflowId:e}),await this.repo.updateStatus(e,l.RUNNING),await this.repo.incrementStep(e),this.scheduleExecution(e,n,void 0))}}}async reapDeadWorkers(){let t=await o.keys(`${w}:processing:*`);for(let e of t){let s=e.split(":").pop();if(!s||await o.exists(`${v}${s}`))continue;this.logger.warn(`Worker muerto ${s}. 
Recuperando tareas.`);let n=await o.rpoplpush(e,w);for(;n;)n=await o.rpoplpush(e,w);await o.del(e)}}startHeartbeat(){let t=`${v}${this.workerId}`,e=Math.max(Math.ceil(this.pollingInterval*3/1e3),5),s=()=>{this.isRunning&&o.set(t,Date.now().toString(),"EX",e).catch(()=>{})};this.heartbeatInterval=setInterval(s,this.pollingInterval),s()}startWorker(){if(this.isRunning)return;this.isRunning=!0;let t=`${w}:processing:${this.workerId}`;this.logger.info(`Worker ${this.workerId} iniciado`),this.startHeartbeat(),(async()=>{for(;this.isRunning;)try{let s=await E.brpoplpush(w,t,0);if(!s)continue;let n=JSON.parse(s);this.logger.debug(`Ejecutando tarea: ${n.exportName}`,{workflowId:n.workflowId});try{let a;n.modulePath.startsWith("virtual:")?a=await import(n.modulePath):a=await import(D(this.sourceRoot,n.modulePath));let r=a[n.exportName];if(typeof r!="function")throw new Error(`'${n.exportName}' no es una funci\xF3n.`);let i=await r(...n.args),c=this.durableFns.get(n.durableFunctionName);c&&(await this.repo.incrementStep(n.workflowId),this.scheduleExecution(n.workflowId,c,i)),await o.lrem(t,1,s)}catch(a){let r=a instanceof Error?a:new Error(String(a));this.logger.error(`Fallo en tarea ${n.exportName}`,{workflowId:n.workflowId,error:r.message});let i=(n.attempts||0)+1;if(i<=this.maxTaskRetries)this.logger.warn(`Reintentando tarea (intento ${i}/${this.maxTaskRetries})`,{workflowId:n.workflowId}),n.attempts=i,await o.lpush(w,JSON.stringify(n)),await o.lrem(t,1,s);else{this.logger.error("Reintentos agotados. 
Moviendo a DLQ.",{workflowId:n.workflowId}),await this.repo.moveToDLQ(n,r);let c=this.durableFns.get(n.durableFunctionName);c?await this.handleFailure(n.workflowId,r,c):await this.repo.fail(n.workflowId,new Error(`Def missing for ${n.durableFunctionName}`)),await o.lrem(t,1,s)}}}catch(s){if(!this.isRunning)break;this.logger.error("Error infraestructura worker",{error:s}),await new Promise(n=>setTimeout(n,5e3))}})()}run(t){this.durableFns=t,this.startWorker(),this.startScheduler()}stop(){this.isRunning=!1,this.schedulerInterval&&clearInterval(this.schedulerInterval),this.heartbeatInterval&&clearInterval(this.heartbeatInterval),this.logger.info("Runtime detenido")}};var F=u=>({...u,__isDurable:!0});var A={info:(u,t)=>console.log(`[INFO] ${u}`,t||""),error:(u,t)=>console.error(`[ERROR] ${u}`,t||""),warn:(u,t)=>console.warn(`[WARN] ${u}`,t||""),debug:(u,t)=>console.debug(`[DEBUG] ${u}`,t||"")};function it(u){let t=u.logger||A;t.info("--- Inicializando Sistema Durable ---");let e=new P(u.redisClient.options),s=new Map;e.psubscribe("signal:*",r=>{r&&t.error("Error fatal al suscribirse a los canales de se\xF1ales:",{error:r})}),e.on("pmessage",(r,i,c)=>{let g=s.get(i);if(g&&g.length>0)try{let p=JSON.parse(c),f={name:p.signalName,payload:p.payload};[...g].forEach(d=>d(f))}catch(p){t.error(`Error al parsear mensaje de se\xF1al en ${i}`,{error:p})}});let n=(r,i)=>{let c=()=>{};return{workflowId:i,subscribe:async g=>{let p=`signal:${i}`,f=O(i),d=await u.redisClient.hgetall(f);if(d.status===l.COMPLETED)return g({name:"workflow:completed",payload:JSON.parse(d.result||"null")},c),{unsubscribe:c};if(d.status===l.FAILED)return g({name:"workflow:failed",payload:{message:d.error||"Unknown error"}},c),{unsubscribe:c};let T=null,b=()=>{if(!T)return;let h=s.get(p);if(h){let y=h.indexOf(T);y>-1&&h.splice(y,1),h.length===0&&s.delete(p)}};return 
T=h=>{g(h,b)},s.has(p)||s.set(p,[]),s.get(p)?.push(T),{unsubscribe:b}}}};N({commandClient:u.redisClient,blockingClient:u.blockingRedisClient});let a=new S({sourceRoot:u.sourceRoot,retention:u.retention,pollingInterval:u.pollingInterval,logger:t});return a.run(u.durableFunctions),{start:async(r,i)=>a.start(r,i),cancel:(r,i)=>a.cancel(r,i),getState:r=>a.getState(r),getWorkflowHandle:(r,i)=>n(r,i),sendEvent:(r,i,c,g)=>a.sendEvent(i,c,g),stop:()=>{a.stop(),e.quit().catch(()=>{})},runtime:a}}export{k as WorkflowCancellationError,F as bDurable,it as bDurableInitialize};
|
|
1
|
+
import{randomUUID as z}from"crypto";import q from"ioredis";var E="queue:tasks",I="durable:sleepers",R="worker:heartbeat:",k="durable:workers",x="queue:dead",L="queue:tasks:delayed";function C(i){return`workflow:${i}`}var c={RUNNING:"RUNNING",SLEEPING:"SLEEPING",COMPLETED:"COMPLETED",FAILED:"FAILED",AWAITING_SIGNAL:"AWAITING_SIGNAL",AWAITING_SUBWORKFLOW:"AWAITING_SUBWORKFLOW",CANCELLING:"CANCELLING",CANCELLED:"CANCELLED",VERSION_MISMATCH:"VERSION_MISMATCH"};var l,y;function N(i){if(l||y){console.warn("[Persistence] Los clientes de Redis ya han sido configurados. Omitiendo.");return}l=i.commandClient,y=i.blockingClient}import{randomUUID as W}from"crypto";import v from"ms";import{resolve as V}from"path";var S=class extends Error{isCancellation=!0;constructor(t){super(t),this.name="WorkflowCancellationError"}};var P=`
|
|
2
|
+
if redis.call("get", KEYS[1]) == ARGV[1] then
|
|
3
|
+
return redis.call("del", KEYS[1])
|
|
4
|
+
else
|
|
5
|
+
return 0
|
|
6
|
+
end
|
|
7
|
+
`,D=`
|
|
8
|
+
if redis.call("get", KEYS[1]) == ARGV[1] then
|
|
9
|
+
return redis.call("expire", KEYS[1], ARGV[2])
|
|
10
|
+
else
|
|
11
|
+
return 0
|
|
12
|
+
end
|
|
13
|
+
`,A=`
|
|
14
|
+
local lockKey = KEYS[1]
|
|
15
|
+
local workflowKey = KEYS[2]
|
|
16
|
+
local token = ARGV[1]
|
|
17
|
+
local state = ARGV[2]
|
|
18
|
+
local updatedAt = ARGV[3]
|
|
19
|
+
local step = ARGV[4]
|
|
20
|
+
|
|
21
|
+
if redis.call("get", lockKey) == token then
|
|
22
|
+
redis.call("hset", workflowKey, "state", state, "updatedAt", updatedAt, "step", step)
|
|
23
|
+
return 1
|
|
24
|
+
else
|
|
25
|
+
return 0
|
|
26
|
+
end
|
|
27
|
+
`,_=`
|
|
28
|
+
local lockKey = KEYS[1]
|
|
29
|
+
local workflowKey = KEYS[2]
|
|
30
|
+
local token = ARGV[1]
|
|
31
|
+
|
|
32
|
+
if redis.call("get", lockKey) == token then
|
|
33
|
+
return redis.call("hincrby", workflowKey, "step", 1)
|
|
34
|
+
else
|
|
35
|
+
return -1
|
|
36
|
+
end
|
|
37
|
+
`,F=`
|
|
38
|
+
local lockKey = KEYS[1]
|
|
39
|
+
local workflowKey = KEYS[2]
|
|
40
|
+
local token = ARGV[1]
|
|
41
|
+
local result = ARGV[2]
|
|
42
|
+
local status = ARGV[3]
|
|
43
|
+
|
|
44
|
+
if redis.call("get", lockKey) == token then
|
|
45
|
+
redis.call("hset", workflowKey, "status", status, "result", result)
|
|
46
|
+
return 1
|
|
47
|
+
else
|
|
48
|
+
return 0
|
|
49
|
+
end
|
|
50
|
+
`,$=`
|
|
51
|
+
local lockKey = KEYS[1]
|
|
52
|
+
local workflowKey = KEYS[2]
|
|
53
|
+
local token = ARGV[1]
|
|
54
|
+
local errorMsg = ARGV[2]
|
|
55
|
+
local status = ARGV[3]
|
|
56
|
+
|
|
57
|
+
if redis.call("get", lockKey) == token then
|
|
58
|
+
redis.call("hset", workflowKey, "status", status, "error", errorMsg)
|
|
59
|
+
return 1
|
|
60
|
+
else
|
|
61
|
+
return 0
|
|
62
|
+
end
|
|
63
|
+
`,G=`
|
|
64
|
+
local limit = tonumber(ARGV[1])
|
|
65
|
+
local now = tonumber(ARGV[2])
|
|
66
|
+
local key = KEYS[1]
|
|
67
|
+
|
|
68
|
+
local ids = redis.call('zrangebyscore', key, 0, now, 'LIMIT', 0, limit)
|
|
69
|
+
if #ids > 0 then
|
|
70
|
+
redis.call('zrem', key, unpack(ids))
|
|
71
|
+
end
|
|
72
|
+
return ids
|
|
73
|
+
`,U=`
|
|
74
|
+
local sourceZSet = KEYS[1]
|
|
75
|
+
local destList = KEYS[2]
|
|
76
|
+
local now = tonumber(ARGV[1])
|
|
77
|
+
local limit = tonumber(ARGV[2])
|
|
78
|
+
|
|
79
|
+
-- Obtener tareas listas (score <= now)
|
|
80
|
+
local tasks = redis.call('zrangebyscore', sourceZSet, 0, now, 'LIMIT', 0, limit)
|
|
81
|
+
|
|
82
|
+
if #tasks > 0 then
|
|
83
|
+
redis.call('zrem', sourceZSet, unpack(tasks))
|
|
84
|
+
-- Pushear cada tarea a la lista de ejecuci\xF3n
|
|
85
|
+
for i, task in ipairs(tasks) do
|
|
86
|
+
redis.call('lpush', destList, task)
|
|
87
|
+
end
|
|
88
|
+
end
|
|
89
|
+
|
|
90
|
+
return #tasks
|
|
91
|
+
`;import M from"superjson";function h(i){return M.stringify(i)}function m(i){try{return M.parse(i)}catch(t){try{return JSON.parse(i)}catch(e){throw new Error(`Failed to deserialize data: ${t} ${e}`)}}}var Y={info:(i,t)=>console.log(`[INFO] ${i}`,t||""),error:(i,t)=>console.error(`[ERROR] ${i}`,t||""),warn:(i,t)=>console.warn(`[WARN] ${i}`,t||""),debug:(i,t)=>console.debug(`[DEBUG] ${i}`,t||"")},K=class{constructor(t){this.retention=t}getKey(t){return`workflow:${t}`}getLockKey(t){return`workflow:${t}:lock`}async acquireLock(t,e=10){let s=this.getLockKey(t),n=W();return await l.set(s,n,"EX",e,"NX")==="OK"?n:null}async releaseLock(t,e){await l.eval(P,1,this.getLockKey(t),e)}async renewLock(t,e,s){return await l.eval(D,1,this.getLockKey(t),e,s)===1}async get(t){let e=await l.hgetall(this.getKey(t));return!e||Object.keys(e).length===0?null:{workflowId:e.workflowId,name:e.name,version:e.version,status:e.status,step:parseInt(e.step,10),input:m(e.input),state:m(e.state),result:e.result?m(e.result):void 0,error:e.error,parentId:e.parentId,subWorkflowId:e.subWorkflowId,awaitingSignal:e.awaitingSignal||e.awaitingEvent,createdAt:e.createdAt?parseInt(e.createdAt,10):0,updatedAt:e.updatedAt?parseInt(e.updatedAt,10):0}}async create(t){let e=Date.now(),s={...t,step:0,state:{},createdAt:e,updatedAt:e},n={...s,input:h(s.input),state:h(s.state)};n.version===void 0&&delete n.version;let r=l.pipeline();r.hset(this.getKey(s.workflowId),n),await r.exec()}async updateState(t,e,s,n){if(await l.eval(A,2,this.getLockKey(t),this.getKey(t),n,h(e),Date.now(),s)===0)throw new Error(`Lock lost for workflow ${t}`)}async updateStatus(t,e,s={}){await l.hset(this.getKey(t),{status:e,...s,updatedAt:Date.now()})}async incrementStep(t,e){let s=await l.eval(_,2,this.getLockKey(t),this.getKey(t),e);if(s===-1)throw new Error(`Lock lost for workflow ${t}`);return s}async applyRetention(t){if(this.retention){let e=v(this.retention)/1e3;e>0&&await l.expire(this.getKey(t),e)}}async complete(t,e,s){if(await 
// ============================================================================
// NOTE(review): This is minified, generated bundler output (dist/index.mjs of
// @bobtail.software/b-durable). Do not hand-edit — fix the TypeScript sources
// and rebuild. The comments in this header only map the mangled identifiers
// to their roles so the chunk can be read; the code below is byte-identical
// to the published dist.
//
// This chunk begins mid-way through the Redis-backed repository class `K`
// (its opening is above this chunk). The visible tail contains: the end of
// complete(), then fail(), scheduleSleep(), getWorkflowsToWake(),
// enqueueTask(), resumeForCatch() (rewinds a workflow to a recorded catch
// step), moveToDLQ(), scheduleTaskRetry() and moveDueTasksToQueue().
// Identifier map (as used below): `l` = Redis command client, `y` = blocking
// Redis client (used only for brpoplpush), `h`/`m` = serialize/deserialize
// helpers (presumably JSON or superjson — TODO confirm in unminified source),
// `c` = workflow-status enum, `v` = duration parser (ms-style), `S` =
// WorkflowCancellationError, F/$/A/G/U = Lua script sources, E = task-queue
// list key, L = delayed-task zset, I = sleep zset, x = DLQ list, k = worker
// registry set, R = heartbeat key prefix, W/z = id generators.
//
// `b` = Runtime class. Responsibilities visible below:
//   - start()/scheduleExecution()/_executeStep(): create the workflow record,
//     then execute one step at a time under a Redis lock (acquireLock, renew
//     every 5s, released in `finally`), with strict version checking
//     (-> VERSION_MISMATCH) and cancellation via `S` when status=CANCELLING.
//   - handleInstruction(): interprets the instruction a step returns
//     (SCHEDULE_TASK, SCHEDULE_SLEEP, WAIT_FOR_SIGNAL, EXECUTE_SUBWORKFLOW,
//     EMIT_EVENT, COMPLETE); returns true only when the workflow should
//     advance immediately to the next step (EMIT_EVENT).
//   - handleFailure(): pops a recorded tryCatchStack entry and resumes at the
//     catch step when one exists; otherwise publishes "workflow:failed",
//     marks the workflow FAILED and propagates the failure to a parent.
//   - resumeParentWorkflow()/propagateFailureToParent(): sub-workflow
//     completion/failure plumbing (parent must be AWAITING_SUBWORKFLOW and
//     reference this child in subWorkflowId).
//   - signal()/cancel(): external interaction. signal() retries the lock 3x;
//     cancel() retries indefinitely via recursion, marks CANCELLING, and
//     wakes SLEEPING / AWAITING_SIGNAL workflows so they observe it.
//   - startScheduler()/checkSleepers()/checkDelayedTasks()/reapDeadWorkers():
//     polling loop that wakes due sleepers, requeues due retries, and drains
//     the processing list of any worker whose heartbeat key expired.
//   - startHeartbeat()/startWorker(): brpoplpush-based task consumer with a
//     per-worker processing list, dynamic import() of the task module, and
//     exponential backoff (initialInterval/backoffCoefficient/maxInterval)
//     before moving exhausted tasks to the DLQ.
//
// Tail of the chunk: `H` (bDurable wrapper tagging __isDurable), `B`
// (console logger), `Q` (maps an "onXxx" option key to event name "xxx"),
// and `Rt` (bDurableInitialize): wires the Redis clients, a
// psubscribe("event:*") fanout for workflow events, and returns the public
// API (start/stop/cancel/getState/getHandle).
// Log/error messages are intentionally left in Spanish: they are runtime
// strings in the published artifact and must not be altered here.
// ============================================================================
l.eval(F,2,this.getLockKey(t),this.getKey(t),s,h(e??null),c.COMPLETED)===0)throw new Error(`Lock lost for workflow ${t}`);await this.applyRetention(t)}async fail(t,e,s,n=c.FAILED){s?await l.eval($,2,this.getLockKey(t),this.getKey(t),s,e.message,n)===0&&console.warn(`Could not fail workflow ${t} safely: Lock lost.`):await l.hset(this.getKey(t),{status:n,error:e.message}),await this.applyRetention(t)}async scheduleSleep(t,e){await this.updateStatus(t,c.SLEEPING),await l.zadd(I,e,t)}async getWorkflowsToWake(t=100){let e=Date.now();return await l.eval(G,1,I,t,e)}async enqueueTask(t){await l.lpush(E,h(t))}async resumeForCatch(t,e,s,n){if(await l.eval(A,2,this.getLockKey(t),this.getKey(t),n,h(e),Date.now(),s)===0)throw new Error(`Lock lost for workflow ${t}`);await l.hset(this.getKey(t),{status:c.RUNNING})}async moveToDLQ(t,e){let s={...t,failedAt:Date.now(),error:e.message,stack:e.stack};await l.lpush(x,h(s))}async scheduleTaskRetry(t,e){let s=Date.now()+e;await l.zadd(L,s,h(t))}async moveDueTasksToQueue(t=100){return await l.eval(U,2,L,E,Date.now(),t)}},b=class{durableFns=new Map;repo;workerId=W();isRunning=!1;schedulerInterval=null;heartbeatInterval=null;sourceRoot;pollingInterval;logger;maxTaskRetries=3;constructor(t){this.sourceRoot=t.sourceRoot,this.repo=new K(t.retention),this.pollingInterval=t.pollingInterval||5e3,this.logger=t.logger||Y}async getState(t){let e=await this.repo.get(t);return e?{workflowId:e.workflowId,name:e.name,version:e.version,status:e.status,step:e.step,input:e.input,output:e.result,state:e.state,error:e.error,createdAt:e.createdAt,updatedAt:e.updatedAt}:null}async start(t,e,s){let n=e.workflowId||W();if(e.workflowId){let r=await this.repo.get(e.workflowId);if(r&&r.status!==c.COMPLETED&&r.status!==c.FAILED)throw new Error(`Workflow with ID '${e.workflowId}' already exists and is in a running state (${r.status}).`)}return this.logger.info(`[RUNTIME] Iniciando workflow '${t.name}' v${t.version} con ID: ${n}`),await 
this.repo.create({workflowId:n,name:t.name,version:t.version,status:c.RUNNING,input:e.input,parentId:s}),setImmediate(()=>{this._executeStep(n,t).catch(r=>{this.logger.error("Error fatal en ejecuci\xF3n inicial",{error:r,workflowId:n})})}),{workflowId:n,unsubscribe:async()=>{}}}async scheduleExecution(t,e,s,n){setImmediate(()=>{this._executeStep(t,e,s,n).catch(r=>{this.logger.error("Error no manejado en scheduleExecution",{error:r,workflowId:t})})})}async _executeStep(t,e,s,n){let r=await this.repo.acquireLock(t);if(!r)return;let o=setInterval(()=>{this.repo.renewLock(t,r,10).catch(a=>this.logger.warn(`Error renovando lock para ${t}`,{error:a}))},5e3);try{if(n)throw n;let a=await this.repo.get(t);if(!a)return;if(a.status===c.CANCELLING)throw new S(a.error||"Workflow cancelled");if(a.status!==c.RUNNING)return;let p=a.version==="undefined"?void 0:a.version,d=e.version==="undefined"?void 0:e.version;if(String(p??"")!==String(d??"")){let w=new Error(`Version mismatch: DB=${p}, Code=${d}`);await this.repo.fail(t,w,r,c.VERSION_MISMATCH);return}let u={workflowId:t,step:a.step,input:a.input,state:a.state,result:s,log:(w,T)=>this.logger.info(w,{...T,workflowId:t,step:a.step})},g=await e.execute(u);await this.repo.updateState(t,u.state,u.step,r),await this.handleInstruction(g,u,a.name,r)&&(await this.repo.incrementStep(t,r),this.scheduleExecution(t,e,void 0))}catch(a){let p=a instanceof Error?a:new Error(String(a));this.logger.error("Error en workflow",{workflowId:t,error:p.message}),await this.handleFailure(t,p,e,r)}finally{clearInterval(o),await this.repo.releaseLock(t,r)}}async handleInstruction(t,e,s,n){let{workflowId:r}=e;switch(t.type){case"SCHEDULE_TASK":return await this.repo.enqueueTask({workflowId:r,durableFunctionName:s,...t}),!1;case"SCHEDULE_SLEEP":{let o=v(t.duration);if(typeof o!="number")throw new Error(`Invalid time value provided to bSleep: "${t.duration}"`);let a=Date.now()+o;return await this.repo.scheduleSleep(r,a),!1}case"WAIT_FOR_SIGNAL":return await 
this.repo.updateStatus(r,c.AWAITING_SIGNAL,{awaitingSignal:t.signalName}),await l.sadd(`signals:awaiting:${t.signalName}`,r),!1;case"EXECUTE_SUBWORKFLOW":{let o=this.durableFns.get(t.workflowName);if(!o)throw new Error(`Sub-workflow '${t.workflowName}' no encontrado.`);let{workflowId:a}=await this.start(o,{input:t.input},r);return await this.repo.updateStatus(r,c.AWAITING_SUBWORKFLOW,{subWorkflowId:a}),!1}case"EMIT_EVENT":{let o=`event:${r}`,a=h({eventName:t.eventName,payload:t.payload});return await l.publish(o,a),!0}case"COMPLETE":{let o=`event:${r}`,a=h({eventName:"workflow:completed",payload:t.result});return await l.publish(o,a),await this.repo.complete(r,t.result,n),await this.resumeParentWorkflow(r),!1}}}async handleFailure(t,e,s,n){let r=n;if(!r&&(r=await this.repo.acquireLock(t,20),!r)){this.logger.warn(`No se pudo adquirir lock para fallo en ${t}`);return}try{if(e instanceof S){await this.repo.fail(t,e,r,c.CANCELLED);let u=await this.repo.get(t);u?.subWorkflowId&&await this.cancel(u.subWorkflowId,`Parent workflow ${t} was cancelled`);return}let o=await this.repo.get(t);if(!o||o.status===c.FAILED||o.status===c.COMPLETED)return;let a=o.state.tryCatchStack;if(a&&a.length>0){let g=a.pop()?.catchStep;if(g!==void 0){this.logger.info(`Capturando error en step ${g}`,{workflowId:t}),await this.repo.resumeForCatch(t,o.state,g,r),this.scheduleExecution(t,s,{name:e.name,message:e.message,stack:e.stack});return}}let p=`event:${t}`,d=h({eventName:"workflow:failed",payload:{message:e.message}});await l.publish(p,d),await this.repo.fail(t,e,r),await this.propagateFailureToParent(t,e)}finally{!n&&r&&await this.repo.releaseLock(t,r)}}async resumeParentWorkflow(t){let e=await this.repo.get(t);if(!e?.parentId)return;let s=e.parentId,n=await this.repo.get(s);if(!n||n.status!==c.AWAITING_SUBWORKFLOW||n.subWorkflowId!==t)return;let r=this.durableFns.get(n.name);if(!r){await this.repo.fail(s,new Error(`Definici\xF3n del workflow '${n.name}' no encontrada.`),null);return}await 
this.repo.updateStatus(s,c.RUNNING,{subWorkflowId:""});let o=await this.repo.acquireLock(s);if(o)try{await this.repo.incrementStep(s,o),this.scheduleExecution(s,r,e.result)}finally{await this.repo.releaseLock(s,o)}else throw this.logger.warn(`Could not lock parent ${s} to resume. Retrying later...`),new Error(`Temporary Lock Failure: Could not acquire parent lock for ${s}`)}async propagateFailureToParent(t,e){let s=await this.repo.get(t);if(!s?.parentId)return;let n=s.parentId,r=await this.repo.get(n);if(!r||r.status!==c.AWAITING_SUBWORKFLOW||r.subWorkflowId!==t)return;let o=this.durableFns.get(r.name);if(!o){await this.repo.fail(n,new Error(`Definici\xF3n del workflow '${r.name}' no encontrada al propagar fallo.`),null);return}await this.repo.updateStatus(n,c.RUNNING,{subWorkflowId:""});let a=new Error(`Sub-workflow '${s.name}' (${t}) fall\xF3: ${e.message}`);a.stack=e.stack,this.scheduleExecution(n,o,void 0,a)}async signal(t,e,s){let n=null;for(let r=0;r<3&&(n=await this.repo.acquireLock(t),!n);r++)await new Promise(o=>setTimeout(o,50));if(!n)return this.logger.warn("Lock timeout en signal",{workflowId:t});try{let r=await this.repo.get(t);if(!r)return this.logger.warn("Se\xF1al para workflow inexistente",{workflowId:t});if(r.status!==c.AWAITING_SIGNAL||r.awaitingSignal!==e)return this.logger.warn("Workflow no esperaba esta se\xF1al",{workflowId:t,expected:r.awaitingSignal,received:e});let o=this.durableFns.get(r.name);if(!o){await this.repo.fail(t,new Error(`Funci\xF3n durable '${r.name}' no encontrada.`),n);return}await this.repo.updateStatus(t,c.RUNNING,{awaitingSignal:""}),await l.srem(`signals:awaiting:${e}`,t),await this.repo.incrementStep(t,n),this.scheduleExecution(t,o,s)}catch(r){let o=r instanceof Error?r:new Error(String(r)),a=(await this.repo.get(t))?.name||"",p=this.durableFns.get(a);await this.handleFailure(t,o,p,n)}finally{n&&await this.repo.releaseLock(t,n)}}async cancel(t,e){let s=await this.repo.acquireLock(t);if(!s)return await new 
Promise(n=>setTimeout(n,100)),this.cancel(t,e);try{let n=await this.repo.get(t);if(!n||[c.COMPLETED,c.FAILED,c.CANCELLED].includes(n.status))return;if(await this.repo.updateStatus(t,c.CANCELLING,{error:e}),n.status===c.SLEEPING){await l.zrem(I,t);let r=this.durableFns.get(n.name);this.scheduleExecution(t,r)}if(n.status===c.AWAITING_SIGNAL){let r=this.durableFns.get(n.name);this.scheduleExecution(t,r)}}finally{await this.repo.releaseLock(t,s)}}startScheduler(){if(this.schedulerInterval)return;this.logger.info(`Scheduler iniciado (${this.pollingInterval}ms)`);let t=async()=>{await this.checkSleepers(),await this.checkDelayedTasks(),await this.reapDeadWorkers()};this.schedulerInterval=setInterval(t,this.pollingInterval)}async checkDelayedTasks(){try{let t=await this.repo.moveDueTasksToQueue(50);t>0&&this.logger.debug(`Scheduler movi\xF3 ${t} tareas diferidas a la cola activa`)}catch(t){this.logger.error("Error chequeando tareas diferidas",{error:t})}}async checkSleepers(){let e=await this.repo.getWorkflowsToWake(50);e.length!==0&&await Promise.all(e.map(async s=>{let n=await this.repo.acquireLock(s);if(n)try{let r=await this.repo.get(s);if(r){let o=this.durableFns.get(r.name);o&&(this.logger.info("Despertando workflow",{workflowId:s}),await this.repo.updateStatus(s,c.RUNNING),await this.repo.incrementStep(s,n),this.scheduleExecution(s,o,void 0))}}finally{await this.repo.releaseLock(s,n)}}))}async reapDeadWorkers(){let t="0";do{let[e,s]=await l.sscan(k,t,"COUNT",100);t=e;for(let n of s){if(await l.exists(`${R}${n}`))continue;this.logger.warn(`Worker muerto ${n}. 
Recuperando tareas.`);let r=`${E}:processing:${n}`,o=await l.rpoplpush(r,E);for(;o;)o=await l.rpoplpush(r,E);await l.del(r),await l.srem(k,n)}}while(t!=="0")}startHeartbeat(){let t=`${R}${this.workerId}`,e=Math.max(Math.ceil(this.pollingInterval*3/1e3),5),s=()=>{this.isRunning&&l.set(t,Date.now().toString(),"EX",e).catch(()=>{})};this.heartbeatInterval=setInterval(s,this.pollingInterval),s()}startWorker(){if(this.isRunning)return;this.isRunning=!0;let t=`${E}:processing:${this.workerId}`;this.logger.info(`Worker ${this.workerId} iniciado`),this.startHeartbeat(),(async()=>{for(await l.sadd(k,this.workerId);this.isRunning;)try{let s=await y.brpoplpush(E,t,2);if(!s)continue;let n=m(s);this.logger.debug(`Ejecutando tarea: ${n.exportName}`,{workflowId:n.workflowId});try{let r;n.modulePath.startsWith("virtual:")?r=await import(n.modulePath):r=await import(V(this.sourceRoot,n.modulePath));let o=r[n.exportName];if(typeof o!="function")throw new Error(`'${n.exportName}' no es una funci\xF3n.`);let a=await o(...n.args),p=this.durableFns.get(n.durableFunctionName);if(p){let d=await this.repo.acquireLock(n.workflowId);if(d)try{await this.repo.incrementStep(n.workflowId,d),this.scheduleExecution(n.workflowId,p,a)}finally{await this.repo.releaseLock(n.workflowId,d)}else this.logger.warn(`No se pudo adquirir lock para avanzar workflow ${n.workflowId} tras tarea`,{task:n.exportName})}await l.lrem(t,1,s)}catch(r){let o=r instanceof Error?r:new Error(String(r));this.logger.error(`Fallo en tarea ${n.exportName}`,{workflowId:n.workflowId,error:o.message});let a=this.durableFns.get(n.durableFunctionName),p=a?.retryOptions||{},d=p.maxAttempts??3,u=(n.attempts||0)+1;if(n.attempts=u,u<=d){let g=p.initialInterval?v(p.initialInterval):1e3,f=p.backoffCoefficient??2,w=p.maxInterval?v(p.maxInterval):36e5,T=g*Math.pow(f,u-1);T>w&&(T=w),this.logger.warn(`Reintentando tarea en ${v(T)} (intento ${u}/${d===1/0?"Inf":d})`,{workflowId:n.workflowId}),T>0?await this.repo.scheduleTaskRetry(n,T):await 
l.lpush(E,h(n)),await l.lrem(t,1,s)}else this.logger.error("Reintentos agotados. Moviendo a DLQ.",{workflowId:n.workflowId}),await this.repo.moveToDLQ(n,o),a?await this.handleFailure(n.workflowId,o,a,null):await this.repo.fail(n.workflowId,new Error(`Def missing for ${n.durableFunctionName}`),null),await l.lrem(t,1,s)}}catch(s){if(!this.isRunning)break;this.logger.error("Error infraestructura worker",{error:s}),await new Promise(n=>setTimeout(n,5e3))}})()}run(t){this.durableFns=t,this.startWorker(),this.startScheduler()}async stop(){this.isRunning=!1,this.schedulerInterval&&clearInterval(this.schedulerInterval),this.heartbeatInterval&&clearInterval(this.heartbeatInterval),await l.srem(k,this.workerId),this.logger.info("Runtime detenido")}};var H=i=>({...i,__isDurable:!0});var B={info:(i,t)=>console.log(`[INFO] ${i}`,t||""),error:(i,t)=>console.error(`[ERROR] ${i}`,t||""),warn:(i,t)=>console.warn(`[WARN] ${i}`,t||""),debug:(i,t)=>console.debug(`[DEBUG] ${i}`,t||"")},Q=i=>{if(!i.startsWith("on")||i.length<=2)return null;let t=i.slice(2);return t.charAt(0).toLowerCase()+t.slice(1)};function Rt(i){let t=i.logger||B;t.info("--- Inicializando Sistema Durable ---"),N({commandClient:i.redisClient,blockingClient:i.blockingRedisClient});let e=new b({sourceRoot:i.sourceRoot,retention:i.retention,pollingInterval:i.pollingInterval,logger:t});e.run(i.durableFunctions);let s=new q(i.redisClient.options),n=new Map;s.psubscribe("event:*",o=>{o&&t.error("Error fatal al suscribirse a los canales de eventos:",{error:o})}),s.on("pmessage",(o,a,p)=>{let d=n.get(a);if(d&&d.length>0)try{let u=m(p),g={name:u.eventName||u.signalName,payload:u.payload};[...d].forEach(f=>f(g))}catch(u){t.error(`Error al parsear evento en ${a}`,{error:u})}});let r=(o,a)=>{let p=`event:${a}`,d=u=>(n.has(p)||n.set(p,[]),n.get(p)?.push(u),()=>{let g=n.get(p);if(g){let f=g.indexOf(u);f>-1&&g.splice(f,1),g.length===0&&n.delete(p)}});return{workflowId:a,signal:async(u,g)=>{await e.signal(a,u,g)},on:async(u,g)=>{let 
f=C(a),w=await i.redisClient.hgetall(f);return u==="workflow:completed"&&w.status===c.COMPLETED?(g(m(w.result||"null")),{unsubscribe:()=>{}}):u==="workflow:failed"&&w.status===c.FAILED?(g({message:w.error||"Unknown"}),{unsubscribe:()=>{}}):{unsubscribe:d(O=>{O.name===u&&g(O.payload)})}},subscribe:async u=>({unsubscribe:d(f=>{u(f)})})}};return{start:async(o,a)=>{let p=a.workflowId||z(),d=[],u=`event:${p}`,g={};if(Object.keys(a).forEach(f=>{let w=Q(f);w&&typeof a[f]=="function"&&(g[w]=a[f])}),Object.keys(g).length>0){n.has(u)||n.set(u,[]);let f=w=>{let T=g[w.name];T&&T(w.payload)};n.get(u)?.push(f),d.push(()=>{let w=n.get(u);if(w){let T=w.indexOf(f);T>-1&&w.splice(T,1)}})}return await e.start(o,{workflowId:p,input:a.input}),{workflowId:p,unsubscribe:async()=>{d.forEach(f=>f())}}},stop:()=>{e.stop(),s.quit().catch(()=>{})},runtime:e,cancel:(o,a)=>e.cancel(o,a),getState:o=>e.getState(o),getHandle:(o,a)=>r(o,a)}}export{S as WorkflowCancellationError,H as bDurable,Rt as bDurableInitialize};
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@bobtail.software/b-durable",
-  "version": "1.0.6",
+  "version": "1.0.7",
   "main": "dist/index.mjs",
   "types": "dist/index.d.mts",
   "description": "A system for creating durable, resilient, and type-safe workflows in JavaScript/TypeScript.",
@@ -25,6 +25,7 @@
     "ms": "^2.1.3",
     "pino": "^10.1.0",
     "prettier": "^3.6.2",
+    "superjson": "^2.2.5",
     "ts-morph": "^27.0.2"
   },
   "devDependencies": {