@bobtail.software/b-durable 1.0.4 → 1.0.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,217 +1,207 @@
1
+ # `b-durable`: Composable, Type-Safe, Durable Workflows for TypeScript
1
2
 
2
- # `@bobtail.software/b-durable`: Composable, Type-Safe, Durable Workflows for TypeScript
3
+ ![NPM Version](https://img.shields.io/npm/v/@bobtail.software/b-durable.svg)
4
+ ![License](https://img.shields.io/npm/l/@bobtail.software/b-durable.svg)
3
5
 
4
- [![npm version](https://badge.fury.io/js/@bobtail.software/b-durable.svg)](https://badge.fury.io/js/@bobtail.software/b-durable)
5
- [![License: GPL-3.0](https://img.shields.io/badge/License-GPL--3.0-yellow.svg)](https://opensource.org/licenses/GPL-3.0)
6
-
7
- `b-durable` is a powerful system that transforms standard `async` functions into **composable, interactive, durable, and resilient workflows**. It lets you write long-running business logic—spanning hours, days, or months—as simple, linear `async/await` code. The system handles state persistence, orchestration, external events, and crash recovery, allowing you to focus on your business logic.
6
+ `b-durable` is a production-ready system that transforms standard `async` functions into **composable, interactive, durable, and resilient workflows**. It lets you write long-running business logic—spanning hours, days, or months—as simple, linear `async/await` code. The system handles state persistence, orchestration, crash recovery, strict versioning, and observability.
8
7
 
9
8
  ## The Problem
10
9
 
11
10
  Standard `async/await` is great for short-lived operations, but it breaks down for complex, long-running processes:
12
11
 
13
12
  1. **Fragility**: If your server restarts mid-execution, all in-memory state is lost.
14
- 2. **Inefficiency**: An operation like `await bSleep('7 days')` is impossible. It would hold a process hostage, consume resources, and wouldn't survive a single deployment.
15
- 3. **Orchestration Complexity**: Coordinating processes that involve multiple services, human-in-the-loop steps (like approvals), or external system webhooks often leads to a tangled mess of state machines, queues, and database flags.
13
+ 2. **Inefficiency**: An operation like `await bSleep('7 days')` is impossible in standard Node.js: it would tie up a process for the whole duration and would not survive a restart or deployment.
14
+ 3. **Operational Blindness**: It's hard to know the state of a multi-step process running across distributed services.
15
+ 4. **Deployment Risks**: Deploying new code while old workflows are still in flight can leave them running steps they were never written for, corrupting their state.
16
16
 
17
17
  ## The `b-durable` Solution
18
18
 
19
- `b-durable` allows you to express this complexity as a single, readable `async` function. The system automatically persists the workflow's state after each `await` step, ensuring it can resume from the exact point of interruption.
19
+ `b-durable` allows you to express this complexity as a single, readable `async` function. The system automatically persists the workflow's state to Redis after each `await` step, so execution can resume from the exact point of interruption.
20
20
 
21
- Imagine orchestrating an e-commerce order. With `b-durable`, the code is as clear as the business process itself:
21
+ ### Key Capabilities
22
22
 
23
- ```typescript
24
- // Define a reusable sub-workflow for handling payments
25
- export const paymentWorkflow = bDurable({
26
- workflow: async (input: { orderId: string; amount: number }, context) => {
27
- // 1. Call a service to process the payment
28
- const result = await processPayment({ orderId: input.orderId, amount: input.amount });
29
- if (!result.success) {
30
- throw new Error('Payment failed!');
31
- }
32
- // 2. Pause durably for fraud checks
33
- await context.bSleep('30m');
34
- return { transactionId: result.transactionId };
35
- },
36
- });
23
+ - **🛡️ Strict Versioning**: Prevents state corruption by ensuring running workflows only execute code compatible with their version.
24
+ - **💀 The Reaper (Reliability)**: Automatically detects crashed workers and recovers "lost" tasks, ensuring zero data loss.
25
+ - **👁️ Observability First**: Injectable Loggers, `getState` inspection API, and detailed tracking.
26
+ - **♻️ Dead Letter Queue (DLQ)**: Automatic retries for failed tasks; moves persistently failing tasks to a DLQ for manual inspection.
27
+ - **🛑 Cancellation**: Gracefully cancel running workflows with support for cleanup logic (`try/catch/finally`).
28
+ - **🧹 Auto-Retention**: Automatically expire completed/failed workflows from Redis to manage storage costs.
29
+
30
+ ## Example: E-Commerce Order
37
31
 
38
- // Define the main order processing workflow
39
- export const orderProcessingWorkflow = bDurable({
40
- workflow: async (input: { orderId: string; items: Item[] }, context) => {
32
+ ```typescript
33
+ import { bDurable } from '@bobtail.software/b-durable';
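+ // paymentWorkflow, shipOrder, releaseInventory and notifyFailure are assumed to be defined in your own modules (not shown here)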
34
+
35
+ // Define contracts
36
+ interface OrderEvents { 'order.approved': { approverId: string }; }
37
+ interface OrderSignals { 'status.update': { status: string }; }
38
+
39
+ export const orderProcessingWorkflow = bDurable<
40
+ { orderId: string; amount: number },
41
+ { status: 'completed' | 'failed' },
42
+ OrderEvents,
43
+ OrderSignals
44
+ >({
45
+ // VERSIONING IS MANDATORY
46
+ version: '1.0',
47
+ workflow: async (input, context) => {
41
48
  try {
42
- // 1. Call another workflow and await its result
43
- const payment = await context.bExecute(paymentWorkflow, { orderId: input.orderId, amount: 99.99 });
44
- context.log(`Payment successful: ${payment.transactionId}`);
49
+ // 1. Execute sub-workflow
50
+ const payment = await context.bExecute(paymentWorkflow, { amount: input.amount });
51
+
52
+ // 2. Emit non-blocking signal
53
+ await context.bSignal('status.update', { status: 'paid' });
45
54
 
46
- // 2. Pause and wait for an external event (e.g., from a UI or webhook)
55
+ // 3. Wait for external event (human approval)
47
56
  const approval = await context.bWaitForEvent('order.approved');
48
57
  context.log(`Order approved by ${approval.approverId}`);
49
58
 
50
- // 3. Call the final service function
59
+ // 4. Schedule shipping
51
60
  await shipOrder(input.orderId);
52
-
61
+
53
62
  return { status: 'completed' };
54
63
  } catch (error) {
55
- // 4. Handle errors durably
56
- await notifyCustomerOfFailure(input.orderId, error.message);
57
- await cancelOrder(input.orderId);
58
- return { status: 'failed', reason: error.message };
64
+ // 5. Handle errors (and cancellations!) durably
65
+ if (error.isCancellation) {
66
+ await releaseInventory(input.orderId); // Cleanup
67
+ throw error;
68
+ }
69
+ await notifyFailure(input.orderId);
70
+ return { status: 'failed' };
59
71
  }
60
72
  },
61
73
  });
62
74
  ```
63
75
 
64
- ## Core Features
65
-
66
- - **Composable Orchestration**: Workflows can call other workflows using `await context.bExecute()`, allowing you to build complex processes from smaller, reusable parts. Results and errors are propagated automatically.
67
- - **Interactive & Event-Driven**: Pause a workflow indefinitely with `await context.bWaitForEvent()` until an external event is received, enabling human-in-the-loop patterns and webhook integrations.
68
- - **Durable & Resilient**: Workflows survive server restarts, crashes, and deployments, resuming exactly where they left off.
69
- - **Built-in Error Handling**: Use standard `try/catch` blocks to handle errors from tasks or sub-workflows. Your `catch` block will execute reliably, even if the failure occurs hours after the `try` block started.
70
- - **Durable Timers**: Use `await context.bSleep('30 days')` to pause workflows for extended periods without consuming server resources.
71
- - **Type Safety End-to-End**: Leverages TypeScript for type safety across steps, I/O, events, and workflow composition.
72
- - **Compiler-Powered**: A smart CLI compiler transforms your workflows into a step-by-step executable format, preserving types and ensuring runtime correctness.
73
-
74
- ## How It Works: Compiler + Runtime
75
-
76
- 1. **The Smart Compiler (`b-durable-compiler`)**:
77
- Analyzes your workflow files (`*.workflow.ts`). For each function wrapped in `bDurable(...)`, it:
78
- - **Maps Control Flow**: Breaks the function into steps at each `await`, analyzing `if/else` and `try/catch` blocks to build a complete state machine.
79
- - **Identifies Durable Calls**: Differentiates between a durable instruction (`context.bSleep`, `context.bExecute`) and a standard service task call.
80
- - **Generates Durable Artifacts**: Produces compiled `.mts` files that the runtime can execute step-by-step.
81
-
82
- 2. **The Durable Runtime**:
83
- The engine that executes the compiled workflows.
84
- - **State Persistence**: Uses Redis to store the state, step, and context of every workflow instance.
85
- - **Orchestration Logic**: Manages parent/child workflow relationships, passing results and errors up the chain.
86
- - **Event System**: Tracks which workflows are waiting for which events.
87
- - **Task Queue & Scheduler**: Reliably executes service function calls and manages long-running timers.
88
-
89
76
  ## Getting Started
90
77
 
91
78
  ### 1. Installation
92
79
 
93
- Install the core library and its peer dependencies. We also highly recommend the ESLint plugin for the best developer experience.
94
-
95
80
  ```bash
96
- pnpm add @bobtail.software/b-durable ioredis
81
+ pnpm add @bobtail.software/b-durable ioredis ms
97
82
  pnpm add -D @bobtail.software/eslint-plugin-b-durable
98
83
  ```
99
84
 
100
- ### 2. Set Up ESLint (Highly Recommended)
101
-
102
- Our ESLint plugin prevents common errors by flagging unsupported code constructs (like loops) inside your workflows.
85
+ ### 2. Define a Workflow
103
86
 
104
- **In `eslint.config.js` (Flat Config):**
105
- ```javascript
106
- import bDurablePlugin from '@bobtail.software/eslint-plugin-b-durable';
107
-
108
- export default [
109
- // ... your other configs
110
- bDurablePlugin.configs.recommended,
111
- ];
112
- ```
113
- For legacy `.eslintrc.js` setup, see the [plugin's documentation](link-to-your-eslint-plugin-readme).
114
-
115
- ### 3. Define a Workflow
116
-
117
- Create a file ending in `.workflow.ts`. The `(input, context)` signature gives you access to durable functions.
87
+ Create a `.workflow.ts` file. Note the mandatory `version` field.
118
88
 
119
89
  ```typescript
120
- // src/workflows/onboarding.workflow.ts
121
- import { bDurable, DurableContext } from '@bobtail.software/b-durable';
122
- import { createUser, sendWelcomeEmail } from '../services';
123
-
124
- interface OnboardingInput {
125
- userId: string;
126
- email: string;
127
- }
128
-
129
- export const userOnboardingWorkflow = bDurable({
130
- workflow: async (input: OnboardingInput, context: DurableContext) => {
131
- const user = await createUser({ id: input.userId, email: input.email });
132
-
133
- await context.bSleep('10s');
134
-
135
- await sendWelcomeEmail(user.email);
136
-
137
- return { status: 'completed', userId: user.id };
90
+ // src/workflows/user.workflow.ts
91
+ import { bDurable } from '@bobtail.software/b-durable';
92
+ import { sendEmail } from '../services';
93
+
94
+ export const userOnboarding = bDurable({
95
+ name: 'userOnboarding',
96
+ version: '1.0', // Required for safety
97
+ workflow: async (input: { email: string }, context) => {
98
+ await context.bSleep('1 day');
99
+ await sendEmail(input.email, 'Welcome!');
100
+ return 'sent';
138
101
  },
139
102
  });
140
103
  ```
141
104
 
142
- ### 4. Compile Workflows
143
-
144
- Add a script to your `package.json` to run the compiler.
105
+ ### 3. Compile
145
106
 
107
+ Add to `package.json`:
146
108
  ```json
147
- // package.json
148
109
  "scripts": {
149
110
  "compile-workflows": "b-durable-compiler --in src/workflows --out src/generated"
150
111
  }
151
112
  ```
152
- Run `pnpm compile-workflows`. This generates the durable definitions in `src/generated`.
113
+ Run `pnpm compile-workflows`. This generates the durable definitions in `src/generated`.
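+
+ The generated `src/generated/index.mts` has roughly this shape (a sketch inferred from the compiler output; the exports come from your own workflow files):
+
+ ```typescript
+ // Generated automatically. DO NOT EDIT MANUALLY.
+ import type { DurableFunction } from '@bobtail.software/b-durable';
+ import { userOnboarding } from './user.workflow.compiled.mts';
+
+ export { userOnboarding };
+
+ const durableFunctions: Map<string, DurableFunction<any, any, any, any>> = new Map();
+ durableFunctions.set(userOnboarding.name, userOnboarding);
+
+ export default durableFunctions;
+ ```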
153
114
 
154
- ### 5. Initialize the Runtime
115
+ ### 4. Initialize the Runtime
155
116
 
156
- In your application's entry point, initialize the system and start a workflow.
117
+ Initialize the system with Redis connections and configuration options.
157
118
 
158
119
  ```typescript
159
120
  // src/main.ts
160
121
  import { bDurableInitialize } from '@bobtail.software/b-durable';
161
122
  import Redis from 'ioredis';
162
- import durableFunctions, { userOnboardingWorkflow } from './generated';
123
+ import durableFunctions, { userOnboarding } from './generated';
124
+ import { myLogger } from './logger'; // Your Winston/Pino logger
125
+
126
+ const durableSystem = bDurableInitialize({
127
+ durableFunctions,
128
+ sourceRoot: process.cwd(),
129
+ redisClient: new Redis(),
130
+ blockingRedisClient: new Redis(), // Dedicated connection for queues
131
+
132
+ // --- Production Configuration ---
133
+ retention: '7 days', // Auto-delete finished workflows after 7 days
134
+ pollingInterval: 5000, // Scheduler/Heartbeat frequency (default: 5000ms)
135
+ logger: { // Inject your logger for better observability
136
+ info: (msg, meta) => myLogger.info(msg, meta),
137
+ error: (msg, meta) => myLogger.error(msg, meta),
138
+ warn: (msg, meta) => myLogger.warn(msg, meta),
139
+ debug: (msg, meta) => myLogger.debug(msg, meta),
140
+ }
141
+ });
163
142
 
164
- async function main() {
165
- const redis = new Redis();
166
- const blockingRedis = new Redis(); // Required for reliable queue operations
143
+ // Start a workflow
144
+ const { workflowId } = await durableSystem.start(userOnboarding, {
145
+ input: { email: 'test@example.com' }
146
+ });
167
147
 
168
- const durableSystem = bDurableInitialize({
169
- durableFunctions,
170
- sourceRoot: process.cwd(),
171
- redisClient: redis,
172
- blockingRedisClient: blockingRedis,
173
- });
148
+ // Inspect state in real-time
149
+ const state = await durableSystem.getState(workflowId);
150
+ console.log(`Current step: ${state?.step}, Status: ${state?.status}`); // getState can return null
151
+ ```
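+
+ The order example above pauses at `context.bWaitForEvent('order.approved')` until someone delivers that event. A minimal sketch of doing so from your own API layer, based on the `sendEvent` signature in the shipped type definitions (identifiers reused from the earlier examples):
+
+ ```typescript
+ import { orderProcessingWorkflow } from './generated';
+
+ // Deliver the event the paused workflow instance is waiting for (e.g. after a human approval)
+ await durableSystem.sendEvent(
+   orderProcessingWorkflow, // passed for type inference of the event map
+   workflowId,
+   'order.approved',
+   { approverId: 'manager-42' },
+ );
+ ```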
174
152
 
175
- console.log('Durable system ready. Starting workflow...');
153
+ ## Advanced Features
176
154
 
177
- const workflowId = await durableSystem.start(userOnboardingWorkflow, {
178
- userId: `user-${Date.now()}`,
179
- email: 'test.user@example.com',
180
- });
181
-
182
- console.log(`Workflow ${workflowId} started.`);
183
- }
155
+ ### Strict Versioning & Deployment
184
156
 
185
- main().catch(console.error);
186
- ```
157
+ When you modify a workflow, you **must** increment the `version` string (e.g., `'1.0'` -> `'1.1'`); see the sketch after the list below.
158
+
159
+ * **Runtime Check**: Before executing a step, the worker checks if the database version matches the code version.
160
+ * **Mismatch**: If versions differ, the workflow halts with status `VERSION_MISMATCH`. This prevents "Frankenstein" workflows where step 2 of version 1 tries to run step 3 of version 2.
161
+ * **Strategy**: Run new workers alongside old workers (Blue/Green) or drain queues before deploying breaking changes.
162
+
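+ For example, reusing the `userOnboarding` workflow from Getting Started, adding a step means shipping it under a bumped version (a sketch; `sendEmail` comes from your own services module):
+
+ ```typescript
+ export const userOnboarding = bDurable({
+   name: 'userOnboarding',
+   version: '1.1', // bumped from '1.0' because the body below changed
+   workflow: async (input: { email: string }, context) => {
+     await context.bSleep('1 day');
+     await sendEmail(input.email, 'Welcome!');
+     await context.bSleep('3 days'); // new step introduced in 1.1
+     await sendEmail(input.email, 'How is it going?'); // new step introduced in 1.1
+     return 'sent';
+   },
+ });
+ ```
+
+ Instances that were started under `1.0` halt with `VERSION_MISMATCH` rather than resume into the changed code.
+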
163
+ ### Reliability & The Reaper
187
164
 
188
- ### 6. Run Your Application
165
+ * **Heartbeats**: Every worker sends a heartbeat to Redis at the configured `pollingInterval` (5 seconds by default).
166
+ * **The Reaper**: If a worker crashes (OOM, power failure) while holding a task, the Reaper detects the missing heartbeat and automatically re-queues the task for another worker. No manual intervention required.
189
167
 
190
- Run your app (`node src/main.ts`). You'll see the workflow execute, pause, and resume, with all its state managed by `b-durable`.
168
+ ### Error Handling & Dead Letter Queue (DLQ)
191
169
 
192
- ## Development Setup (for contributors)
170
+ * **Retries**: Tasks are automatically retried 3 times on failure with backoff.
171
+ * **DLQ**: After 3 failures, the task payload and error stack are moved to the Redis list `queue:dead` (see the sketch below).
172
+ * **Sub-workflows**: Failures in sub-workflows bubble up to the parent as standard JavaScript exceptions, catchable with `try/catch`.
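+
+ A minimal inspection sketch, assuming the DLQ is the plain Redis list `queue:dead` described above (the shipped typings do not expose a DLQ helper, so it is read directly with ioredis):
+
+ ```typescript
+ import Redis from 'ioredis';
+
+ // Read dead-lettered tasks (payload plus error stack) for manual inspection
+ const redis = new Redis();
+ const deadTasks = await redis.lrange('queue:dead', 0, -1);
+ for (const raw of deadTasks) {
+   console.log(JSON.parse(raw));
+ }
+ ```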
193
173
 
194
- The project is a `pnpm` monorepo.
174
+ ### Cancellation
195
175
 
196
- 1. **Clone & Install**:
197
- ```bash
198
- git clone <repository-url>
199
- cd b-durable-monorepo
200
- pnpm install
201
- ```
176
+ You can cancel a running workflow at any time.
177
+
178
+ ```typescript
179
+ await durableSystem.cancel(workflowId, 'User requested cancellation');
180
+ ```
181
+
182
+ Inside the workflow, this throws a `WorkflowCancellationError`. You can catch this to perform cleanup (e.g., reverting a payment) before re-throwing or returning.
183
+
184
+ ```typescript
185
+ try {
186
+ await context.bWaitForEvent('approval');
187
+ } catch (e) {
188
+ if (e.isCancellation) {
189
+ await refundPayment();
190
+ // Re-throwing below ends the workflow as CANCELLED
191
+ }
192
+ throw e;
193
+ }
194
+ ```
202
195
 
203
- 2. **Run in Development Mode**:
204
- This command builds the library, compiles example workflows, and starts the example app with hot-reloading.
205
- ```bash
206
- pnpm dev
207
- ```
196
+ ## Architecture
208
197
 
209
- 3. **Run Tests**:
210
- Tests use Vitest and a real Redis instance.
211
- ```bash
212
- pnpm --filter @bobtail.software/b-durable test
213
- ```
198
+ 1. **Compiler**: Analyzes `await` points and transforms code into a deterministic state machine.
199
+ 2. **Redis**: Stores state, task queues, locks, and signals.
200
+ 3. **Runtime**:
201
+ * **Dispatcher**: Routes tasks to service functions.
202
+ * **Scheduler**: Manages timers (`bSleep`) and the Reaper.
203
+ * **Signal Bus**: Uses Redis Pub/Sub for real-time communication (see the subscriber sketch below).
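+
+ A small consumer sketch for that bus, based on the `getWorkflowHandle` and `WorkflowHandle` types in `dist/index.d.mts` (identifiers reused from the order example):
+
+ ```typescript
+ const handle = durableSystem.getWorkflowHandle(orderProcessingWorkflow, workflowId);
+
+ await handle.subscribe((signal, unsubscribe) => {
+   if (signal.name === 'status.update') {
+     console.log('Order status:', signal.payload.status);
+   } else if (signal.name === 'workflow:completed') {
+     console.log('Order finished:', signal.payload);
+     unsubscribe();
+   }
+ });
+ ```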
214
204
 
215
205
  ## License
216
206
 
217
- This project is licensed under the GPL-3.0 License. See the [LICENSE](LICENSE) file for details.
207
+ GPL-3.0
@@ -1,58 +1,59 @@
1
1
  #!/usr/bin/env node
2
- import R from"path";import{existsSync as L,mkdirSync as K,rmSync as _}from"fs";import b from"path";import*as k from"prettier";import{Node as g,Project as z,SyntaxKind as E,ts as B,VariableDeclarationKind as M}from"ts-morph";var U="bDurable",A=B.TypeFormatFlags.UseAliasDefinedOutsideCurrentScope|B.TypeFormatFlags.NoTruncation;async function H(e){let t=e.getFilePath(),s=e.getFullText(),n=await k.resolveConfig(t),a=await k.format(s,{...n,parser:"typescript"});e.replaceWithText(a)}async function O(e){console.log("Iniciando compilador de workflows duraderos...");let{inputDir:t,outputDir:s,packageName:n}=e,a=new z({tsConfigFilePath:b.resolve(process.cwd(),"tsconfig.json")}),i=a.addSourceFilesAtPaths(`${t}/**/*.ts`);L(s)&&(console.log(`Limpiando directorio de salida: ${s}`),_(s,{recursive:!0,force:!0})),K(s,{recursive:!0});let S=a.createDirectory(s);console.log(`Encontrados ${i.length} archivos de workflow para procesar.`);let o=[],c=[];for(let l of i){console.log(`
3
- Procesando archivo: ${l.getBaseName()}`);let r=l.getDescendantsOfKind(E.CallExpression).filter(d=>d.getExpression().getText()===U);if(r.length!==0)for(let d of r){let p=d.getParentIfKind(E.VariableDeclaration);if(!p)continue;let u=p.getName();console.log(` -> Transformando workflow: ${u}`);let[f]=d.getArguments();if(!g.isObjectLiteralExpression(f))continue;let x=f.getProperty("workflow");if(!x||!g.isPropertyAssignment(x))continue;let m=x.getInitializer();if(!m||!g.isArrowFunction(m))continue;let P=l.getBaseName().replace(/\.ts$/,".compiled.mts"),$=b.join(S.getPath(),P),I=a.createSourceFile($,"",{overwrite:!0});c.push(I),q(u,m,d,I,n),console.log(` -> Archivo generado: ${b.relative(process.cwd(),$)}`);let y=P;o.push({name:u,importPath:`./${y}`})}}if(o.length>0){let l=b.join(S.getPath(),"index.mts"),r=a.createSourceFile(l,"",{overwrite:!0});c.push(r),r.addStatements(`// Este archivo fue generado autom\xE1ticamente. NO EDITAR MANUALMENTE.
4
- `),r.addImportDeclaration({isTypeOnly:!0,moduleSpecifier:n,namedImports:["DurableFunction"]});for(let p of o)r.addImportDeclaration({moduleSpecifier:p.importPath,namedImports:[p.name]});r.addExportDeclaration({namedExports:o.map(p=>p.name)}),r.addStatements(`
5
- `),r.addVariableStatement({declarationKind:M.Const,declarations:[{name:"durableFunctions",type:"Map<string, DurableFunction<any, any>>",initializer:"new Map()"}]});let d=o.map(p=>`durableFunctions.set(${p.name}.name, ${p.name});`);r.addStatements(d),r.addStatements(`
2
+ import z from"path";import{existsSync as q,mkdirSync as G,rmSync as X}from"fs";import h from"path";import*as R from"prettier";import{Node as p,Project as Y,SyntaxKind as b,ts as W,VariableDeclarationKind as K}from"ts-morph";var J="bDurable",F=W.TypeFormatFlags.UseAliasDefinedOutsideCurrentScope|W.TypeFormatFlags.NoTruncation;async function Q(e){let t=e.getFilePath(),s=e.getFullText(),n=await R.resolveConfig(t),o=await R.format(s,{...n,parser:"typescript"});e.replaceWithText(o)}async function _(e){console.log("Iniciando compilador de workflows duraderos...");let{inputDir:t,outputDir:s,packageName:n}=e,o=new Y({tsConfigFilePath:h.resolve(process.cwd(),"tsconfig.json")}),i=o.addSourceFilesAtPaths(`${t}/**/*.ts`);q(s)&&(console.log(`Limpiando directorio de salida: ${s}`),X(s,{recursive:!0,force:!0})),G(s,{recursive:!0});let f=o.createDirectory(s);console.log(`Encontrados ${i.length} archivos de workflow para procesar.`);let a=[],c=[];for(let u of i){console.log(`
3
+ Procesando archivo: ${u.getBaseName()}`);let r=u.getDescendantsOfKind(b.CallExpression).filter(g=>g.getExpression().getText()===J);if(r.length!==0)for(let g of r){let l=g.getParentIfKind(b.VariableDeclaration);if(!l)continue;let d=l.getName();console.log(` -> Transformando workflow: ${d}`);let[m]=g.getArguments();if(!p.isObjectLiteralExpression(m))continue;let y=m.getProperty("workflow");if(!y||!p.isPropertyAssignment(y))continue;let x=y.getInitializer();if(!x||!p.isArrowFunction(x))continue;let N=u.getBaseName().replace(/\.ts$/,".compiled.mts"),A=h.join(f.getPath(),N),D=o.createSourceFile(A,"",{overwrite:!0});c.push(D),Z(d,x,g,D,n),console.log(` -> Archivo generado: ${h.relative(process.cwd(),A)}`);let w=N;a.push({name:d,importPath:`./${w}`})}}if(a.length>0){let u=h.join(f.getPath(),"index.mts"),r=o.createSourceFile(u,"",{overwrite:!0});c.push(r),r.addStatements(`// Este archivo fue generado autom\xE1ticamente. NO EDITAR MANUALMENTE.
4
+ `),r.addImportDeclaration({isTypeOnly:!0,moduleSpecifier:n,namedImports:["DurableFunction"]});for(let l of a)r.addImportDeclaration({moduleSpecifier:l.importPath,namedImports:[l.name]});r.addExportDeclaration({namedExports:a.map(l=>l.name)}),r.addStatements(`
5
+ `),r.addVariableStatement({declarationKind:K.Const,declarations:[{name:"durableFunctions",type:"Map<string, DurableFunction<any, any, any, any>>",initializer:"new Map()"}]});let g=a.map(l=>`durableFunctions.set(${l.name}.name, ${l.name});`);r.addStatements(g),r.addStatements(`
6
6
  `),r.addExportAssignment({isExportEquals:!1,expression:"durableFunctions"}),console.log(`
7
- -> Archivo de \xEDndice generado: ${b.basename(l)}`)}console.log(`
8
- Formateando archivos generados con Prettier...`);for(let l of c)await H(l);await a.save(),console.log(`
9
- Compilaci\xF3n completada exitosamente.`)}function q(e,t,s,n,a){let i=t.getBody();if(!g.isBlock(i))throw new Error(`El cuerpo del workflow '${e}' debe ser un bloque {}.`);let{clauses:S}=w(i.getStatements(),{step:0,persistedVariables:new Map}),o=t.getReturnType();o.getSymbol()?.getName()==="Promise"&&o.isObject()&&(o=o.getTypeArguments()[0]||o);let c=o.getText(void 0,A),l=new Set,r=t.getSourceFile(),d=s.getTypeArguments(),p=d.length>0?d[0].getText():"unknown";r.getImportDeclarations().forEach(m=>{if(m.getModuleSpecifierValue()===a)return;let h=m.getModuleSpecifierValue();if(h.includes(".workflow")){let y=b.parse(h),T=b.join(y.dir,y.base+".compiled.mts");!T.startsWith(".")&&!b.isAbsolute(T)&&(T="./"+T),h=T.replace(/\\/g,"/")}else h.startsWith(".")&&b.extname(h)===""&&(h+=".mjs");let P=[],$=[];m.getNamedImports().forEach(y=>{let T=y.getName(),F=y.getAliasNode()?.getText(),v=F?`${T} as ${F}`:T,V=(y.getNameNode().getSymbol()?.getAliasedSymbol()??y.getNameNode().getSymbol())?.getDeclarations()??[],j=V.some(D=>g.isEnumDeclaration(D));y.isTypeOnly()||!j&&V.every(D=>g.isInterfaceDeclaration(D)||g.isTypeAliasDeclaration(D))?$.push(v):P.push(v)}),P.length>0&&n.addImportDeclaration({moduleSpecifier:h,namedImports:P}),$.length>0&&n.addImportDeclaration({isTypeOnly:!0,moduleSpecifier:h,namedImports:$});let I=m.getDefaultImport();I&&n.addImportDeclaration({moduleSpecifier:h,defaultImport:I.getText()})}),r.getInterfaces().forEach(m=>{l.add(m.getText().startsWith("export")?m.getText():`export ${m.getText()}`)}),r.getTypeAliases().forEach(m=>{l.add(m.getText().startsWith("export")?m.getText():`export ${m.getText()}`)});let[u]=t.getParameters(),f="";if(u){let m=u.getNameNode().getText();m!=="input"&&(f=`const ${m} = input;`)}n.addImportDeclaration({isTypeOnly:!0,moduleSpecifier:a,namedImports:["DurableFunction","WorkflowContext","Instruction"]}),l.size>0&&(n.addStatements(`
7
+ -> Archivo de \xEDndice generado: ${h.basename(u)}`)}console.log(`
8
+ Formateando archivos generados con Prettier...`);for(let u of c)await Q(u);await o.save(),console.log(`
9
+ Compilaci\xF3n completada exitosamente.`)}function Z(e,t,s,n,o){let i=t.getBody();if(!p.isBlock(i))throw new Error(`El cuerpo del workflow '${e}' debe ser un bloque {}.`);let[f]=s.getArguments();if(!p.isObjectLiteralExpression(f))throw new Error("El argumento de bDurable debe ser un objeto.");let a=f.getProperty("version");if(!a||!p.isPropertyAssignment(a))throw new Error(`El workflow '${e}' debe tener una propiedad 'version'.`);let c=a.getInitializer();if(!c||!p.isStringLiteral(c))throw new Error(`La versi\xF3n del workflow '${e}' debe ser un string literal.`);let{clauses:u}=T(i.getStatements(),{step:0,persistedVariables:new Map}),r=t.getReturnType();r.getSymbol()?.getName()==="Promise"&&r.isObject()&&(r=r.getTypeArguments()[0]||r);let g=r.getText(void 0,F),l=new Set,d=t.getSourceFile(),m=s.getTypeArguments(),y=m.length>0?m[0].getText():"unknown",x=m.length>2?m[2].getText():"Record<string, never>",k=m.length>3?m[3].getText():"Record<string, never>";d.getImportDeclarations().forEach(S=>{if(S.getModuleSpecifierValue()===o)return;let E=S.getModuleSpecifierValue();if(E.includes(".workflow")){let $=h.parse(E),P=h.join($.dir,$.base+".compiled.mts");!P.startsWith(".")&&!h.isAbsolute(P)&&(P="./"+P),E=P.replace(/\\/g,"/")}else E.startsWith(".")&&h.extname(E)===""&&(E+=".mjs");let O=[],V=[];S.getNamedImports().forEach($=>{let P=$.getName(),M=$.getAliasNode()?.getText(),j=M?`${P} as ${M}`:P,L=($.getNameNode().getSymbol()?.getAliasedSymbol()??$.getNameNode().getSymbol())?.getDeclarations()??[],H=L.some(v=>p.isEnumDeclaration(v));$.isTypeOnly()||!H&&L.every(v=>p.isInterfaceDeclaration(v)||p.isTypeAliasDeclaration(v))?V.push(j):O.push(j)}),O.length>0&&n.addImportDeclaration({moduleSpecifier:E,namedImports:O}),V.length>0&&n.addImportDeclaration({isTypeOnly:!0,moduleSpecifier:E,namedImports:V});let B=S.getDefaultImport();B&&n.addImportDeclaration({moduleSpecifier:E,defaultImport:B.getText()})}),d.getInterfaces().forEach(S=>{l.add(S.getText().startsWith("export")?S.getText():`export ${S.getText()}`)}),d.getTypeAliases().forEach(S=>{l.add(S.getText().startsWith("export")?S.getText():`export ${S.getText()}`)});let[N]=t.getParameters(),A="";if(N){let S=N.getNameNode().getText();S!=="input"&&(A=`const ${S} = input;`)}n.addImportDeclaration({isTypeOnly:!0,moduleSpecifier:o,namedImports:["DurableFunction","WorkflowContext","Instruction"]}),l.size>0&&(n.addStatements(`
10
10
  `),n.addStatements(Array.from(l))),n.addStatements(`
11
11
  // Este archivo fue generado autom\xE1ticamente. NO EDITAR MANUALMENTE.
12
- `);let x=`{
12
+ `);let D=c.getLiteralValue(),w=`{
13
13
  __isDurable: true,
14
14
  name: '${e}',
15
- async execute(context: WorkflowContext<${p}>): Promise<Instruction<${c}>> {
15
+ version: '${D}',
16
+ async execute(context: WorkflowContext<${y}>): Promise<Instruction<${g}>> {
16
17
  const { input, state, result, log, workflowId } = context;
17
- ${f}
18
+ ${A}
18
19
  while (true) {
19
20
  switch (context.step) {
20
- ${S.join(`
21
+ ${u.join(`
21
22
  `)}
22
23
  default:
23
24
  throw new Error(\`Paso desconocido: \${context.step}\`);
24
25
  }
25
26
  }
26
27
  }
27
- }`;n.addVariableStatement({isExported:!0,declarationKind:M.Const,declarations:[{name:e,type:`DurableFunction<${p}, ${c}>`,initializer:x}]}),n.organizeImports()}function w(e,t){if(e.length===0){let f=[];if(t.pendingStateAssignment){let x=`case ${t.step}: {
28
+ }`;n.addVariableStatement({isExported:!0,declarationKind:K.Const,declarations:[{name:e,type:`DurableFunction<${y}, ${g}, ${x}, ${k}>`,initializer:w}]}),n.organizeImports()}function T(e,t){if(e.length===0){let m=[];if(t.pendingStateAssignment){let y=`case ${t.step}: {
28
29
  ${t.pendingStateAssignment}
29
30
  return { type: 'COMPLETE', result: undefined };
30
- }`;f.push(x)}return{clauses:f,nextStep:t.step+1}}let{syncBlock:s,durableStatement:n,nextStatements:a}=te(e),{rewrittenSyncStatements:i,newlyPersistedVariables:S}=Y(s,n?[n,...a]:[],t.persistedVariables);t.pendingStateAssignment&&i.unshift(t.pendingStateAssignment);let o=new Map([...t.persistedVariables,...S]);if(!n){let f=i.join(`
31
- `),m=s.length>0&&g.isReturnStatement(s[s.length-1])?"":`
31
+ }`;m.push(y)}return{clauses:m,nextStep:t.step+1}}let{syncBlock:s,durableStatement:n,nextStatements:o}=ie(e),{rewrittenSyncStatements:i,newlyPersistedVariables:f}=ne(s,n?[n,...o]:[],t.persistedVariables);t.pendingStateAssignment&&i.unshift(t.pendingStateAssignment);let a=new Map([...t.persistedVariables,...f]);if(!n){let m=i.join(`
32
+ `),x=s.length>0&&p.isReturnStatement(s[s.length-1])?"":`
32
33
  return { type: 'COMPLETE', result: undefined };`;return{clauses:[`case ${t.step}: {
33
- ${f}${m}
34
- }`],nextStep:t.step+1}}if(g.isIfStatement(n))return G(n,a,{...t,persistedVariables:o},i);if(g.isTryStatement(n))return X(n,a,{...t,persistedVariables:o},i);let{instruction:c,nextPendingStateAssignment:l}=Q(n,o);i.push(c);let r=i.join(`
35
- `),d=`case ${t.step}: {
34
+ ${m}${x}
35
+ }`],nextStep:t.step+1}}if(p.isIfStatement(n))return ee(n,o,{...t,persistedVariables:a},i);if(p.isTryStatement(n))return te(n,o,{...t,persistedVariables:a},i);let{instruction:c,nextPendingStateAssignment:u}=re(n,a);i.push(c);let r=i.join(`
36
+ `),g=`case ${t.step}: {
36
37
  ${r}
37
- }`,p={step:t.step+1,persistedVariables:o,pendingStateAssignment:l},u=w(a,p);return{clauses:[d,...u.clauses],nextStep:u.nextStep}}function G(e,t,s,n){let a=C(e.getExpression(),s.persistedVariables),i=e.getThenStatement(),S=g.isBlock(i)?i.getStatements():[i],o=w(S,{step:s.step+1,persistedVariables:new Map(s.persistedVariables)}),c,l=e.getElseStatement();if(l){let x=g.isBlock(l)?l.getStatements():[l];c=w(x,{step:o.nextStep,persistedVariables:new Map(s.persistedVariables)})}let r=c?c.nextStep:o.nextStep,d=w(t,{step:r,persistedVariables:s.persistedVariables}),p=n.join(`
38
- `),u=o.nextStep;return{clauses:[`
38
+ }`,l={step:t.step+1,persistedVariables:a,pendingStateAssignment:u},d=T(o,l);return{clauses:[g,...d.clauses],nextStep:d.nextStep}}function ee(e,t,s,n){let o=I(e.getExpression(),s.persistedVariables),i=e.getThenStatement(),f=p.isBlock(i)?i.getStatements():[i],a=T(f,{step:s.step+1,persistedVariables:new Map(s.persistedVariables)}),c,u=e.getElseStatement();if(u){let y=p.isBlock(u)?u.getStatements():[u];c=T(y,{step:a.nextStep,persistedVariables:new Map(s.persistedVariables)})}let r=c?c.nextStep:a.nextStep,g=T(t,{step:r,persistedVariables:s.persistedVariables}),l=n.join(`
39
+ `),d=a.nextStep;return{clauses:[`
39
40
  case ${s.step}: {
40
- ${p}
41
- if (${a}) {
41
+ ${l}
42
+ if (${o}) {
42
43
  context.step = ${s.step+1};
43
44
  } else {
44
- ${l?`context.step = ${u};`:`context.step = ${r};`}
45
+ ${u?`context.step = ${d};`:`context.step = ${r};`}
45
46
  }
46
47
  break;
47
48
  }
48
- `,...o.clauses,...c?c.clauses:[],...d.clauses],nextStep:d.nextStep}}function X(e,t,s,n){let{step:a,persistedVariables:i}=s,S=e.getTryBlock(),o=e.getCatchClause(),c=e.getFinallyBlock(),l=w(S.getStatements(),{step:a+1,persistedVariables:new Map(i)}),r,d,p=l.nextStep;if(o){let y=o.getBlock(),T=o.getVariableDeclaration();T&&(d=T.getName()),r=w(y.getStatements(),{step:p,persistedVariables:new Map(i)})}let u,f=r?r.nextStep:p;c&&(u=w(c.getStatements(),{step:f,persistedVariables:new Map(i)}));let x=u?u.nextStep:f,m=w(t,{step:x,persistedVariables:i}),h=`{ catchStep: ${o?p:"undefined"}, finallyStep: ${c?f:"undefined"} }`,P=`
49
- case ${a}: {
49
+ `,...a.clauses,...c?c.clauses:[],...g.clauses],nextStep:g.nextStep}}function te(e,t,s,n){let{step:o,persistedVariables:i}=s,f=e.getTryBlock(),a=e.getCatchClause(),c=e.getFinallyBlock(),u=T(f.getStatements(),{step:o+1,persistedVariables:new Map(i)}),r,g,l=u.nextStep;if(a){let w=a.getBlock(),S=a.getVariableDeclaration();S&&(g=S.getName()),r=T(w.getStatements(),{step:l,persistedVariables:new Map(i)})}let d,m=r?r.nextStep:l;c&&(d=T(c.getStatements(),{step:m,persistedVariables:new Map(i)}));let y=d?d.nextStep:m,x=T(t,{step:y,persistedVariables:i}),k=`{ catchStep: ${a?l:"undefined"}, finallyStep: ${c?m:"undefined"} }`,N=`
50
+ case ${o}: {
50
51
  ${n.join(`
51
52
  `)}
52
53
  state.tryCatchStack = state.tryCatchStack || [];
53
- state.tryCatchStack.push(${h});
54
- context.step = ${a+1}; // Salta al inicio del bloque try
54
+ state.tryCatchStack.push(${k});
55
+ context.step = ${o+1}; // Salta al inicio del bloque try
55
56
  break;
56
57
  }
57
- `,$=l.clauses.pop()||"",I=c?f:x;if(l.clauses.push($.replace(/return { type: 'COMPLETE'.* };/,`context.step = ${I}; break;`)),r){if(d){let T=r.clauses[0]||`case ${p}: {}`;r.clauses[0]=T.replace("{",`{
58
- const ${d} = result as Error;`)}let y=r.clauses.pop()||"";r.clauses.push(y.replace(/return { type: 'COMPLETE'.* };/,`context.step = ${I}; break;`))}if(u){let y=u.clauses.pop()||"";u.clauses.push(y.replace(/return { type: 'COMPLETE'.* };/,`state.tryCatchStack?.pop(); context.step = ${x}; break;`))}return{clauses:[P,...l.clauses,...r?r.clauses:[],...u?u.clauses:[],...m.clauses],nextStep:m.nextStep}}function Y(e,t,s){let n=[],a=new Map,i=Z(t),S=new Map(s);for(let o of e){let c=!1;if(g.isVariableStatement(o))for(let l of o.getDeclarations()){let r=l.getInitializer();if(!r)continue;let d=J(l),p=d.filter(u=>i.has(u.name));if(p.length>0){let u=C(r,s);for(let{name:f,type:x}of p){a.set(f,{type:x}),S.set(f,{type:x});let m=d.length>1?`${u}.${f}`:u;n.push(`state.${f} = ${m};`)}p.length===d.length&&(c=!0)}}c||n.push(C(o,S))}return{rewrittenSyncStatements:n,newlyPersistedVariables:a}}function J(e){let t=e.getNameNode(),s=[];if(g.isIdentifier(t)){let n=e.getType().getText(e,A);s.push({name:t.getText(),type:n})}else if(g.isObjectBindingPattern(t))for(let n of t.getElements()){let a=n.getName(),i=n.getType().getText(n,A);s.push({name:a,type:i})}return s}function Q(e,t){if(g.isReturnStatement(e))return{instruction:`return { type: 'COMPLETE', result: ${e.getExpression()?C(e.getExpressionOrThrow(),t):"undefined"} };`,nextPendingStateAssignment:void 0};let s,n=e.getFirstDescendantByKind(E.VariableDeclaration);if(n){let i=n.getName(),S=n.getType().getText(n,A);t.set(i,{type:S}),s=`state.${i} = result;`}let a=e.getFirstDescendantByKind(E.AwaitExpression);if(a){let i=a.getExpression();if(g.isCallExpression(i))return{instruction:`return ${ee(i,t)};`,nextPendingStateAssignment:s}}return{instruction:C(e,t),nextPendingStateAssignment:s}}function Z(e){let t=new Set;for(let s of e)s.getDescendantsOfKind(E.Identifier).forEach(n=>{t.add(n.getText())});return t}function C(e,t){let s=e.getProject().createSourceFile(`temp_rewrite_${Math.random()}.ts`,`const temp = ${e.getText()};`,{overwrite:!0}),n=s.getVariableDeclarationOrThrow("temp").getInitializerOrThrow(),a=[n,...n.getDescendants()].reverse();for(let c of a)if(g.isIdentifier(c)&&!c.wasForgotten()&&t.has(c.getText())){let l=c.getText(),r=c.getParent(),d=g.isVariableDeclaration(r)&&r.getNameNode()===c,p=g.isPropertyAccessExpression(r)&&r.getNameNode()===c||g.isPropertyAssignment(r)&&r.getNameNode()===c,u=g.isBindingElement(r)&&r.getNameNode()===c;if(!d&&!p&&!u){let f=t.get(l);c.replaceWithText(`(state.${l} as ${f.type})`)}}let i=s.getFullText().trim();s.forget();let S="const temp = ",o=i;return o.startsWith(S)&&(o=o.substring(S.length)),o.endsWith(";")&&(o=o.slice(0,-1)),o}function ee(e,t){let s=e.getExpression(),n,a=!1;g.isPropertyAccessExpression(s)?(s.getExpression().getText()==="context"&&(a=!0),n=s.getName()):n=s.getText();let i=e.getArguments().map(r=>C(r,t)).join(", ");if(a){if(n==="bSleep")return`{ type: 'SCHEDULE_SLEEP', duration: ${i} }`;if(n==="bWaitForEvent")return`{ type: 'WAIT_FOR_EVENT', eventName: ${i} }`;if(n==="bExecute"){let[r,d]=e.getArguments(),p=r.getText(),u=d?C(d,t):"undefined";return`{ type: 'EXECUTE_SUBWORKFLOW', workflowName: ${p}.name, input: ${u} }`}throw new Error(`Funci\xF3n de contexto durable desconocida: '${n}'.`)}let S=s.getSymbol();if(!S)throw new Error(`S\xEDmbolo no encontrado para '${n}'.`);let o=S.getDeclarations()[0]?.asKind(E.ImportSpecifier);if(!o)throw new Error(`'${n}' debe ser importada.`);let c=o.getImportDeclaration().getModuleSpecifierSourceFileOrThrow();return`{ type: 'SCHEDULE_TASK', modulePath: 
'${b.relative(process.cwd(),c.getFilePath()).replace(/\\/g,"/")}', exportName: '${n}', args: [${i}] }`}function N(e){for(let t of e.getDescendantsOfKind(E.AwaitExpression)){let s=t.getExpressionIfKind(E.CallExpression);if(s){let n=s.getExpression();if(g.isPropertyAccessExpression(n)){let a=n.getName();if(n.getExpression().getText()==="context"&&(a==="bSleep"||a==="bWaitForEvent"||a==="bExecute")||n.getSymbol()?.getDeclarations()[0]?.isKind(E.ImportSpecifier))return!0}else if(n.getSymbol()?.getDeclarations()[0]?.isKind(E.ImportSpecifier))return!0}}if(g.isTryStatement(e)&&(N(e.getTryBlock())||e.getCatchClause()&&N(e.getCatchClause().getBlock())||e.getFinallyBlock()&&N(e.getFinallyBlock())))return!0;if(g.isIfStatement(e)){let t=N(e.getThenStatement()),s=e.getElseStatement()?N(e.getElseStatement()):!1;return t||s}return g.isBlock(e)?e.getStatements().some(N):!1}function te(e){for(let t=0;t<e.length;t++){let s=e[t];if(g.isReturnStatement(s)||N(s)||g.isTryStatement(s))return{syncBlock:e.slice(0,t),durableStatement:s,nextStatements:e.slice(t+1)}}return{syncBlock:e,durableStatement:null,nextStatements:[]}}var W=e=>{let t=process.argv.indexOf(e);if(t!==-1&&process.argv.length>t+1)return process.argv[t+1]};async function ne(){let e=W("--in"),t=W("--out");(!e||!t)&&(console.error("Uso: b-durable-compiler --in <directorio_entrada> --out <directorio_salida>"),process.exit(1));let s=R.resolve(process.cwd(),e),n=R.resolve(process.cwd(),t);await O({inputDir:s,outputDir:n,packageName:"@bobtail.software/b-durable"})}ne().catch(e=>{console.error("Error durante la compilaci\xF3n:",e),process.exit(1)});
58
+ `,A=u.clauses.pop()||"",D=c?m:y;if(u.clauses.push(A.replace(/return { type: 'COMPLETE'.* };/,`context.step = ${D}; break;`)),r){if(g){let S=r.clauses[0]||`case ${l}: {}`;r.clauses[0]=S.replace("{",`{
59
+ const ${g} = result as any;`)}let w=r.clauses.pop()||"";r.clauses.push(w.replace(/return { type: 'COMPLETE'.* };/,`context.step = ${D}; break;`))}if(d){let w=d.clauses.pop()||"";d.clauses.push(w.replace(/return { type: 'COMPLETE'.* };/,`state.tryCatchStack?.pop(); context.step = ${y}; break;`))}return{clauses:[N,...u.clauses,...r?r.clauses:[],...d?d.clauses:[],...x.clauses],nextStep:x.nextStep}}function ne(e,t,s){let n=[],o=new Map,i=oe(t),f=new Map(s);for(let a of e){let c=!1;if(p.isVariableStatement(a))for(let u of a.getDeclarations()){let r=u.getInitializer();if(!r)continue;let g=se(u),l=g.filter(d=>i.has(d.name));if(l.length>0){let d=I(r,s);for(let{name:m,type:y}of l){o.set(m,{type:y}),f.set(m,{type:y});let x=g.length>1?`${d}.${m}`:d;n.push(`state.${m} = ${x};`)}l.length===g.length&&(c=!0)}}c||n.push(I(a,f))}return{rewrittenSyncStatements:n,newlyPersistedVariables:o}}function se(e){let t=e.getNameNode(),s=[];if(p.isIdentifier(t)){let n=e.getType().getText(e,F);s.push({name:t.getText(),type:n})}else if(p.isObjectBindingPattern(t))for(let n of t.getElements()){let o=n.getName(),i=n.getType().getText(n,F);s.push({name:o,type:i})}return s}function re(e,t){if(p.isReturnStatement(e))return{instruction:`return { type: 'COMPLETE', result: ${e.getExpression()?I(e.getExpressionOrThrow(),t):"undefined"} };`,nextPendingStateAssignment:void 0};let s,n=e.getFirstDescendantByKind(b.VariableDeclaration);if(n){let i=n.getName(),f=n.getType().getText(n,F);t.set(i,{type:f}),s=`state.${i} = result;`}let o=e.getFirstDescendantByKind(b.AwaitExpression);if(o){let i=o.getExpression();if(p.isCallExpression(i))return{instruction:`return ${ae(i,t)};`,nextPendingStateAssignment:s}}return{instruction:I(e,t),nextPendingStateAssignment:s}}function oe(e){let t=new Set;for(let s of e)s.getDescendantsOfKind(b.Identifier).forEach(n=>{t.add(n.getText())});return t}function I(e,t){let s=e.getProject().createSourceFile(`temp_rewrite_${Math.random()}.ts`,`const temp = ${e.getText()};`,{overwrite:!0}),n=s.getVariableDeclarationOrThrow("temp").getInitializerOrThrow(),o=[n,...n.getDescendants()].reverse();for(let c of o)if(p.isIdentifier(c)&&!c.wasForgotten()&&t.has(c.getText())){let u=c.getText(),r=c.getParent(),g=p.isVariableDeclaration(r)&&r.getNameNode()===c,l=p.isPropertyAccessExpression(r)&&r.getNameNode()===c||p.isPropertyAssignment(r)&&r.getNameNode()===c,d=p.isBindingElement(r)&&r.getNameNode()===c;if(!g&&!l&&!d){let m=t.get(u);c.replaceWithText(`(state.${u} as ${m.type})`)}}let i=s.getFullText().trim();s.forget();let f="const temp = ",a=i;return a.startsWith(f)&&(a=a.substring(f.length)),a.endsWith(";")&&(a=a.slice(0,-1)),a}function ae(e,t){let s=e.getExpression(),n,o=!1;p.isPropertyAccessExpression(s)?(s.getExpression().getText()==="context"&&(o=!0),n=s.getName()):n=s.getText();let i=e.getArguments().map(r=>I(r,t)).join(", ");if(o)switch(n){case"bSleep":return`{ type: 'SCHEDULE_SLEEP', duration: ${i} }`;case"bWaitForEvent":return`{ type: 'WAIT_FOR_EVENT', eventName: ${i} }`;case"bExecute":{let[r,g]=e.getArguments(),l=r.getText(),d=g?I(g,t):"undefined";return`{ type: 'EXECUTE_SUBWORKFLOW', workflowName: ${l}.name, input: ${d} }`}case"bSignal":{let[r,g]=e.getArguments().map(l=>I(l,t));return`{ type: 'SEND_SIGNAL', signalName: ${r}, payload: ${g} }`}default:throw new Error(`Funci\xF3n de contexto durable desconocida: '${n}'.`)}let f=s.getSymbol();if(!f)throw new Error(`S\xEDmbolo no encontrado para '${n}'.`);let a=f.getDeclarations()[0]?.asKind(b.ImportSpecifier);if(!a)throw new Error(`'${n}' debe ser 
importada.`);let c=a.getImportDeclaration().getModuleSpecifierSourceFileOrThrow();return`{ type: 'SCHEDULE_TASK', modulePath: '${h.relative(process.cwd(),c.getFilePath()).replace(/\\/g,"/")}', exportName: '${n}', args: [${i}] }`}function C(e){for(let t of e.getDescendantsOfKind(b.AwaitExpression)){let s=t.getExpressionIfKind(b.CallExpression);if(s){let n=s.getExpression();if(p.isPropertyAccessExpression(n)){let o=n.getName();if(n.getExpression().getText()==="context"&&(o==="bSleep"||o==="bWaitForEvent"||o==="bExecute"||o==="bSignal")||n.getSymbol()?.getDeclarations()[0]?.isKind(b.ImportSpecifier))return!0}else if(n.getSymbol()?.getDeclarations()[0]?.isKind(b.ImportSpecifier))return!0}}if(p.isTryStatement(e)&&(C(e.getTryBlock())||e.getCatchClause()&&C(e.getCatchClause().getBlock())||e.getFinallyBlock()&&C(e.getFinallyBlock())))return!0;if(p.isIfStatement(e)){let t=C(e.getThenStatement()),s=e.getElseStatement()?C(e.getElseStatement()):!1;return t||s}return p.isBlock(e)?e.getStatements().some(C):!1}function ie(e){for(let t=0;t<e.length;t++){let s=e[t];if(p.isReturnStatement(s)||C(s)||p.isTryStatement(s))return{syncBlock:e.slice(0,t),durableStatement:s,nextStatements:e.slice(t+1)}}return{syncBlock:e,durableStatement:null,nextStatements:[]}}var U=e=>{let t=process.argv.indexOf(e);if(t!==-1&&process.argv.length>t+1)return process.argv[t+1]};async function ce(){let e=U("--in"),t=U("--out");(!e||!t)&&(console.error("Uso: b-durable-compiler --in <directorio_entrada> --out <directorio_salida>"),process.exit(1));let s=z.resolve(process.cwd(),e),n=z.resolve(process.cwd(),t);await _({inputDir:s,outputDir:n,packageName:"@bobtail.software/b-durable"})}ce().catch(e=>{console.error("Error durante la compilaci\xF3n:",e),process.exit(1)});
package/dist/index.d.mts CHANGED
@@ -1,6 +1,45 @@
1
1
  import Redis from 'ioredis';
2
2
  import ms from 'ms';
3
3
 
4
+ interface Logger {
5
+ info(message: string, meta?: Record<string, unknown>): void;
6
+ error(message: string, meta?: Record<string, unknown>): void;
7
+ warn(message: string, meta?: Record<string, unknown>): void;
8
+ debug(message: string, meta?: Record<string, unknown>): void;
9
+ }
10
+ interface WorkflowStateInfo<TInput = unknown, TOutput = unknown> {
11
+ workflowId: string;
12
+ name: string;
13
+ version: string;
14
+ status: string;
15
+ step: number;
16
+ input: TInput;
17
+ output?: TOutput;
18
+ state: Record<string, unknown>;
19
+ error?: string;
20
+ createdAt?: number;
21
+ updatedAt?: number;
22
+ pendingTask?: {
23
+ name: string;
24
+ attempts: number;
25
+ };
26
+ }
27
+ interface StartOptions<TInput, TSignals, TOutput> {
28
+ input: TInput;
29
+ workflowId?: string;
30
+ /**
31
+ * An optional callback to receive signals emitted by the workflow in real time.
32
+ * The subscription is guaranteed to be active before the first step runs.
33
+ */
34
+ subscribe?: (signal: WorkflowSignal<TSignals, TOutput>) => void;
35
+ }
36
+ interface StartedWorkflowHandle {
37
+ workflowId: string;
38
+ /**
39
+ * Closes the signal subscription created in `start`.
40
+ */
41
+ unsubscribe: () => Promise<void>;
42
+ }
4
43
  interface WorkflowState {
5
44
  tryCatchStack?: {
6
45
  catchStep?: number;
@@ -14,7 +53,7 @@ interface WorkflowContext<TInput = unknown> {
14
53
  input: TInput;
15
54
  state: WorkflowState;
16
55
  result?: unknown;
17
- log: (message: string) => void;
56
+ log: (message: string, meta?: Record<string, unknown>) => void;
18
57
  }
19
58
  type Instruction<TOutput = unknown> = {
20
59
  type: 'SCHEDULE_TASK';
@@ -31,14 +70,58 @@ type Instruction<TOutput = unknown> = {
31
70
  type: 'EXECUTE_SUBWORKFLOW';
32
71
  workflowName: string;
33
72
  input: unknown;
73
+ } | {
74
+ type: 'SEND_SIGNAL';
75
+ signalName: string;
76
+ payload: unknown;
34
77
  } | {
35
78
  type: 'COMPLETE';
36
79
  result: TOutput;
37
80
  };
38
- interface DurableFunction<TInput = unknown, TOutput = unknown> {
81
+ /**
82
+ * Represents a signal emitted by a workflow as a discriminated union.
83
+ * This allows safe type narrowing on the consumer side.
84
+ * Includes both the custom signals (TSignals) and the system signals.
85
+ */
86
+ type WorkflowSignal<TSignals, TOutput> = {
87
+ [K in keyof TSignals]: {
88
+ name: K;
89
+ payload: TSignals[K];
90
+ };
91
+ }[keyof TSignals] | {
92
+ name: 'workflow:completed';
93
+ payload: TOutput;
94
+ } | {
95
+ name: 'workflow:failed';
96
+ payload: {
97
+ message: string;
98
+ };
99
+ };
100
+ /**
101
+ * Allows subscribing to an already existing workflow.
102
+ */
103
+ interface WorkflowHandle<TSignals = unknown, TOutput = unknown> {
104
+ workflowId: string;
105
+ /**
106
+ * Subscribes to the signals emitted by this specific workflow instance.
107
+ * @param onSignal A strongly typed callback invoked for each signal.
108
+ * @returns A promise that resolves to an object with an `unsubscribe` function.
109
+ */
110
+ subscribe: (onSignal: (signal: WorkflowSignal<TSignals, TOutput>, unsubscribe: () => void) => void) => Promise<{
111
+ unsubscribe: () => void;
112
+ }>;
113
+ }
114
+ declare class WorkflowCancellationError extends Error {
115
+ readonly isCancellation = true;
116
+ constructor(message: string);
117
+ }
118
+ interface DurableFunction<TInput = unknown, TOutput = unknown, TEvents = Record<string, never>, TSignals = Record<string, never>> {
39
119
  __isDurable: true;
40
120
  name: string;
121
+ version: string;
41
122
  execute: (context: WorkflowContext<TInput>) => Promise<Instruction<TOutput>>;
123
+ _TEvents?: TEvents;
124
+ _TSignals?: TSignals;
42
125
  }
43
126
 
44
127
  declare class DurableRuntime {
@@ -47,11 +130,19 @@ declare class DurableRuntime {
47
130
  private workerId;
48
131
  private isRunning;
49
132
  private schedulerInterval;
133
+ private heartbeatInterval;
50
134
  private readonly sourceRoot;
135
+ private readonly pollingInterval;
136
+ private readonly logger;
137
+ private readonly maxTaskRetries;
51
138
  constructor(options: {
52
139
  sourceRoot: string;
140
+ retention?: ms.StringValue;
141
+ pollingInterval?: number;
142
+ logger?: Logger;
53
143
  });
54
- start<TInput, TOutput>(durableFn: DurableFunction<TInput, TOutput>, input: TInput, parentId?: string): Promise<string>;
144
+ getState(workflowId: string): Promise<WorkflowStateInfo | null>;
145
+ start<TInput, TOutput, TEvents, TSignals>(durableFn: DurableFunction<TInput, TOutput, TEvents, TSignals>, options: StartOptions<TInput, TSignals, TOutput>, parentId?: string): Promise<StartedWorkflowHandle>;
55
146
  private scheduleExecution;
56
147
  private _executeStep;
57
148
  private handleInstruction;
@@ -59,18 +150,20 @@ declare class DurableRuntime {
59
150
  private resumeParentWorkflow;
60
151
  private propagateFailureToParent;
61
152
  sendEvent<T>(workflowId: string, eventName: string, payload: T): Promise<void>;
153
+ cancel(workflowId: string, reason: string): Promise<void>;
62
154
  private startScheduler;
155
+ private checkSleepers;
156
+ private reapDeadWorkers;
157
+ private startHeartbeat;
63
158
  private startWorker;
64
- run(durableFns: Map<string, DurableFunction<unknown>>): void;
159
+ run(durableFns: Map<string, DurableFunction<unknown, unknown, any, any>>): void;
65
160
  stop(): void;
66
161
  }
67
162
 
68
- type DurableWorkflowFn$1<TInput, TOutput> = (input: TInput, ...args: any[]) => Promise<TOutput>;
69
-
70
163
  /**
71
164
  * The execution context provided to each workflow, with typed durability methods.
72
165
  */
73
- interface DurableContext<TEvents extends Record<string, any> = Record<string, never>> extends Pick<WorkflowContext, 'log' | 'workflowId'> {
166
+ interface DurableContext<TEvents = Record<string, never>, TSignals = Record<string, never>> extends Pick<WorkflowContext, 'log' | 'workflowId'> {
74
167
  /**
75
168
  * Durably pauses the workflow's execution.
76
169
  * @param duration A time string such as '2 days', '10h', '7s'.
@@ -89,40 +182,69 @@ interface DurableContext<TEvents extends Record<string, any> = Record<string, ne
89
182
  * @param input The input for the sub-workflow.
90
183
  * @returns A promise that resolves with the sub-workflow's result.
91
184
  */
92
- bExecute<TInput, TOutput>(workflow: DurableWorkflowFn$1<TInput, TOutput>, input: TInput): Promise<TOutput>;
185
+ bExecute<TInput, TOutput, TWorkflowEvents, TWorkflowSignals>(workflow: DurableFunction<TInput, TOutput, TWorkflowEvents, TWorkflowSignals>, input: TInput): Promise<TOutput>;
186
+ /**
187
+ * Emits a signal or notification with a payload from the workflow.
188
+ * This operation is durable but does not block execution; the workflow continues immediately afterwards.
189
+ * The signal name and payload type are validated against the workflow's signal contract.
190
+ * @param signalName The name of the signal.
191
+ * @param payload The data to send with the signal.
192
+ */
193
+ bSignal<K extends keyof TSignals>(signalName: K, payload: TSignals[K]): Promise<void>;
93
194
  }
94
- type DurableWorkflowFn<TInput, TOutput, TEvents extends Record<string, any> = Record<string, never>> = (input: TInput, context: DurableContext<TEvents>) => Promise<TOutput>;
95
- interface DurableWorkflowDef<TInput, TOutput, TEvents extends Record<string, any> = Record<string, never>> {
195
+ type DurableWorkflowFn<TInput, TOutput, TEvents = Record<string, never>, TSignals = Record<string, never>> = (input: TInput, context: DurableContext<TEvents, TSignals>) => Promise<TOutput>;
196
+ interface DurableWorkflowDef<TInput, TOutput, TEvents = Record<string, never>, TSignals = Record<string, never>> {
96
197
  /**
97
198
  * The async function containing the workflow's logic.
98
199
  */
99
- workflow: DurableWorkflowFn<TInput, TOutput, TEvents>;
200
+ workflow: DurableWorkflowFn<TInput, TOutput, TEvents, TSignals>;
201
+ /**
202
+ * The workflow's version. Used to identify different versions of a workflow.
203
+ */
204
+ version: string;
100
205
  }
101
206
  /**
102
207
  * Marker that lets the compiler identify and transform a function into a durable workflow.
103
208
  * This function is a passthrough at runtime; its sole purpose is static analysis.
104
209
  */
105
- declare const bDurable: <TInput = unknown, TOutput = unknown, TEvents extends Record<string, any> = Record<string, never>>(def: DurableWorkflowDef<TInput, TOutput, TEvents>) => DurableWorkflowFn$1<TInput, TOutput>;
210
+ declare const bDurable: <TInput = any, TOutput = any, TEvents = Record<string, never>, TSignals = Record<string, never>>(def: DurableWorkflowDef<TInput, TOutput, TEvents, TSignals>) => DurableFunction<TInput, TOutput, TEvents, TSignals>;
106
211
 
107
- interface BDurableAPI<TEvents extends Record<string, any> = Record<string, never>> {
108
- start: <TInput, TOutput>(durableFn: DurableFunction<TInput, TOutput>, input: TInput) => Promise<string>;
212
+ interface BDurableAPI {
213
+ start: <TInput, TOutput, TEvents, TSignals>(durableFn: DurableFunction<TInput, TOutput, TEvents, TSignals>, options: StartOptions<TInput, TSignals, TOutput>) => Promise<StartedWorkflowHandle>;
109
214
  stop: () => void;
110
215
  runtime: DurableRuntime;
216
+ cancel: (workflowId: string, reason: string) => Promise<void>;
217
+ getState: (workflowId: string) => Promise<WorkflowStateInfo | null>;
111
218
  /**
112
219
  * Sends an event to a running workflow that is paused waiting for that event.
113
- * This function is strictly typed based on the provided global events type.
220
+ * This function is strictly typed based on the event types of the workflow definition.
221
+ * @param durableFn The workflow definition the event will be sent to. Used for type inference.
114
222
  * @param workflowId The ID of the workflow the event will be sent to.
115
223
  * @param eventName The name of the event. It will be autocompleted by the editor.
116
224
  * @param payload The event payload. Its type must match the one defined for `eventName`.
117
225
  */
118
- sendEvent: <K extends keyof TEvents>(workflowId: string, eventName: K, payload: TEvents[K]) => Promise<void>;
226
+ sendEvent: <TInput, TOutput, TWorkflowEvents, TSignals, K extends keyof TWorkflowEvents>(durableFn: DurableFunction<TInput, TOutput, TWorkflowEvents, TSignals>, workflowId: string, eventName: K, payload: TWorkflowEvents[K]) => Promise<void>;
227
+ /**
228
+ * Obtiene un "handle" para una instancia de workflow existente, permitiendo suscribirse a sus señales.
229
+ * @param durableFn La definición del workflow. Se usa para la inferencia de tipos de las señales.
230
+ * @param workflowId El ID de la instancia del workflow.
231
+ * @returns Un WorkflowHandle para la instancia especificada.
232
+ */
233
+ getWorkflowHandle: <TInput, TOutput, TEvents, TSignals>(durableFn: DurableFunction<TInput, TOutput, TEvents, TSignals>, workflowId: string) => WorkflowHandle<TSignals, TOutput>;
119
234
  }
120
235
  interface InitializeOptions {
121
- durableFunctions: Map<string, DurableFunction<unknown, unknown>>;
236
+ durableFunctions: Map<string, DurableFunction<unknown, unknown, unknown, unknown>>;
122
237
  sourceRoot: string;
123
238
  redisClient: Redis;
124
239
  blockingRedisClient: Redis;
240
+ /**
241
+ * NEW: Retention period for completed/failed workflows in Redis.
242
+ * Example: '7 days', '2h'. If not specified, workflows are kept forever.
243
+ */
244
+ retention?: ms.StringValue;
245
+ pollingInterval?: number;
246
+ logger?: Logger;
125
247
  }
126
- declare function bDurableInitialize<TEvents extends Record<string, any>>(options: InitializeOptions): BDurableAPI<TEvents>;
248
+ declare function bDurableInitialize(options: InitializeOptions): BDurableAPI;
127
249
 
128
- export { type BDurableAPI, type DurableFunction, type Instruction, type WorkflowContext, type WorkflowState, bDurable, bDurableInitialize };
250
+ export { type BDurableAPI, type DurableFunction, type Instruction, type Logger, type StartOptions, type StartedWorkflowHandle, WorkflowCancellationError, type WorkflowContext, type WorkflowHandle, type WorkflowSignal, type WorkflowState, type WorkflowStateInfo, bDurable, bDurableInitialize };
package/dist/index.mjs CHANGED
@@ -1 +1 @@
1
- var i,p;function E(c){if(i||p){console.warn("[Persistence] Los clientes de Redis ya han sido configurados. Omitiendo.");return}i=c.commandClient,p=c.blockingClient}import{randomUUID as m}from"crypto";import T from"ms";import{resolve as I}from"path";var d="queue:tasks",w="durable:sleepers";var u={RUNNING:"RUNNING",SLEEPING:"SLEEPING",COMPLETED:"COMPLETED",FAILED:"FAILED",AWAITING_EVENT:"AWAITING_EVENT",AWAITING_SUBWORKFLOW:"AWAITING_SUBWORKFLOW"};var f=class{getKey(t){return`workflow:${t}`}getLockKey(t){return`workflow:${t}:lock`}async acquireLock(t,e=10){let a=this.getLockKey(t);return await i.set(a,"locked","EX",e,"NX")==="OK"}async releaseLock(t){await i.del(this.getLockKey(t))}async get(t){let e=await i.hgetall(this.getKey(t));return!e||Object.keys(e).length===0?null:{workflowId:e.workflowId,name:e.name,status:e.status,step:parseInt(e.step,10),input:JSON.parse(e.input),state:JSON.parse(e.state),result:e.result?JSON.parse(e.result):void 0,error:e.error,parentId:e.parentId,subWorkflowId:e.subWorkflowId,awaitingEvent:e.awaitingEvent}}async create(t){let e={...t,step:0,state:{}},a=i.pipeline();a.hset(this.getKey(e.workflowId),{...e,input:JSON.stringify(e.input),state:JSON.stringify(e.state)}),await a.exec()}async updateState(t,e){await i.hset(this.getKey(t),"state",JSON.stringify(e))}async updateStatus(t,e,a={}){await i.hset(this.getKey(t),{status:e,...a})}async incrementStep(t){return i.hincrby(this.getKey(t),"step",1)}async complete(t,e){await i.hset(this.getKey(t),{status:u.COMPLETED,result:JSON.stringify(e??null)})}async fail(t,e){await i.hset(this.getKey(t),{status:u.FAILED,error:e.message})}async scheduleSleep(t,e){await this.updateStatus(t,u.SLEEPING),await i.zadd(w,e.toString(),t)}async getWorkflowsToWake(){let t=Date.now(),e=await i.zrangebyscore(w,0,t);return e.length>0&&await i.zrem(w,...e),e}async enqueueTask(t){await i.lpush(d,JSON.stringify(t))}async resumeForCatch(t,e,a){let n=this.getKey(t);await i.hset(n,{state:JSON.stringify(e),status:u.RUNNING,step:a.toString()})}},g=class{durableFns=new Map;repo=new f;workerId=m();isRunning=!1;schedulerInterval=null;sourceRoot;constructor(t){this.sourceRoot=t.sourceRoot}async start(t,e,a){let n=m();return console.log(`[RUNTIME] Iniciando workflow '${t.name}' con ID: ${n}`),await this.repo.create({workflowId:n,name:t.name,status:u.RUNNING,input:e,parentId:a}),this.scheduleExecution(n,t),n}async scheduleExecution(t,e,a,n){setImmediate(()=>{this._executeStep(t,e,a,n).catch(r=>{console.error(`[RUNTIME-FATAL] Error no manejado en la ejecuci\xF3n del workflow ${t}`,r)})})}async _executeStep(t,e,a,n){if(!await this.repo.acquireLock(t)){console.log(`[RUNTIME-LOCK] No se pudo adquirir el bloqueo para ${t}, otro proceso est\xE1 trabajando. Se omitir\xE1 este ciclo.`);return}let o=null;try{if(n)throw n;let s=await this.repo.get(t);if(!s)return;if(s.status!==u.RUNNING){console.log(`[RUNTIME] Se intent\xF3 ejecutar el workflow ${t} pero su estado es ${s.status}. 
Omitiendo.`);return}let l={workflowId:t,step:s.step,input:s.input,state:s.state,result:a,log:k=>console.log(`[WF:${t}] ${k}`)},h=await e.execute(l);await this.repo.updateState(t,l.state),await this.handleInstruction(h,l,s.name)}catch(s){let l=s instanceof Error?s:new Error(String(s));o=s instanceof Error?s:new Error(String(s)),console.error(`[RUNTIME] Error en workflow ${t}:`,o),await this.handleFailure(t,l,e)}finally{await this.repo.releaseLock(t)}o&&await this.handleFailure(t,o,e)}async handleInstruction(t,e,a){let{workflowId:n}=e;switch(t.type){case"SCHEDULE_TASK":{await this.repo.enqueueTask({workflowId:n,durableFunctionName:a,...t});break}case"SCHEDULE_SLEEP":{let r=T(t.duration),o=Date.now()+r;await this.repo.scheduleSleep(n,o);break}case"WAIT_FOR_EVENT":{await this.repo.updateStatus(n,u.AWAITING_EVENT,{awaitingEvent:t.eventName}),await i.sadd(`events:awaiting:${t.eventName}`,n);break}case"EXECUTE_SUBWORKFLOW":{let r=this.durableFns.get(t.workflowName);if(!r)throw new Error(`Sub-workflow '${t.workflowName}' no encontrado.`);let o=await this.start(r,t.input,n);await this.repo.updateStatus(n,u.AWAITING_SUBWORKFLOW,{subWorkflowId:o});break}case"COMPLETE":{await this.repo.complete(n,t.result),await this.resumeParentWorkflow(n);break}}}async handleFailure(t,e,a){if(!await this.repo.acquireLock(t,20)){console.warn(`[RUNTIME-FAIL] No se pudo adquirir lock para manejar fallo en ${t}. Reintentando m\xE1s tarde...`);return}try{let r=await this.repo.get(t);if(!r||r.status===u.FAILED)return;let o=r.state.tryCatchStack;if(o&&o.length>0){let l=o.pop()?.catchStep;if(l!==void 0){console.log(`[RUNTIME-FAIL] Excepci\xF3n capturada en ${t}. Saltando a la cl\xE1usula CATCH en el paso ${l}.`),await this.repo.resumeForCatch(t,r.state,l),this.scheduleExecution(t,a,{name:e.name,message:e.message,stack:e.stack});return}}console.error(`[RUNTIME] Error no capturado en workflow ${t}:`,e),await this.repo.fail(t,e),await this.propagateFailureToParent(t,e)}finally{await this.repo.releaseLock(t)}}async resumeParentWorkflow(t){let e=await this.repo.get(t);if(!e?.parentId)return;let a=e.parentId,n=await this.repo.get(a);if(!n||n.status!==u.AWAITING_SUBWORKFLOW||n.subWorkflowId!==t)return;console.log(`[RUNTIME] Reanudando workflow padre ${a}.`);let r=this.durableFns.get(n.name);if(!r){await this.repo.fail(a,new Error(`Definici\xF3n del workflow '${n.name}' no encontrada.`));return}await this.repo.updateStatus(a,u.RUNNING,{subWorkflowId:""}),await this.repo.incrementStep(a),this.scheduleExecution(a,r,e.result)}async propagateFailureToParent(t,e){let a=await this.repo.get(t);if(!a?.parentId)return;let n=a.parentId,r=await this.repo.get(n);if(!r||r.status!==u.AWAITING_SUBWORKFLOW||r.subWorkflowId!==t)return;console.log(`[RUNTIME] Propagando fallo del sub-workflow ${t} al padre ${n}.`);let o=this.durableFns.get(r.name);if(!o){await this.repo.fail(n,new Error(`Definici\xF3n del workflow '${r.name}' no encontrada al propagar fallo.`));return}await this.repo.updateStatus(n,u.RUNNING,{subWorkflowId:""});let s=new Error(`Sub-workflow '${a.name}' (${t}) fall\xF3: ${e.message}`);s.stack=e.stack,this.scheduleExecution(n,o,void 0,s)}async sendEvent(t,e,a){if(!await this.repo.acquireLock(t)){console.warn(`[RUNTIME-LOCK] No se pudo adquirir el bloqueo para sendEvent en ${t}. 
El evento podr\xEDa ser descartado o retrasado.`);return}try{let r=await this.repo.get(t);if(!r){console.warn(`[RUNTIME] Se intent\xF3 enviar un evento a un workflow no existente: ${t}`);return}if(r.status!==u.AWAITING_EVENT||r.awaitingEvent!==e){console.warn(`[RUNTIME] El workflow ${t} no est\xE1 esperando el evento '${e}'. Estado actual: ${r.status}, esperando: ${r.awaitingEvent}.`);return}console.log(`[RUNTIME] Evento '${e}' recibido para el workflow ${t}. Reanudando...`);let o=this.durableFns.get(r.name);if(!o){console.error(`[RUNTIME] La definici\xF3n de la funci\xF3n durable '${r.name}' no se encontr\xF3 para el workflow ${t}.`),await this.repo.fail(t,new Error(`Funci\xF3n durable '${r.name}' no encontrada.`));return}await this.repo.updateStatus(t,u.RUNNING,{awaitingEvent:""}),await i.srem(`events:awaiting:${e}`,t),await this.repo.incrementStep(t),this.scheduleExecution(t,o,a)}catch(r){console.error(`[RUNTIME] Error procesando el evento '${e}' para el workflow ${t}:`,r),await this.repo.fail(t,new Error(`Fallo al procesar el evento: ${r instanceof Error?r.message:String(r)}`))}finally{await this.repo.releaseLock(t)}}startScheduler(){if(this.schedulerInterval)return;console.log("[SCHEDULER] Scheduler iniciado.");let t=async()=>{let e=await this.repo.getWorkflowsToWake();for(let a of e){let n=await this.repo.get(a);if(n){let r=this.durableFns.get(n.name);r&&(console.log(`[SCHEDULER] Reanudando workflow ${a}`),await this.repo.updateStatus(a,u.RUNNING),await this.repo.incrementStep(a),this.scheduleExecution(a,r,null))}}};this.schedulerInterval=setInterval(t,2e3)}startWorker(){if(this.isRunning)return;this.isRunning=!0;let t=`${d}:processing:${this.workerId}`;console.log(`[WORKER] Worker ${this.workerId} iniciado, esperando tareas...`),(async()=>{for(;this.isRunning;)try{let a=await p.brpoplpush(d,t,0);if(!a)continue;let n=JSON.parse(a);console.log(`[WORKER] Tarea recibida: ${n.exportName}`);try{let r;n.modulePath.startsWith("virtual:")?r=await import(n.modulePath):r=await import(I(this.sourceRoot,n.modulePath));let o=r[n.exportName];if(typeof o!="function")throw new Error(`'${n.exportName}' no es una funci\xF3n.`);let s=await o(...n.args),l=this.durableFns.get(n.durableFunctionName);l&&(await this.repo.incrementStep(n.workflowId),this.scheduleExecution(n.workflowId,l,s)),await i.lrem(t,1,a)}catch(r){let o=r instanceof Error?r:new Error(String(r));console.error(`[WORKER] Falla en la tarea '${n.exportName}' para workflow ${n.workflowId}`,o);let s=this.durableFns.get(n.durableFunctionName);s?await this.handleFailure(n.workflowId,o,s):await this.repo.fail(n.workflowId,new Error(`Definici\xF3n de workflow ${n.durableFunctionName} no encontrada durante el manejo de fallos.`)),console.log(`[WORKER] Eliminando tarea procesada (con error manejado): ${n.exportName}`),await i.lrem(t,1,a)}}catch(a){if(!this.isRunning)break;console.error("[WORKER] Error de infraestructura:",a),await new Promise(n=>setTimeout(n,5e3))}})()}run(t){this.durableFns=t,this.startWorker(),this.startScheduler()}stop(){this.isRunning=!1,this.schedulerInterval&&clearInterval(this.schedulerInterval),console.log("[RUNTIME] Solicitando detenci\xF3n...")}};var b=c=>c.workflow;function D(c){console.log("--- Inicializando Sistema Durable ---"),E({commandClient:c.redisClient,blockingClient:c.blockingRedisClient});let t=new g({sourceRoot:c.sourceRoot});return t.run(c.durableFunctions),{start:t.start.bind(t),sendEvent:t.sendEvent.bind(t),stop:t.stop.bind(t),runtime:t}}export{b as bDurable,D as bDurableInitialize};
1
+ import P from"ioredis";var w="queue:tasks",m="durable:sleepers",v="worker:heartbeat:",W="queue:dead";function O(u){return`workflow:${u}`}var l={RUNNING:"RUNNING",SLEEPING:"SLEEPING",COMPLETED:"COMPLETED",FAILED:"FAILED",AWAITING_EVENT:"AWAITING_EVENT",AWAITING_SUBWORKFLOW:"AWAITING_SUBWORKFLOW",CANCELLING:"CANCELLING",CANCELLED:"CANCELLED",VERSION_MISMATCH:"VERSION_MISMATCH"};var o,E;function N(u){if(o||E){console.warn("[Persistence] Los clientes de Redis ya han sido configurados. Omitiendo.");return}o=u.commandClient,E=u.blockingClient}import{randomUUID as R}from"crypto";import L from"ms";import{resolve as D}from"path";var k=class extends Error{isCancellation=!0;constructor(t){super(t),this.name="WorkflowCancellationError"}};var x={info:(u,t)=>console.log(`[INFO] ${u}`,t||""),error:(u,t)=>console.error(`[ERROR] ${u}`,t||""),warn:(u,t)=>console.warn(`[WARN] ${u}`,t||""),debug:(u,t)=>console.debug(`[DEBUG] ${u}`,t||"")},I=class{constructor(t){this.retention=t}getKey(t){return`workflow:${t}`}getLockKey(t){return`workflow:${t}:lock`}async acquireLock(t,e=10){let s=this.getLockKey(t);return await o.set(s,"locked","EX",e,"NX")==="OK"}async releaseLock(t){await o.del(this.getLockKey(t))}async get(t){let e=await o.hgetall(this.getKey(t));return!e||Object.keys(e).length===0?null:{workflowId:e.workflowId,name:e.name,version:e.version,status:e.status,step:parseInt(e.step,10),input:JSON.parse(e.input),state:JSON.parse(e.state),result:e.result?JSON.parse(e.result):void 0,error:e.error,parentId:e.parentId,subWorkflowId:e.subWorkflowId,awaitingEvent:e.awaitingEvent,createdAt:e.createdAt?parseInt(e.createdAt,10):0,updatedAt:e.updatedAt?parseInt(e.updatedAt,10):0}}async create(t){let e=Date.now(),s={...t,step:0,state:{},createdAt:e,updatedAt:e},n={...s,input:JSON.stringify(s.input),state:JSON.stringify(s.state)};n.version===void 0&&delete n.version;let a=o.pipeline();a.hset(this.getKey(s.workflowId),n),await a.exec()}async updateState(t,e){await o.hset(this.getKey(t),{state:JSON.stringify(e),updatedAt:Date.now()})}async updateStatus(t,e,s={}){await o.hset(this.getKey(t),{status:e,...s,updatedAt:Date.now()})}async incrementStep(t){return o.hincrby(this.getKey(t),"step",1)}async applyRetention(t){if(this.retention){let e=L(this.retention)/1e3;e>0&&await o.expire(this.getKey(t),e)}}async complete(t,e){await o.hset(this.getKey(t),{status:l.COMPLETED,result:JSON.stringify(e??null)}),await this.applyRetention(t)}async fail(t,e,s=l.FAILED){await o.hset(this.getKey(t),{status:s,error:e.message}),await this.applyRetention(t)}async scheduleSleep(t,e){await this.updateStatus(t,l.SLEEPING),await o.zadd(m,e,t)}async getWorkflowsToWake(){let t=Date.now(),e=await o.zrangebyscore(m,0,t);return e.length>0&&await o.zrem(m,...e),e}async enqueueTask(t){await o.lpush(w,JSON.stringify(t))}async resumeForCatch(t,e,s){let n=this.getKey(t);await o.hset(n,{state:JSON.stringify(e),status:l.RUNNING,step:s.toString()})}async moveToDLQ(t,e){let s={...t,failedAt:Date.now(),error:e.message,stack:e.stack};await o.lpush(W,JSON.stringify(s))}},S=class{durableFns=new Map;repo;workerId=R();isRunning=!1;schedulerInterval=null;heartbeatInterval=null;sourceRoot;pollingInterval;logger;maxTaskRetries=3;constructor(t){this.sourceRoot=t.sourceRoot,this.repo=new I(t.retention),this.pollingInterval=t.pollingInterval||5e3,this.logger=t.logger||x}async getState(t){let e=await this.repo.get(t);return 
e?{workflowId:e.workflowId,name:e.name,version:e.version,status:e.status,step:e.step,input:e.input,output:e.result,state:e.state,error:e.error,createdAt:e.createdAt,updatedAt:e.updatedAt}:null}async start(t,e,s){if(e.workflowId){let r=await this.repo.get(e.workflowId);if(r&&r.status!==l.COMPLETED&&r.status!==l.FAILED)throw new Error(`Workflow with ID '${e.workflowId}' already exists and is in a running state (${r.status}).`)}let n=e.workflowId??R();this.logger.info(`[RUNTIME] Iniciando workflow '${t.name}' v${t.version} con ID: ${n}`),await this.repo.create({workflowId:n,name:t.name,version:t.version,status:l.RUNNING,input:e.input,parentId:s});let a=async()=>{};if(e.subscribe){let r=o.duplicate(),i=`signal:${n}`;r.on("message",(c,g)=>{if(c===i)try{let p=JSON.parse(g),f={name:p.signalName,payload:p.payload};e.subscribe?.(f)}catch(p){this.logger.error("Error al procesar se\xF1al",{error:p,workflowId:n})}}),await r.subscribe(i),a=async()=>{r.status==="ready"&&(r.unsubscribe(i).catch(()=>{}),r.quit().catch(()=>{}))}}return setImmediate(()=>{this._executeStep(n,t).catch(r=>{this.logger.error("Error fatal en ejecuci\xF3n inicial",{error:r,workflowId:n})})}),{workflowId:n,unsubscribe:a}}async scheduleExecution(t,e,s,n){setImmediate(()=>{this._executeStep(t,e,s,n).catch(a=>{this.logger.error("Error no manejado en scheduleExecution",{error:a,workflowId:t})})})}async _executeStep(t,e,s,n){if(await this.repo.acquireLock(t))try{if(n)throw n;let r=await this.repo.get(t);if(!r)return;if(r.status===l.CANCELLING)throw new k(r.error||"Workflow cancelled");if(r.status!==l.RUNNING)return;let i=r.version==="undefined"?void 0:r.version,c=e.version==="undefined"?void 0:e.version;if(String(i??"")!==String(c??"")){let d=new Error(`Version mismatch: DB=${i}, Code=${c}`);await this.repo.fail(t,d,l.VERSION_MISMATCH);return}let g={workflowId:t,step:r.step,input:r.input,state:r.state,result:s,log:(d,T)=>this.logger.info(d,{...T,workflowId:t,step:r.step})},p=await e.execute(g);await this.repo.updateState(t,g.state),await this.handleInstruction(p,g,r.name)&&(await this.repo.incrementStep(t),this.scheduleExecution(t,e,void 0))}catch(r){let i=r instanceof Error?r:new Error(String(r));this.logger.error("Error en workflow",{workflowId:t,error:i.message}),await this.handleFailure(t,i,e,!0)}finally{await this.repo.releaseLock(t)}}async handleInstruction(t,e,s){let{workflowId:n}=e;switch(t.type){case"SCHEDULE_TASK":return await this.repo.enqueueTask({workflowId:n,durableFunctionName:s,...t}),!1;case"SCHEDULE_SLEEP":{let a=L(t.duration);if(typeof a!="number")throw new Error(`Invalid time value provided to bSleep: "${t.duration}"`);let r=Date.now()+a;return await this.repo.scheduleSleep(n,r),!1}case"WAIT_FOR_EVENT":return await this.repo.updateStatus(n,l.AWAITING_EVENT,{awaitingEvent:t.eventName}),await o.sadd(`events:awaiting:${t.eventName}`,n),!1;case"EXECUTE_SUBWORKFLOW":{let a=this.durableFns.get(t.workflowName);if(!a)throw new Error(`Sub-workflow '${t.workflowName}' no encontrado.`);let{workflowId:r}=await this.start(a,{input:t.input},n);return await this.repo.updateStatus(n,l.AWAITING_SUBWORKFLOW,{subWorkflowId:r}),!1}case"SEND_SIGNAL":{let a=`signal:${n}`,r=JSON.stringify({signalName:t.signalName,payload:t.payload});return await o.publish(a,r),!0}case"COMPLETE":{let a=`signal:${n}`,r=JSON.stringify({signalName:"workflow:completed",payload:t.result});return await o.publish(a,r),await this.repo.complete(n,t.result),await this.resumeParentWorkflow(n),!1}}}async handleFailure(t,e,s,n=!1){if(!n&&!await 
this.repo.acquireLock(t,20)){this.logger.warn(`No se pudo adquirir lock para fallo en ${t}`);return}try{if(e instanceof k){await this.repo.fail(t,e,l.CANCELLED);let g=await this.repo.get(t);g?.subWorkflowId&&await this.cancel(g.subWorkflowId,`Parent workflow ${t} was cancelled`);return}let a=await this.repo.get(t);if(!a||a.status===l.FAILED||a.status===l.COMPLETED)return;let r=a.state.tryCatchStack;if(r&&r.length>0){let p=r.pop()?.catchStep;if(p!==void 0){this.logger.info(`Capturando error en step ${p}`,{workflowId:t}),await this.repo.resumeForCatch(t,a.state,p),this.scheduleExecution(t,s,{name:e.name,message:e.message,stack:e.stack});return}}let i=`signal:${t}`,c=JSON.stringify({signalName:"workflow:failed",payload:{message:e.message}});await o.publish(i,c),await this.repo.fail(t,e),await this.propagateFailureToParent(t,e)}finally{n||await this.repo.releaseLock(t)}}async resumeParentWorkflow(t){let e=await this.repo.get(t);if(!e?.parentId)return;let s=e.parentId,n=await this.repo.get(s);if(!n||n.status!==l.AWAITING_SUBWORKFLOW||n.subWorkflowId!==t)return;let a=this.durableFns.get(n.name);if(!a){await this.repo.fail(s,new Error(`Definici\xF3n del workflow '${n.name}' no encontrada.`));return}await this.repo.updateStatus(s,l.RUNNING,{subWorkflowId:""}),await this.repo.incrementStep(s),this.scheduleExecution(s,a,e.result)}async propagateFailureToParent(t,e){let s=await this.repo.get(t);if(!s?.parentId)return;let n=s.parentId,a=await this.repo.get(n);if(!a||a.status!==l.AWAITING_SUBWORKFLOW||a.subWorkflowId!==t)return;let r=this.durableFns.get(a.name);if(!r){await this.repo.fail(n,new Error(`Definici\xF3n del workflow '${a.name}' no encontrada al propagar fallo.`));return}await this.repo.updateStatus(n,l.RUNNING,{subWorkflowId:""});let i=new Error(`Sub-workflow '${s.name}' (${t}) fall\xF3: ${e.message}`);i.stack=e.stack,this.scheduleExecution(n,r,void 0,i)}async sendEvent(t,e,s){let n=!1;for(let a=0;a<3&&(n=await this.repo.acquireLock(t),!n);a++)await new Promise(r=>setTimeout(r,50));if(!n)return this.logger.warn("Lock timeout en sendEvent",{workflowId:t});try{let a=await this.repo.get(t);if(!a)return this.logger.warn("Evento para workflow inexistente",{workflowId:t});if(a.status!==l.AWAITING_EVENT||a.awaitingEvent!==e)return this.logger.warn("Workflow no esperaba este evento",{workflowId:t,expected:a.awaitingEvent,received:e});let r=this.durableFns.get(a.name);if(!r){await this.repo.fail(t,new Error(`Funci\xF3n durable '${a.name}' no encontrada.`));return}await this.repo.updateStatus(t,l.RUNNING,{awaitingEvent:""}),await o.srem(`events:awaiting:${e}`,t),await this.repo.incrementStep(t),this.scheduleExecution(t,r,s)}catch(a){let r=a instanceof Error?a:new Error(String(a)),i=(await this.repo.get(t))?.name||"",c=this.durableFns.get(i);await this.handleFailure(t,r,c,!0)}finally{await this.repo.releaseLock(t)}}async cancel(t,e){if(!await this.repo.acquireLock(t))return await new Promise(n=>setTimeout(n,100)),this.cancel(t,e);try{let n=await this.repo.get(t);if(!n||[l.COMPLETED,l.FAILED,l.CANCELLED].includes(n.status))return;if(await this.repo.updateStatus(t,l.CANCELLING,{error:e}),n.status===l.SLEEPING){await o.zrem(m,t);let a=this.durableFns.get(n.name);this.scheduleExecution(t,a)}if(n.status===l.AWAITING_EVENT){let a=this.durableFns.get(n.name);this.scheduleExecution(t,a)}}finally{await this.repo.releaseLock(t)}}startScheduler(){if(this.schedulerInterval)return;this.logger.info(`Scheduler iniciado (${this.pollingInterval}ms)`);let t=async()=>{await this.checkSleepers(),await 
this.reapDeadWorkers()};this.schedulerInterval=setInterval(t,this.pollingInterval)}async checkSleepers(){let t=await this.repo.getWorkflowsToWake();for(let e of t){let s=await this.repo.get(e);if(s){let n=this.durableFns.get(s.name);n&&(this.logger.info("Despertando workflow",{workflowId:e}),await this.repo.updateStatus(e,l.RUNNING),await this.repo.incrementStep(e),this.scheduleExecution(e,n,void 0))}}}async reapDeadWorkers(){let t=await o.keys(`${w}:processing:*`);for(let e of t){let s=e.split(":").pop();if(!s||await o.exists(`${v}${s}`))continue;this.logger.warn(`Worker muerto ${s}. Recuperando tareas.`);let n=await o.rpoplpush(e,w);for(;n;)n=await o.rpoplpush(e,w);await o.del(e)}}startHeartbeat(){let t=`${v}${this.workerId}`,e=Math.max(Math.ceil(this.pollingInterval*3/1e3),5),s=()=>{this.isRunning&&o.set(t,Date.now().toString(),"EX",e).catch(()=>{})};this.heartbeatInterval=setInterval(s,this.pollingInterval),s()}startWorker(){if(this.isRunning)return;this.isRunning=!0;let t=`${w}:processing:${this.workerId}`;this.logger.info(`Worker ${this.workerId} iniciado`),this.startHeartbeat(),(async()=>{for(;this.isRunning;)try{let s=await E.brpoplpush(w,t,0);if(!s)continue;let n=JSON.parse(s);this.logger.debug(`Ejecutando tarea: ${n.exportName}`,{workflowId:n.workflowId});try{let a;n.modulePath.startsWith("virtual:")?a=await import(n.modulePath):a=await import(D(this.sourceRoot,n.modulePath));let r=a[n.exportName];if(typeof r!="function")throw new Error(`'${n.exportName}' no es una funci\xF3n.`);let i=await r(...n.args),c=this.durableFns.get(n.durableFunctionName);c&&(await this.repo.incrementStep(n.workflowId),this.scheduleExecution(n.workflowId,c,i)),await o.lrem(t,1,s)}catch(a){let r=a instanceof Error?a:new Error(String(a));this.logger.error(`Fallo en tarea ${n.exportName}`,{workflowId:n.workflowId,error:r.message});let i=(n.attempts||0)+1;if(i<=this.maxTaskRetries)this.logger.warn(`Reintentando tarea (intento ${i}/${this.maxTaskRetries})`,{workflowId:n.workflowId}),n.attempts=i,await o.lpush(w,JSON.stringify(n)),await o.lrem(t,1,s);else{this.logger.error("Reintentos agotados. 
Moviendo a DLQ.",{workflowId:n.workflowId}),await this.repo.moveToDLQ(n,r);let c=this.durableFns.get(n.durableFunctionName);c?await this.handleFailure(n.workflowId,r,c):await this.repo.fail(n.workflowId,new Error(`Def missing for ${n.durableFunctionName}`)),await o.lrem(t,1,s)}}}catch(s){if(!this.isRunning)break;this.logger.error("Error infraestructura worker",{error:s}),await new Promise(n=>setTimeout(n,5e3))}})()}run(t){this.durableFns=t,this.startWorker(),this.startScheduler()}stop(){this.isRunning=!1,this.schedulerInterval&&clearInterval(this.schedulerInterval),this.heartbeatInterval&&clearInterval(this.heartbeatInterval),this.logger.info("Runtime detenido")}};var F=u=>({...u,__isDurable:!0});var A={info:(u,t)=>console.log(`[INFO] ${u}`,t||""),error:(u,t)=>console.error(`[ERROR] ${u}`,t||""),warn:(u,t)=>console.warn(`[WARN] ${u}`,t||""),debug:(u,t)=>console.debug(`[DEBUG] ${u}`,t||"")};function it(u){let t=u.logger||A;t.info("--- Inicializando Sistema Durable ---");let e=new P(u.redisClient.options),s=new Map;e.psubscribe("signal:*",r=>{r&&t.error("Error fatal al suscribirse a los canales de se\xF1ales:",{error:r})}),e.on("pmessage",(r,i,c)=>{let g=s.get(i);if(g&&g.length>0)try{let p=JSON.parse(c),f={name:p.signalName,payload:p.payload};[...g].forEach(d=>d(f))}catch(p){t.error(`Error al parsear mensaje de se\xF1al en ${i}`,{error:p})}});let n=(r,i)=>{let c=()=>{};return{workflowId:i,subscribe:async g=>{let p=`signal:${i}`,f=O(i),d=await u.redisClient.hgetall(f);if(d.status===l.COMPLETED)return g({name:"workflow:completed",payload:JSON.parse(d.result||"null")},c),{unsubscribe:c};if(d.status===l.FAILED)return g({name:"workflow:failed",payload:{message:d.error||"Unknown error"}},c),{unsubscribe:c};let T=null,b=()=>{if(!T)return;let h=s.get(p);if(h){let y=h.indexOf(T);y>-1&&h.splice(y,1),h.length===0&&s.delete(p)}};return T=h=>{g(h,b)},s.has(p)||s.set(p,[]),s.get(p)?.push(T),{unsubscribe:b}}}};N({commandClient:u.redisClient,blockingClient:u.blockingRedisClient});let a=new S({sourceRoot:u.sourceRoot,retention:u.retention,pollingInterval:u.pollingInterval,logger:t});return a.run(u.durableFunctions),{start:async(r,i)=>a.start(r,i),cancel:(r,i)=>a.cancel(r,i),getState:r=>a.getState(r),getWorkflowHandle:(r,i)=>n(r,i),sendEvent:(r,i,c,g)=>a.sendEvent(i,c,g),stop:()=>{a.stop(),e.quit().catch(()=>{})},runtime:a}}export{k as WorkflowCancellationError,F as bDurable,it as bDurableInitialize};
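
Beyond the API changes, the rebuilt bundle adds a per-instance pub/sub channel (`signal:<workflowId>`) with built-in `workflow:completed` and `workflow:failed` signals, worker heartbeats with a dead-worker reaper, a retry-then-dead-letter path for failing tasks (`queue:dead`), explicit cancellation states, and a version check that parks instances whose stored version no longer matches the deployed definition as `VERSION_MISMATCH`. Reusing `api`, `orderWorkflow`, and `workflowId` from the sketch above, observing a running instance might look roughly like this; it is an assumption-level sketch, not documented usage.

```typescript
// Continues the previous sketch inside an async context; `api`,
// `orderWorkflow`, and `workflowId` are assumed to be in scope.
const handle = api.getWorkflowHandle(orderWorkflow, workflowId);

const { unsubscribe } = await handle.subscribe((signal, stop) => {
  // The runtime publishes these terminal signals on `signal:<workflowId>`;
  // if the instance already finished, subscribe() replays the outcome once.
  if (signal.name === "workflow:completed") {
    console.log("finished with:", signal.payload);
    stop();
  } else if (signal.name === "workflow:failed") {
    console.error("failed:", signal.payload);
    stop();
  }
});

// A workflow stranded by a deploy surfaces via getState() instead of
// silently running against mismatched code.
const info = await api.getState(workflowId);
if (info?.status === "VERSION_MISMATCH") {
  console.warn("stored version differs from the deployed definition");
}
```
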
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@bobtail.software/b-durable",
3
- "version": "1.0.4",
3
+ "version": "1.0.6",
4
4
  "main": "dist/index.mjs",
5
5
  "types": "dist/index.d.mts",
6
6
  "description": "A system for creating durable, resilient, and type-safe workflows in JavaScript/TypeScript.",
@@ -23,6 +23,7 @@
23
23
  "dependencies": {
24
24
  "ioredis": "^5.8.2",
25
25
  "ms": "^2.1.3",
26
+ "pino": "^10.1.0",
26
27
  "prettier": "^3.6.2",
27
28
  "ts-morph": "^27.0.2"
28
29
  },
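
`pino` is now a runtime dependency and, per the declarations above, `bDurableInitialize` accepts an optional `logger`. The bundled default logger is invoked as `info/warn/error/debug(message, meta)`, whereas pino expects the metadata object first, so a thin adapter is the likely wiring. A sketch under that assumption; the exact shape should be checked against the exported `Logger` type.

```typescript
// Sketch only: the Logger call shape (message first, optional metadata second)
// is inferred from the bundled console logger; adjust if the exported type differs.
import pino from "pino";
import type { Logger } from "@bobtail.software/b-durable";

const base = pino({ level: "info" });

const logger: Logger = {
  info: (msg, meta) => base.info(meta ?? {}, msg),
  warn: (msg, meta) => base.warn(meta ?? {}, msg),
  error: (msg, meta) => base.error(meta ?? {}, msg),
  debug: (msg, meta) => base.debug(meta ?? {}, msg),
};

// Then pass it through the options object:
// bDurableInitialize({ ...otherOptions, logger });
```
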