@bobtail.software/b-durable 1.0.4 → 1.0.5
This diff compares the contents of publicly released package versions as they appear in their public registry; it is provided for informational purposes only.
- package/README.md +23 -8
- package/dist/compiler/cli.mjs +703 -40
- package/dist/index.d.mts +19 -15
- package/dist/index.mjs +551 -1
- package/package.json +2 -1
package/dist/index.d.mts
CHANGED

@@ -35,12 +35,17 @@ type Instruction<TOutput = unknown> = {
     type: 'COMPLETE';
     result: TOutput;
 };
-interface DurableFunction<TInput = unknown, TOutput = unknown
+interface DurableFunction<TInput = unknown, TOutput = unknown, TEvents = Record<string, never>> {
     __isDurable: true;
     name: string;
     execute: (context: WorkflowContext<TInput>) => Promise<Instruction<TOutput>>;
+    _TEvents?: TEvents;
 }

+interface StartOptions<TInput> {
+    input: TInput;
+    workflowId?: string;
+}
 declare class DurableRuntime {
     private durableFns;
     private repo;
@@ -51,7 +56,7 @@ declare class DurableRuntime {
     constructor(options: {
         sourceRoot: string;
     });
-    start<TInput, TOutput>(durableFn: DurableFunction<TInput, TOutput>,
+    start<TInput, TOutput>(durableFn: DurableFunction<TInput, TOutput>, options: StartOptions<TInput>, parentId?: string): Promise<string>;
     private scheduleExecution;
     private _executeStep;
     private handleInstruction;
@@ -65,12 +70,10 @@ declare class DurableRuntime {
     stop(): void;
 }

-type DurableWorkflowFn$1<TInput, TOutput> = (input: TInput, ...args: any[]) => Promise<TOutput>;
-
 /**
  * El contexto de ejecución proporcionado a cada workflow, con métodos de durabilidad tipados.
  */
-interface DurableContext<TEvents
+interface DurableContext<TEvents = Record<string, never>> extends Pick<WorkflowContext, 'log' | 'workflowId'> {
     /**
      * Pausa la ejecución del workflow de manera duradera.
      * @param duration Una cadena de tiempo como '2 days', '10h', '7s'.
@@ -89,10 +92,10 @@ interface DurableContext<TEvents extends Record<string, any> = Record<string, ne
      * @param input La entrada para el sub-workflow.
      * @returns Una promesa que se resuelve con el resultado del sub-workflow.
      */
-    bExecute<TInput, TOutput
+    bExecute<TInput, TOutput, TWorkflowEvents extends Record<string, any>>(workflow: DurableFunction<TInput, TOutput, TWorkflowEvents>, input: TInput): Promise<TOutput>;
 }
-type DurableWorkflowFn<TInput, TOutput, TEvents
-interface DurableWorkflowDef<TInput, TOutput, TEvents
+type DurableWorkflowFn<TInput, TOutput, TEvents = Record<string, never>> = (input: TInput, context: DurableContext<TEvents>) => Promise<TOutput>;
+interface DurableWorkflowDef<TInput, TOutput, TEvents = Record<string, never>> {
     /**
      * La función async que contiene la lógica del workflow.
      */
@@ -102,20 +105,21 @@ interface DurableWorkflowDef<TInput, TOutput, TEvents extends Record<string, any
  * Marcador para que el compilador identifique y transforme una función en un workflow durable.
  * Esta función es un passthrough en tiempo de ejecución, su único propósito es para el análisis estático.
  */
-declare const bDurable: <TInput =
+declare const bDurable: <TInput = any, TOutput = any, TEvents = Record<string, never>>(def: DurableWorkflowDef<TInput, TOutput, TEvents>) => DurableFunction<TInput, TOutput, TEvents>;

-interface BDurableAPI
-    start: <TInput, TOutput>(durableFn: DurableFunction<TInput, TOutput>,
+interface BDurableAPI {
+    start: <TInput, TOutput>(durableFn: DurableFunction<TInput, TOutput>, options: StartOptions<TInput>) => Promise<string>;
     stop: () => void;
     runtime: DurableRuntime;
     /**
      * Envía un evento a un workflow en ejecución que está en pausa esperando dicho evento.
-     * Esta función es estrictamente tipada basada en el tipo de eventos
+     * Esta función es estrictamente tipada basada en el tipo de eventos de la definición del workflow.
+     * @param durableFn La definición del workflow al que se le enviará el evento. Se usa para la inferencia de tipos.
      * @param workflowId El ID del workflow al que se le enviará el evento.
      * @param eventName El nombre del evento. Será autocompletado por el editor.
      * @param payload La carga útil del evento. El tipo debe coincidir con el definido para `eventName`.
      */
-    sendEvent: <K extends keyof
+    sendEvent: <TInput, TOutput, TWorkflowEvents, K extends keyof TWorkflowEvents>(durableFn: DurableFunction<TInput, TOutput, TWorkflowEvents>, workflowId: string, eventName: K, payload: TWorkflowEvents[K]) => Promise<void>;
 }
 interface InitializeOptions {
     durableFunctions: Map<string, DurableFunction<unknown, unknown>>;
@@ -123,6 +127,6 @@ interface InitializeOptions {
     redisClient: Redis;
     blockingRedisClient: Redis;
 }
-declare function bDurableInitialize
+declare function bDurableInitialize(options: InitializeOptions): BDurableAPI;

-export { type BDurableAPI, type DurableFunction, type Instruction, type WorkflowContext, type WorkflowState, bDurable, bDurableInitialize };
+export { type BDurableAPI, type DurableFunction, type Instruction, type StartOptions, type WorkflowContext, type WorkflowState, bDurable, bDurableInitialize };
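
These declaration changes reshape the public API: DurableFunction gains a TEvents type parameter, start() now takes a StartOptions<TInput> object (with an optional, caller-chosen workflowId) instead of positional arguments, and sendEvent() receives the workflow definition first so the event name and payload are type-checked against that definition. A minimal TypeScript sketch of how the new signatures compose; the event map, workflow handles, and IDs below are hypothetical stand-ins for values normally produced by bDurable() and the compiler step:

import type {
  BDurableAPI,
  DurableFunction,
  StartOptions,
} from "@bobtail.software/b-durable";

// Hypothetical event map and workflow handles, declared only to exercise the new
// signatures; real DurableFunction values come from bDurable() and the compiler.
type ApprovalEvents = { approved: { approverId: string } };

declare const api: BDurableAPI;
declare const sendInvoice: DurableFunction<{ orderId: string }, string>;
declare const approval: DurableFunction<{ orderId: string }, boolean, ApprovalEvents>;

async function demo() {
  // start() now takes a StartOptions<TInput> object; workflowId is optional and
  // lets callers pick a stable ID (the runtime rejects duplicates that are still active).
  const options: StartOptions<{ orderId: string }> = {
    input: { orderId: "ord_123" },
    workflowId: "invoice-ord_123",
  };
  const invoiceRunId = await api.start(sendInvoice, options);

  // sendEvent() takes the workflow definition first, so "approved" and the payload
  // shape are inferred from ApprovalEvents.
  await api.sendEvent(approval, "approval-run-id", "approved", { approverId: "user_42" });

  return invoiceRunId;
}

Passing the definition to sendEvent() is purely for type inference; as the index.mjs diff below shows, only workflowId, eventName, and payload reach the runtime.
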
package/dist/index.mjs
CHANGED

@@ -1 +1,551 @@
-
+// src/runtime/persistence.ts
+var redis;
+var blockingRedis;
+function configurePersistence(clients) {
+  if (redis || blockingRedis) {
+    console.warn(
+      "[Persistence] Los clientes de Redis ya han sido configurados. Omitiendo."
+    );
+    return;
+  }
+  redis = clients.commandClient;
+  blockingRedis = clients.blockingClient;
+}
+
+// src/runtime/runtime.ts
+import { randomUUID } from "crypto";
+import ms from "ms";
+import { resolve } from "path";
+
+// src/runtime/constants.ts
+var TASK_QUEUE_KEY = "queue:tasks";
+var SLEEPERS_KEY = "durable:sleepers";
+var WorkflowStatus = {
+  RUNNING: "RUNNING",
+  SLEEPING: "SLEEPING",
+  COMPLETED: "COMPLETED",
+  FAILED: "FAILED",
+  AWAITING_EVENT: "AWAITING_EVENT",
+  AWAITING_SUBWORKFLOW: "AWAITING_SUBWORKFLOW"
+};
+
+// src/runtime/runtime.ts
+var WorkflowRepository = class {
+  getKey(workflowId) {
+    return `workflow:${workflowId}`;
+  }
+  getLockKey(workflowId) {
+    return `workflow:${workflowId}:lock`;
+  }
+  async acquireLock(workflowId, lockTimeoutSeconds = 10) {
+    const lockKey = this.getLockKey(workflowId);
+    const result = await redis.set(lockKey, "locked", "EX", lockTimeoutSeconds, "NX");
+    return result === "OK";
+  }
+  async releaseLock(workflowId) {
+    await redis.del(this.getLockKey(workflowId));
+  }
+  async get(workflowId) {
+    const data = await redis.hgetall(this.getKey(workflowId));
+    if (!data || Object.keys(data).length === 0) {
+      return null;
+    }
+    return {
+      workflowId: data.workflowId,
+      name: data.name,
+      status: data.status,
+      step: parseInt(data.step, 10),
+      input: JSON.parse(data.input),
+      state: JSON.parse(data.state),
+      result: data.result ? JSON.parse(data.result) : void 0,
+      error: data.error,
+      parentId: data.parentId,
+      subWorkflowId: data.subWorkflowId,
+      awaitingEvent: data.awaitingEvent
+    };
+  }
+  async create(instanceData) {
+    const instance = {
+      ...instanceData,
+      step: 0,
+      state: {}
+    };
+    const pipeline = redis.pipeline();
+    pipeline.hset(this.getKey(instance.workflowId), {
+      ...instance,
+      input: JSON.stringify(instance.input),
+      state: JSON.stringify(instance.state)
+    });
+    await pipeline.exec();
+  }
+  async updateState(workflowId, state) {
+    await redis.hset(this.getKey(workflowId), "state", JSON.stringify(state));
+  }
+  async updateStatus(workflowId, status, extraFields = {}) {
+    await redis.hset(this.getKey(workflowId), { status, ...extraFields });
+  }
+  async incrementStep(workflowId) {
+    return redis.hincrby(this.getKey(workflowId), "step", 1);
+  }
+  async complete(workflowId, result) {
+    await redis.hset(this.getKey(workflowId), {
+      status: WorkflowStatus.COMPLETED,
+      result: JSON.stringify(result ?? null)
+    });
+  }
+  async fail(workflowId, error) {
+    await redis.hset(this.getKey(workflowId), {
+      status: WorkflowStatus.FAILED,
+      error: error.message
+    });
+  }
+  // --- Métodos para operaciones de Runtime ---
+  async scheduleSleep(workflowId, wakeUpAt) {
+    await this.updateStatus(workflowId, WorkflowStatus.SLEEPING);
+    await redis.zadd(SLEEPERS_KEY, wakeUpAt, workflowId);
+  }
+  async getWorkflowsToWake() {
+    const now = Date.now();
+    const ids = await redis.zrangebyscore(SLEEPERS_KEY, 0, now);
+    if (ids.length > 0) {
+      await redis.zrem(SLEEPERS_KEY, ...ids);
+    }
+    return ids;
+  }
+  async enqueueTask(task) {
+    await redis.lpush(TASK_QUEUE_KEY, JSON.stringify(task));
+  }
+  async resumeForCatch(workflowId, newState, catchStep) {
+    const key = this.getKey(workflowId);
+    await redis.hset(key, {
+      state: JSON.stringify(newState),
+      status: WorkflowStatus.RUNNING,
+      step: catchStep.toString()
+    });
+  }
+};
+var DurableRuntime = class {
+  durableFns = /* @__PURE__ */ new Map();
+  repo = new WorkflowRepository();
+  workerId = randomUUID();
+  isRunning = false;
+  schedulerInterval = null;
+  sourceRoot;
+  constructor(options) {
+    this.sourceRoot = options.sourceRoot;
+  }
+  async start(durableFn, options, parentId) {
+    if (options.workflowId) {
+      const existing = await this.repo.get(options.workflowId);
+      if (existing && existing.status !== WorkflowStatus.COMPLETED && existing.status !== WorkflowStatus.FAILED) {
+        throw new Error(
+          `Workflow with ID '${options.workflowId}' already exists and is in a running state (${existing.status}).`
+        );
+      }
+    }
+    const workflowId = options.workflowId ?? randomUUID();
+    console.log(`[RUNTIME] Iniciando workflow '${durableFn.name}' con ID: ${workflowId}`);
+    await this.repo.create({
+      workflowId,
+      name: durableFn.name,
+      status: WorkflowStatus.RUNNING,
+      input: options.input,
+      parentId
+    });
+    this.scheduleExecution(workflowId, durableFn);
+    return workflowId;
+  }
+  async scheduleExecution(workflowId, durableFn, lastResult, initialError) {
+    setImmediate(() => {
+      this._executeStep(workflowId, durableFn, lastResult, initialError).catch(
+        (err) => {
+          console.error(
+            `[RUNTIME-FATAL] Error no manejado en la ejecuci\xF3n del workflow ${workflowId}`,
+            err
+          );
+        }
+      );
+    });
+  }
+  async _executeStep(workflowId, durableFn, lastResult, initialError) {
+    const hasLock = await this.repo.acquireLock(workflowId);
+    if (!hasLock) {
+      console.log(
+        `[RUNTIME-LOCK] No se pudo adquirir el bloqueo para ${workflowId}, otro proceso est\xE1 trabajando. Se omitir\xE1 este ciclo.`
+      );
+      return;
+    }
+    let executionError = null;
+    try {
+      if (initialError) {
+        throw initialError;
+      }
+      const instance = await this.repo.get(workflowId);
+      if (!instance) return;
+      if (instance.status !== WorkflowStatus.RUNNING) {
+        console.log(
+          `[RUNTIME] Se intent\xF3 ejecutar el workflow ${workflowId} pero su estado es ${instance.status}. Omitiendo.`
+        );
+        return;
+      }
+      const context = {
+        workflowId,
+        step: instance.step,
+        input: instance.input,
+        state: instance.state,
+        result: lastResult,
+        log: (message) => console.log(`[WF:${workflowId}] ${message}`)
+      };
+      const instruction = await durableFn.execute(context);
+      await this.repo.updateState(workflowId, context.state);
+      await this.handleInstruction(instruction, context, instance.name);
+    } catch (error) {
+      executionError = error instanceof Error ? error : new Error(String(error));
+      console.error(`[RUNTIME] Error en workflow ${workflowId}:`, executionError);
+    } finally {
+      await this.repo.releaseLock(workflowId);
+    }
+    if (executionError) {
+      await this.handleFailure(workflowId, executionError, durableFn);
+    }
+  }
+  async handleInstruction(instruction, context, durableFunctionName) {
+    const { workflowId } = context;
+    switch (instruction.type) {
+      case "SCHEDULE_TASK": {
+        await this.repo.enqueueTask({
+          workflowId,
+          durableFunctionName,
+          ...instruction
+        });
+        break;
+      }
+      case "SCHEDULE_SLEEP": {
+        const durationMs = ms(instruction.duration);
+        if (typeof durationMs !== "number") {
+          throw new Error(
+            `Invalid time value provided to bSleep: "${instruction.duration}"`
+          );
+        }
+        const wakeUpAt = Date.now() + durationMs;
+        await this.repo.scheduleSleep(workflowId, wakeUpAt);
+        break;
+      }
+      case "WAIT_FOR_EVENT": {
+        await this.repo.updateStatus(workflowId, WorkflowStatus.AWAITING_EVENT, {
+          awaitingEvent: instruction.eventName
+        });
+        await redis.sadd(`events:awaiting:${instruction.eventName}`, workflowId);
+        break;
+      }
+      case "EXECUTE_SUBWORKFLOW": {
+        const subDurableFn = this.durableFns.get(instruction.workflowName);
+        if (!subDurableFn)
+          throw new Error(
+            `Sub-workflow '${instruction.workflowName}' no encontrado.`
+          );
+        const subWorkflowId = await this.start(
+          subDurableFn,
+          { input: instruction.input },
+          workflowId
+        );
+        await this.repo.updateStatus(
+          workflowId,
+          WorkflowStatus.AWAITING_SUBWORKFLOW,
+          { subWorkflowId }
+        );
+        break;
+      }
+      case "COMPLETE": {
+        await this.repo.complete(workflowId, instruction.result);
+        await this.resumeParentWorkflow(workflowId);
+        break;
+      }
+    }
+  }
+  async handleFailure(workflowId, error, durableFn) {
+    const hasLock = await this.repo.acquireLock(workflowId, 20);
+    if (!hasLock) {
+      console.warn(
+        `[RUNTIME-FAIL] No se pudo adquirir lock para manejar fallo en ${workflowId}. Reintentando m\xE1s tarde...`
+      );
+      return;
+    }
+    try {
+      const instance = await this.repo.get(workflowId);
+      if (!instance || instance.status === WorkflowStatus.FAILED) return;
+      const stack = instance.state.tryCatchStack;
+      if (stack && stack.length > 0) {
+        const handler = stack.pop();
+        const nextStep = handler?.catchStep;
+        if (nextStep !== void 0) {
+          console.log(
+            `[RUNTIME-FAIL] Excepci\xF3n capturada en ${workflowId}. Saltando a la cl\xE1usula CATCH en el paso ${nextStep}.`
+          );
+          await this.repo.resumeForCatch(workflowId, instance.state, nextStep);
+          this.scheduleExecution(workflowId, durableFn, {
+            name: error.name,
+            message: error.message,
+            stack: error.stack
+          });
+          return;
+        }
+      }
+      console.error(`[RUNTIME] Error no capturado en workflow ${workflowId}:`, error);
+      await this.repo.fail(workflowId, error);
+      await this.propagateFailureToParent(workflowId, error);
+    } finally {
+      await this.repo.releaseLock(workflowId);
+    }
+  }
+  async resumeParentWorkflow(completedWorkflowId) {
+    const completedInstance = await this.repo.get(completedWorkflowId);
+    if (!completedInstance?.parentId) return;
+    const parentId = completedInstance.parentId;
+    const parentInstance = await this.repo.get(parentId);
+    if (!parentInstance || parentInstance.status !== WorkflowStatus.AWAITING_SUBWORKFLOW || parentInstance.subWorkflowId !== completedWorkflowId) {
+      return;
+    }
+    console.log(`[RUNTIME] Reanudando workflow padre ${parentId}.`);
+    const durableFn = this.durableFns.get(parentInstance.name);
+    if (!durableFn) {
+      await this.repo.fail(
+        parentId,
+        new Error(`Definici\xF3n del workflow '${parentInstance.name}' no encontrada.`)
+      );
+      return;
+    }
+    await this.repo.updateStatus(parentId, WorkflowStatus.RUNNING, { subWorkflowId: "" });
+    await this.repo.incrementStep(parentId);
+    this.scheduleExecution(parentId, durableFn, completedInstance.result);
+  }
+  async propagateFailureToParent(failedWorkflowId, error) {
+    const failedInstance = await this.repo.get(failedWorkflowId);
+    if (!failedInstance?.parentId) return;
+    const parentId = failedInstance.parentId;
+    const parentInstance = await this.repo.get(parentId);
+    if (!parentInstance || parentInstance.status !== WorkflowStatus.AWAITING_SUBWORKFLOW || parentInstance.subWorkflowId !== failedWorkflowId) {
+      return;
+    }
+    console.log(
+      `[RUNTIME] Propagando fallo del sub-workflow ${failedWorkflowId} al padre ${parentId}.`
+    );
+    const durableFn = this.durableFns.get(parentInstance.name);
+    if (!durableFn) {
+      await this.repo.fail(
+        parentId,
+        new Error(
+          `Definici\xF3n del workflow '${parentInstance.name}' no encontrada al propagar fallo.`
+        )
+      );
+      return;
+    }
+    await this.repo.updateStatus(parentId, WorkflowStatus.RUNNING, { subWorkflowId: "" });
+    const propagationError = new Error(
+      `Sub-workflow '${failedInstance.name}' (${failedWorkflowId}) fall\xF3: ${error.message}`
+    );
+    propagationError.stack = error.stack;
+    this.scheduleExecution(parentId, durableFn, void 0, propagationError);
+  }
+  async sendEvent(workflowId, eventName, payload) {
+    let hasLock = false;
+    for (let i = 0; i < 3; i++) {
+      hasLock = await this.repo.acquireLock(workflowId);
+      if (hasLock) {
+        break;
+      }
+      await new Promise((resolve2) => setTimeout(resolve2, 50));
+    }
+    if (!hasLock) {
+      console.warn(
+        `[RUNTIME-LOCK] No se pudo adquirir el bloqueo para sendEvent en ${workflowId}. El evento podr\xEDa ser descartado o retrasado.`
+      );
+      return;
+    }
+    try {
+      const instance = await this.repo.get(workflowId);
+      if (!instance) {
+        console.warn(
+          `[RUNTIME] Se intent\xF3 enviar un evento a un workflow no existente: ${workflowId}`
+        );
+        return;
+      }
+      if (instance.status !== WorkflowStatus.AWAITING_EVENT || instance.awaitingEvent !== eventName) {
+        console.warn(
+          `[RUNTIME] El workflow ${workflowId} no est\xE1 esperando el evento '${eventName}'. Estado actual: ${instance.status}, esperando: ${instance.awaitingEvent}.`
+        );
+        return;
+      }
+      console.log(
+        `[RUNTIME] Evento '${eventName}' recibido para el workflow ${workflowId}. Reanudando...`
+      );
+      const durableFn = this.durableFns.get(instance.name);
+      if (!durableFn) {
+        console.error(
+          `[RUNTIME] La definici\xF3n de la funci\xF3n durable '${instance.name}' no se encontr\xF3 para el workflow ${workflowId}.`
+        );
+        await this.repo.fail(
+          workflowId,
+          new Error(`Funci\xF3n durable '${instance.name}' no encontrada.`)
+        );
+        return;
+      }
+      await this.repo.updateStatus(workflowId, WorkflowStatus.RUNNING, {
+        awaitingEvent: ""
+      });
+      await redis.srem(`events:awaiting:${eventName}`, workflowId);
+      await this.repo.incrementStep(workflowId);
+      this.scheduleExecution(workflowId, durableFn, payload);
+    } catch (error) {
+      console.error(
+        `[RUNTIME] Error procesando el evento '${eventName}' para el workflow ${workflowId}:`,
+        error
+      );
+      await this.repo.fail(
+        workflowId,
+        new Error(
+          `Fallo al procesar el evento: ${error instanceof Error ? error.message : String(error)}`
+        )
+      );
+    } finally {
+      await this.repo.releaseLock(workflowId);
+    }
+  }
+  startScheduler() {
+    if (this.schedulerInterval) return;
+    console.log("[SCHEDULER] Scheduler iniciado.");
+    const checkSleepers = async () => {
+      const workflowIds = await this.repo.getWorkflowsToWake();
+      for (const workflowId of workflowIds) {
+        const instance = await this.repo.get(workflowId);
+        if (instance) {
+          const durableFn = this.durableFns.get(instance.name);
+          if (durableFn) {
+            console.log(`[SCHEDULER] Reanudando workflow ${workflowId}`);
+            await this.repo.updateStatus(
+              workflowId,
+              WorkflowStatus.RUNNING
+            );
+            await this.repo.incrementStep(workflowId);
+            this.scheduleExecution(workflowId, durableFn, null);
+          }
+        }
+      }
+    };
+    this.schedulerInterval = setInterval(checkSleepers, 2e3);
+  }
+  startWorker() {
+    if (this.isRunning) return;
+    this.isRunning = true;
+    const processingQueueKey = `${TASK_QUEUE_KEY}:processing:${this.workerId}`;
+    console.log(`[WORKER] Worker ${this.workerId} iniciado, esperando tareas...`);
+    const listenForTasks = async () => {
+      while (this.isRunning) {
+        try {
+          const taskString = await blockingRedis.brpoplpush(
+            TASK_QUEUE_KEY,
+            processingQueueKey,
+            0
+          );
+          if (!taskString) continue;
+          const task = JSON.parse(taskString);
+          console.log(`[WORKER] Tarea recibida: ${task.exportName}`);
+          try {
+            let module;
+            if (task.modulePath.startsWith("virtual:")) {
+              module = await import(task.modulePath);
+            } else {
+              const moduleFullPath = resolve(
+                this.sourceRoot,
+                task.modulePath
+              );
+              module = await import(moduleFullPath);
+            }
+            const serviceFn = module[task.exportName];
+            if (typeof serviceFn !== "function")
+              throw new Error(
+                `'${task.exportName}' no es una funci\xF3n.`
+              );
+            const serviceResult = await serviceFn(...task.args);
+            const durableFn = this.durableFns.get(task.durableFunctionName);
+            if (durableFn) {
+              await this.repo.incrementStep(task.workflowId);
+              this.scheduleExecution(
+                task.workflowId,
+                durableFn,
+                serviceResult
+              );
+            }
+            await redis.lrem(processingQueueKey, 1, taskString);
+          } catch (taskError) {
+            const err = taskError instanceof Error ? taskError : new Error(String(taskError));
+            console.error(
+              `[WORKER] Falla en la tarea '${task.exportName}' para workflow ${task.workflowId}`,
+              err
+            );
+            const durableFn = this.durableFns.get(task.durableFunctionName);
+            if (durableFn) {
+              await this.handleFailure(task.workflowId, err, durableFn);
+            } else {
+              await this.repo.fail(
+                task.workflowId,
+                new Error(
+                  `Definici\xF3n de workflow ${task.durableFunctionName} no encontrada durante el manejo de fallos.`
+                )
+              );
+            }
+            console.log(
+              `[WORKER] Eliminando tarea procesada (con error manejado): ${task.exportName}`
+            );
+            await redis.lrem(processingQueueKey, 1, taskString);
+          }
+        } catch (error) {
+          if (!this.isRunning) break;
+          console.error("[WORKER] Error de infraestructura:", error);
+          await new Promise((resolve2) => setTimeout(resolve2, 5e3));
+        }
+      }
+    };
+    listenForTasks();
+  }
+  run(durableFns) {
+    this.durableFns = durableFns;
+    this.startWorker();
+    this.startScheduler();
+  }
+  stop() {
+    this.isRunning = false;
+    if (this.schedulerInterval) {
+      clearInterval(this.schedulerInterval);
+    }
+    console.log("[RUNTIME] Solicitando detenci\xF3n...");
+  }
+};
+
+// src/define.ts
+var bDurable = (def) => {
+  return def;
+};
+
+// src/index.ts
+function bDurableInitialize(options) {
+  console.log("--- Inicializando Sistema Durable ---");
+  configurePersistence({
+    commandClient: options.redisClient,
+    blockingClient: options.blockingRedisClient
+  });
+  const runtime = new DurableRuntime({ sourceRoot: options.sourceRoot });
+  runtime.run(options.durableFunctions);
+  return {
+    start: runtime.start.bind(runtime),
+    sendEvent: (durableFn, workflowId, eventName, payload) => {
+      return runtime.sendEvent(workflowId, eventName, payload);
+    },
+    stop: runtime.stop.bind(runtime),
+    runtime
+  };
+}
+export {
+  bDurable,
+  bDurableInitialize
+};
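
The runtime above expects two ioredis connections, one for ordinary commands and one dedicated to the blocking BRPOPLPUSH read in the worker loop, and bDurableInitialize() wires both into the persistence layer before starting the worker and scheduler. A minimal wiring sketch under stated assumptions: the manifest import path and the Redis URL are hypothetical, and durableFunctions stands for the Map<string, DurableFunction> that the compiler step emits.

import { Redis } from "ioredis";
import { bDurableInitialize } from "@bobtail.software/b-durable";
// Hypothetical path: wherever the build step writes the compiled workflow map.
import { durableFunctions } from "./durable-manifest.mjs";

const redisUrl = process.env.REDIS_URL ?? "redis://localhost:6379";
const redisClient = new Redis(redisUrl);         // ordinary commands (HSET, LPUSH, ZADD, ...)
const blockingRedisClient = new Redis(redisUrl); // reserved for the blocking BRPOPLPUSH

const durable = bDurableInitialize({
  durableFunctions,
  sourceRoot: process.cwd(), // base directory used to resolve a task's modulePath
  redisClient,
  blockingRedisClient,
});

// durable.start() and durable.sendEvent() are now available; stop() asks the worker
// loop to exit and clears the sleep-scheduler interval.
process.on("SIGTERM", () => durable.stop());

Because configurePersistence() ignores repeated calls, bDurableInitialize() should be invoked once per process.
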
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@bobtail.software/b-durable",
-  "version": "1.0.
+  "version": "1.0.5",
   "main": "dist/index.mjs",
   "types": "dist/index.d.mts",
   "description": "A system for creating durable, resilient, and type-safe workflows in JavaScript/TypeScript.",
@@ -23,6 +23,7 @@
   "dependencies": {
     "ioredis": "^5.8.2",
     "ms": "^2.1.3",
+    "pino": "^10.1.0",
     "prettier": "^3.6.2",
     "ts-morph": "^27.0.2"
   },