@bobtail.software/b-durable 1.0.5 → 1.0.7

This diff shows the content of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
package/dist/index.mjs CHANGED
@@ -1,551 +1,91 @@
1
- // src/runtime/persistence.ts
2
- var redis;
3
- var blockingRedis;
4
- function configurePersistence(clients) {
5
- if (redis || blockingRedis) {
6
- console.warn(
7
- "[Persistence] Los clientes de Redis ya han sido configurados. Omitiendo."
8
- );
9
- return;
10
- }
11
- redis = clients.commandClient;
12
- blockingRedis = clients.blockingClient;
13
- }
1
+ import{randomUUID as z}from"crypto";import q from"ioredis";var E="queue:tasks",I="durable:sleepers",R="worker:heartbeat:",k="durable:workers",x="queue:dead",L="queue:tasks:delayed";function C(i){return`workflow:${i}`}var c={RUNNING:"RUNNING",SLEEPING:"SLEEPING",COMPLETED:"COMPLETED",FAILED:"FAILED",AWAITING_SIGNAL:"AWAITING_SIGNAL",AWAITING_SUBWORKFLOW:"AWAITING_SUBWORKFLOW",CANCELLING:"CANCELLING",CANCELLED:"CANCELLED",VERSION_MISMATCH:"VERSION_MISMATCH"};var l,y;function N(i){if(l||y){console.warn("[Persistence] Los clientes de Redis ya han sido configurados. Omitiendo.");return}l=i.commandClient,y=i.blockingClient}import{randomUUID as W}from"crypto";import v from"ms";import{resolve as V}from"path";var S=class extends Error{isCancellation=!0;constructor(t){super(t),this.name="WorkflowCancellationError"}};var P=`
2
+ if redis.call("get", KEYS[1]) == ARGV[1] then
3
+ return redis.call("del", KEYS[1])
4
+ else
5
+ return 0
6
+ end
7
+ `,D=`
8
+ if redis.call("get", KEYS[1]) == ARGV[1] then
9
+ return redis.call("expire", KEYS[1], ARGV[2])
10
+ else
11
+ return 0
12
+ end
13
+ `,A=`
14
+ local lockKey = KEYS[1]
15
+ local workflowKey = KEYS[2]
16
+ local token = ARGV[1]
17
+ local state = ARGV[2]
18
+ local updatedAt = ARGV[3]
19
+ local step = ARGV[4]
14
20
 
15
- // src/runtime/runtime.ts
16
- import { randomUUID } from "crypto";
17
- import ms from "ms";
18
- import { resolve } from "path";
21
+ if redis.call("get", lockKey) == token then
22
+ redis.call("hset", workflowKey, "state", state, "updatedAt", updatedAt, "step", step)
23
+ return 1
24
+ else
25
+ return 0
26
+ end
27
+ `,_=`
28
+ local lockKey = KEYS[1]
29
+ local workflowKey = KEYS[2]
30
+ local token = ARGV[1]
19
31
 
20
- // src/runtime/constants.ts
21
- var TASK_QUEUE_KEY = "queue:tasks";
22
- var SLEEPERS_KEY = "durable:sleepers";
23
- var WorkflowStatus = {
24
- RUNNING: "RUNNING",
25
- SLEEPING: "SLEEPING",
26
- COMPLETED: "COMPLETED",
27
- FAILED: "FAILED",
28
- AWAITING_EVENT: "AWAITING_EVENT",
29
- AWAITING_SUBWORKFLOW: "AWAITING_SUBWORKFLOW"
30
- };
32
+ if redis.call("get", lockKey) == token then
33
+ return redis.call("hincrby", workflowKey, "step", 1)
34
+ else
35
+ return -1
36
+ end
37
+ `,F=`
38
+ local lockKey = KEYS[1]
39
+ local workflowKey = KEYS[2]
40
+ local token = ARGV[1]
41
+ local result = ARGV[2]
42
+ local status = ARGV[3]
31
43
 
32
- // src/runtime/runtime.ts
33
- var WorkflowRepository = class {
34
- getKey(workflowId) {
35
- return `workflow:${workflowId}`;
36
- }
37
- getLockKey(workflowId) {
38
- return `workflow:${workflowId}:lock`;
39
- }
40
- async acquireLock(workflowId, lockTimeoutSeconds = 10) {
41
- const lockKey = this.getLockKey(workflowId);
42
- const result = await redis.set(lockKey, "locked", "EX", lockTimeoutSeconds, "NX");
43
- return result === "OK";
44
- }
45
- async releaseLock(workflowId) {
46
- await redis.del(this.getLockKey(workflowId));
47
- }
48
- async get(workflowId) {
49
- const data = await redis.hgetall(this.getKey(workflowId));
50
- if (!data || Object.keys(data).length === 0) {
51
- return null;
52
- }
53
- return {
54
- workflowId: data.workflowId,
55
- name: data.name,
56
- status: data.status,
57
- step: parseInt(data.step, 10),
58
- input: JSON.parse(data.input),
59
- state: JSON.parse(data.state),
60
- result: data.result ? JSON.parse(data.result) : void 0,
61
- error: data.error,
62
- parentId: data.parentId,
63
- subWorkflowId: data.subWorkflowId,
64
- awaitingEvent: data.awaitingEvent
65
- };
66
- }
67
- async create(instanceData) {
68
- const instance = {
69
- ...instanceData,
70
- step: 0,
71
- state: {}
72
- };
73
- const pipeline = redis.pipeline();
74
- pipeline.hset(this.getKey(instance.workflowId), {
75
- ...instance,
76
- input: JSON.stringify(instance.input),
77
- state: JSON.stringify(instance.state)
78
- });
79
- await pipeline.exec();
80
- }
81
- async updateState(workflowId, state) {
82
- await redis.hset(this.getKey(workflowId), "state", JSON.stringify(state));
83
- }
84
- async updateStatus(workflowId, status, extraFields = {}) {
85
- await redis.hset(this.getKey(workflowId), { status, ...extraFields });
86
- }
87
- async incrementStep(workflowId) {
88
- return redis.hincrby(this.getKey(workflowId), "step", 1);
89
- }
90
- async complete(workflowId, result) {
91
- await redis.hset(this.getKey(workflowId), {
92
- status: WorkflowStatus.COMPLETED,
93
- result: JSON.stringify(result ?? null)
94
- });
95
- }
96
- async fail(workflowId, error) {
97
- await redis.hset(this.getKey(workflowId), {
98
- status: WorkflowStatus.FAILED,
99
- error: error.message
100
- });
101
- }
102
- // --- Métodos para operaciones de Runtime ---
103
- async scheduleSleep(workflowId, wakeUpAt) {
104
- await this.updateStatus(workflowId, WorkflowStatus.SLEEPING);
105
- await redis.zadd(SLEEPERS_KEY, wakeUpAt, workflowId);
106
- }
107
- async getWorkflowsToWake() {
108
- const now = Date.now();
109
- const ids = await redis.zrangebyscore(SLEEPERS_KEY, 0, now);
110
- if (ids.length > 0) {
111
- await redis.zrem(SLEEPERS_KEY, ...ids);
112
- }
113
- return ids;
114
- }
115
- async enqueueTask(task) {
116
- await redis.lpush(TASK_QUEUE_KEY, JSON.stringify(task));
117
- }
118
- async resumeForCatch(workflowId, newState, catchStep) {
119
- const key = this.getKey(workflowId);
120
- await redis.hset(key, {
121
- state: JSON.stringify(newState),
122
- status: WorkflowStatus.RUNNING,
123
- step: catchStep.toString()
124
- });
125
- }
126
- };
127
- var DurableRuntime = class {
128
- durableFns = /* @__PURE__ */ new Map();
129
- repo = new WorkflowRepository();
130
- workerId = randomUUID();
131
- isRunning = false;
132
- schedulerInterval = null;
133
- sourceRoot;
134
- constructor(options) {
135
- this.sourceRoot = options.sourceRoot;
136
- }
137
- async start(durableFn, options, parentId) {
138
- if (options.workflowId) {
139
- const existing = await this.repo.get(options.workflowId);
140
- if (existing && existing.status !== WorkflowStatus.COMPLETED && existing.status !== WorkflowStatus.FAILED) {
141
- throw new Error(
142
- `Workflow with ID '${options.workflowId}' already exists and is in a running state (${existing.status}).`
143
- );
144
- }
145
- }
146
- const workflowId = options.workflowId ?? randomUUID();
147
- console.log(`[RUNTIME] Iniciando workflow '${durableFn.name}' con ID: ${workflowId}`);
148
- await this.repo.create({
149
- workflowId,
150
- name: durableFn.name,
151
- status: WorkflowStatus.RUNNING,
152
- input: options.input,
153
- parentId
154
- });
155
- this.scheduleExecution(workflowId, durableFn);
156
- return workflowId;
157
- }
158
- async scheduleExecution(workflowId, durableFn, lastResult, initialError) {
159
- setImmediate(() => {
160
- this._executeStep(workflowId, durableFn, lastResult, initialError).catch(
161
- (err) => {
162
- console.error(
163
- `[RUNTIME-FATAL] Error no manejado en la ejecuci\xF3n del workflow ${workflowId}`,
164
- err
165
- );
166
- }
167
- );
168
- });
169
- }
170
- async _executeStep(workflowId, durableFn, lastResult, initialError) {
171
- const hasLock = await this.repo.acquireLock(workflowId);
172
- if (!hasLock) {
173
- console.log(
174
- `[RUNTIME-LOCK] No se pudo adquirir el bloqueo para ${workflowId}, otro proceso est\xE1 trabajando. Se omitir\xE1 este ciclo.`
175
- );
176
- return;
177
- }
178
- let executionError = null;
179
- try {
180
- if (initialError) {
181
- throw initialError;
182
- }
183
- const instance = await this.repo.get(workflowId);
184
- if (!instance) return;
185
- if (instance.status !== WorkflowStatus.RUNNING) {
186
- console.log(
187
- `[RUNTIME] Se intent\xF3 ejecutar el workflow ${workflowId} pero su estado es ${instance.status}. Omitiendo.`
188
- );
189
- return;
190
- }
191
- const context = {
192
- workflowId,
193
- step: instance.step,
194
- input: instance.input,
195
- state: instance.state,
196
- result: lastResult,
197
- log: (message) => console.log(`[WF:${workflowId}] ${message}`)
198
- };
199
- const instruction = await durableFn.execute(context);
200
- await this.repo.updateState(workflowId, context.state);
201
- await this.handleInstruction(instruction, context, instance.name);
202
- } catch (error) {
203
- executionError = error instanceof Error ? error : new Error(String(error));
204
- console.error(`[RUNTIME] Error en workflow ${workflowId}:`, executionError);
205
- } finally {
206
- await this.repo.releaseLock(workflowId);
207
- }
208
- if (executionError) {
209
- await this.handleFailure(workflowId, executionError, durableFn);
210
- }
211
- }
212
- async handleInstruction(instruction, context, durableFunctionName) {
213
- const { workflowId } = context;
214
- switch (instruction.type) {
215
- case "SCHEDULE_TASK": {
216
- await this.repo.enqueueTask({
217
- workflowId,
218
- durableFunctionName,
219
- ...instruction
220
- });
221
- break;
222
- }
223
- case "SCHEDULE_SLEEP": {
224
- const durationMs = ms(instruction.duration);
225
- if (typeof durationMs !== "number") {
226
- throw new Error(
227
- `Invalid time value provided to bSleep: "${instruction.duration}"`
228
- );
229
- }
230
- const wakeUpAt = Date.now() + durationMs;
231
- await this.repo.scheduleSleep(workflowId, wakeUpAt);
232
- break;
233
- }
234
- case "WAIT_FOR_EVENT": {
235
- await this.repo.updateStatus(workflowId, WorkflowStatus.AWAITING_EVENT, {
236
- awaitingEvent: instruction.eventName
237
- });
238
- await redis.sadd(`events:awaiting:${instruction.eventName}`, workflowId);
239
- break;
240
- }
241
- case "EXECUTE_SUBWORKFLOW": {
242
- const subDurableFn = this.durableFns.get(instruction.workflowName);
243
- if (!subDurableFn)
244
- throw new Error(
245
- `Sub-workflow '${instruction.workflowName}' no encontrado.`
246
- );
247
- const subWorkflowId = await this.start(
248
- subDurableFn,
249
- { input: instruction.input },
250
- workflowId
251
- );
252
- await this.repo.updateStatus(
253
- workflowId,
254
- WorkflowStatus.AWAITING_SUBWORKFLOW,
255
- { subWorkflowId }
256
- );
257
- break;
258
- }
259
- case "COMPLETE": {
260
- await this.repo.complete(workflowId, instruction.result);
261
- await this.resumeParentWorkflow(workflowId);
262
- break;
263
- }
264
- }
265
- }
266
- async handleFailure(workflowId, error, durableFn) {
267
- const hasLock = await this.repo.acquireLock(workflowId, 20);
268
- if (!hasLock) {
269
- console.warn(
270
- `[RUNTIME-FAIL] No se pudo adquirir lock para manejar fallo en ${workflowId}. Reintentando m\xE1s tarde...`
271
- );
272
- return;
273
- }
274
- try {
275
- const instance = await this.repo.get(workflowId);
276
- if (!instance || instance.status === WorkflowStatus.FAILED) return;
277
- const stack = instance.state.tryCatchStack;
278
- if (stack && stack.length > 0) {
279
- const handler = stack.pop();
280
- const nextStep = handler?.catchStep;
281
- if (nextStep !== void 0) {
282
- console.log(
283
- `[RUNTIME-FAIL] Excepci\xF3n capturada en ${workflowId}. Saltando a la cl\xE1usula CATCH en el paso ${nextStep}.`
284
- );
285
- await this.repo.resumeForCatch(workflowId, instance.state, nextStep);
286
- this.scheduleExecution(workflowId, durableFn, {
287
- name: error.name,
288
- message: error.message,
289
- stack: error.stack
290
- });
291
- return;
292
- }
293
- }
294
- console.error(`[RUNTIME] Error no capturado en workflow ${workflowId}:`, error);
295
- await this.repo.fail(workflowId, error);
296
- await this.propagateFailureToParent(workflowId, error);
297
- } finally {
298
- await this.repo.releaseLock(workflowId);
299
- }
300
- }
301
- async resumeParentWorkflow(completedWorkflowId) {
302
- const completedInstance = await this.repo.get(completedWorkflowId);
303
- if (!completedInstance?.parentId) return;
304
- const parentId = completedInstance.parentId;
305
- const parentInstance = await this.repo.get(parentId);
306
- if (!parentInstance || parentInstance.status !== WorkflowStatus.AWAITING_SUBWORKFLOW || parentInstance.subWorkflowId !== completedWorkflowId) {
307
- return;
308
- }
309
- console.log(`[RUNTIME] Reanudando workflow padre ${parentId}.`);
310
- const durableFn = this.durableFns.get(parentInstance.name);
311
- if (!durableFn) {
312
- await this.repo.fail(
313
- parentId,
314
- new Error(`Definici\xF3n del workflow '${parentInstance.name}' no encontrada.`)
315
- );
316
- return;
317
- }
318
- await this.repo.updateStatus(parentId, WorkflowStatus.RUNNING, { subWorkflowId: "" });
319
- await this.repo.incrementStep(parentId);
320
- this.scheduleExecution(parentId, durableFn, completedInstance.result);
321
- }
322
- async propagateFailureToParent(failedWorkflowId, error) {
323
- const failedInstance = await this.repo.get(failedWorkflowId);
324
- if (!failedInstance?.parentId) return;
325
- const parentId = failedInstance.parentId;
326
- const parentInstance = await this.repo.get(parentId);
327
- if (!parentInstance || parentInstance.status !== WorkflowStatus.AWAITING_SUBWORKFLOW || parentInstance.subWorkflowId !== failedWorkflowId) {
328
- return;
329
- }
330
- console.log(
331
- `[RUNTIME] Propagando fallo del sub-workflow ${failedWorkflowId} al padre ${parentId}.`
332
- );
333
- const durableFn = this.durableFns.get(parentInstance.name);
334
- if (!durableFn) {
335
- await this.repo.fail(
336
- parentId,
337
- new Error(
338
- `Definici\xF3n del workflow '${parentInstance.name}' no encontrada al propagar fallo.`
339
- )
340
- );
341
- return;
342
- }
343
- await this.repo.updateStatus(parentId, WorkflowStatus.RUNNING, { subWorkflowId: "" });
344
- const propagationError = new Error(
345
- `Sub-workflow '${failedInstance.name}' (${failedWorkflowId}) fall\xF3: ${error.message}`
346
- );
347
- propagationError.stack = error.stack;
348
- this.scheduleExecution(parentId, durableFn, void 0, propagationError);
349
- }
350
- async sendEvent(workflowId, eventName, payload) {
351
- let hasLock = false;
352
- for (let i = 0; i < 3; i++) {
353
- hasLock = await this.repo.acquireLock(workflowId);
354
- if (hasLock) {
355
- break;
356
- }
357
- await new Promise((resolve2) => setTimeout(resolve2, 50));
358
- }
359
- if (!hasLock) {
360
- console.warn(
361
- `[RUNTIME-LOCK] No se pudo adquirir el bloqueo para sendEvent en ${workflowId}. El evento podr\xEDa ser descartado o retrasado.`
362
- );
363
- return;
364
- }
365
- try {
366
- const instance = await this.repo.get(workflowId);
367
- if (!instance) {
368
- console.warn(
369
- `[RUNTIME] Se intent\xF3 enviar un evento a un workflow no existente: ${workflowId}`
370
- );
371
- return;
372
- }
373
- if (instance.status !== WorkflowStatus.AWAITING_EVENT || instance.awaitingEvent !== eventName) {
374
- console.warn(
375
- `[RUNTIME] El workflow ${workflowId} no est\xE1 esperando el evento '${eventName}'. Estado actual: ${instance.status}, esperando: ${instance.awaitingEvent}.`
376
- );
377
- return;
378
- }
379
- console.log(
380
- `[RUNTIME] Evento '${eventName}' recibido para el workflow ${workflowId}. Reanudando...`
381
- );
382
- const durableFn = this.durableFns.get(instance.name);
383
- if (!durableFn) {
384
- console.error(
385
- `[RUNTIME] La definici\xF3n de la funci\xF3n durable '${instance.name}' no se encontr\xF3 para el workflow ${workflowId}.`
386
- );
387
- await this.repo.fail(
388
- workflowId,
389
- new Error(`Funci\xF3n durable '${instance.name}' no encontrada.`)
390
- );
391
- return;
392
- }
393
- await this.repo.updateStatus(workflowId, WorkflowStatus.RUNNING, {
394
- awaitingEvent: ""
395
- });
396
- await redis.srem(`events:awaiting:${eventName}`, workflowId);
397
- await this.repo.incrementStep(workflowId);
398
- this.scheduleExecution(workflowId, durableFn, payload);
399
- } catch (error) {
400
- console.error(
401
- `[RUNTIME] Error procesando el evento '${eventName}' para el workflow ${workflowId}:`,
402
- error
403
- );
404
- await this.repo.fail(
405
- workflowId,
406
- new Error(
407
- `Fallo al procesar el evento: ${error instanceof Error ? error.message : String(error)}`
408
- )
409
- );
410
- } finally {
411
- await this.repo.releaseLock(workflowId);
412
- }
413
- }
414
- startScheduler() {
415
- if (this.schedulerInterval) return;
416
- console.log("[SCHEDULER] Scheduler iniciado.");
417
- const checkSleepers = async () => {
418
- const workflowIds = await this.repo.getWorkflowsToWake();
419
- for (const workflowId of workflowIds) {
420
- const instance = await this.repo.get(workflowId);
421
- if (instance) {
422
- const durableFn = this.durableFns.get(instance.name);
423
- if (durableFn) {
424
- console.log(`[SCHEDULER] Reanudando workflow ${workflowId}`);
425
- await this.repo.updateStatus(
426
- workflowId,
427
- WorkflowStatus.RUNNING
428
- );
429
- await this.repo.incrementStep(workflowId);
430
- this.scheduleExecution(workflowId, durableFn, null);
431
- }
432
- }
433
- }
434
- };
435
- this.schedulerInterval = setInterval(checkSleepers, 2e3);
436
- }
437
- startWorker() {
438
- if (this.isRunning) return;
439
- this.isRunning = true;
440
- const processingQueueKey = `${TASK_QUEUE_KEY}:processing:${this.workerId}`;
441
- console.log(`[WORKER] Worker ${this.workerId} iniciado, esperando tareas...`);
442
- const listenForTasks = async () => {
443
- while (this.isRunning) {
444
- try {
445
- const taskString = await blockingRedis.brpoplpush(
446
- TASK_QUEUE_KEY,
447
- processingQueueKey,
448
- 0
449
- );
450
- if (!taskString) continue;
451
- const task = JSON.parse(taskString);
452
- console.log(`[WORKER] Tarea recibida: ${task.exportName}`);
453
- try {
454
- let module;
455
- if (task.modulePath.startsWith("virtual:")) {
456
- module = await import(task.modulePath);
457
- } else {
458
- const moduleFullPath = resolve(
459
- this.sourceRoot,
460
- task.modulePath
461
- );
462
- module = await import(moduleFullPath);
463
- }
464
- const serviceFn = module[task.exportName];
465
- if (typeof serviceFn !== "function")
466
- throw new Error(
467
- `'${task.exportName}' no es una funci\xF3n.`
468
- );
469
- const serviceResult = await serviceFn(...task.args);
470
- const durableFn = this.durableFns.get(task.durableFunctionName);
471
- if (durableFn) {
472
- await this.repo.incrementStep(task.workflowId);
473
- this.scheduleExecution(
474
- task.workflowId,
475
- durableFn,
476
- serviceResult
477
- );
478
- }
479
- await redis.lrem(processingQueueKey, 1, taskString);
480
- } catch (taskError) {
481
- const err = taskError instanceof Error ? taskError : new Error(String(taskError));
482
- console.error(
483
- `[WORKER] Falla en la tarea '${task.exportName}' para workflow ${task.workflowId}`,
484
- err
485
- );
486
- const durableFn = this.durableFns.get(task.durableFunctionName);
487
- if (durableFn) {
488
- await this.handleFailure(task.workflowId, err, durableFn);
489
- } else {
490
- await this.repo.fail(
491
- task.workflowId,
492
- new Error(
493
- `Definici\xF3n de workflow ${task.durableFunctionName} no encontrada durante el manejo de fallos.`
494
- )
495
- );
496
- }
497
- console.log(
498
- `[WORKER] Eliminando tarea procesada (con error manejado): ${task.exportName}`
499
- );
500
- await redis.lrem(processingQueueKey, 1, taskString);
501
- }
502
- } catch (error) {
503
- if (!this.isRunning) break;
504
- console.error("[WORKER] Error de infraestructura:", error);
505
- await new Promise((resolve2) => setTimeout(resolve2, 5e3));
506
- }
507
- }
508
- };
509
- listenForTasks();
510
- }
511
- run(durableFns) {
512
- this.durableFns = durableFns;
513
- this.startWorker();
514
- this.startScheduler();
515
- }
516
- stop() {
517
- this.isRunning = false;
518
- if (this.schedulerInterval) {
519
- clearInterval(this.schedulerInterval);
520
- }
521
- console.log("[RUNTIME] Solicitando detenci\xF3n...");
522
- }
523
- };
44
+ if redis.call("get", lockKey) == token then
45
+ redis.call("hset", workflowKey, "status", status, "result", result)
46
+ return 1
47
+ else
48
+ return 0
49
+ end
50
+ `,$=`
51
+ local lockKey = KEYS[1]
52
+ local workflowKey = KEYS[2]
53
+ local token = ARGV[1]
54
+ local errorMsg = ARGV[2]
55
+ local status = ARGV[3]
524
56
 
525
- // src/define.ts
526
- var bDurable = (def) => {
527
- return def;
528
- };
57
+ if redis.call("get", lockKey) == token then
58
+ redis.call("hset", workflowKey, "status", status, "error", errorMsg)
59
+ return 1
60
+ else
61
+ return 0
62
+ end
63
+ `,G=`
64
+ local limit = tonumber(ARGV[1])
65
+ local now = tonumber(ARGV[2])
66
+ local key = KEYS[1]
67
+
68
+ local ids = redis.call('zrangebyscore', key, 0, now, 'LIMIT', 0, limit)
69
+ if #ids > 0 then
70
+ redis.call('zrem', key, unpack(ids))
71
+ end
72
+ return ids
73
+ `,U=`
74
+ local sourceZSet = KEYS[1]
75
+ local destList = KEYS[2]
76
+ local now = tonumber(ARGV[1])
77
+ local limit = tonumber(ARGV[2])
529
78
 
530
- // src/index.ts
531
- function bDurableInitialize(options) {
532
- console.log("--- Inicializando Sistema Durable ---");
533
- configurePersistence({
534
- commandClient: options.redisClient,
535
- blockingClient: options.blockingRedisClient
536
- });
537
- const runtime = new DurableRuntime({ sourceRoot: options.sourceRoot });
538
- runtime.run(options.durableFunctions);
539
- return {
540
- start: runtime.start.bind(runtime),
541
- sendEvent: (durableFn, workflowId, eventName, payload) => {
542
- return runtime.sendEvent(workflowId, eventName, payload);
543
- },
544
- stop: runtime.stop.bind(runtime),
545
- runtime
546
- };
547
- }
548
- export {
549
- bDurable,
550
- bDurableInitialize
551
- };
79
+ -- Obtener tareas listas (score <= now)
80
+ local tasks = redis.call('zrangebyscore', sourceZSet, 0, now, 'LIMIT', 0, limit)
81
+
82
+ if #tasks > 0 then
83
+ redis.call('zrem', sourceZSet, unpack(tasks))
84
+ -- Pushear cada tarea a la lista de ejecuci\xF3n
85
+ for i, task in ipairs(tasks) do
86
+ redis.call('lpush', destList, task)
87
+ end
88
+ end
89
+
90
+ return #tasks
91
+ `;import M from"superjson";function h(i){return M.stringify(i)}function m(i){try{return M.parse(i)}catch(t){try{return JSON.parse(i)}catch(e){throw new Error(`Failed to deserialize data: ${t} ${e}`)}}}var Y={info:(i,t)=>console.log(`[INFO] ${i}`,t||""),error:(i,t)=>console.error(`[ERROR] ${i}`,t||""),warn:(i,t)=>console.warn(`[WARN] ${i}`,t||""),debug:(i,t)=>console.debug(`[DEBUG] ${i}`,t||"")},K=class{constructor(t){this.retention=t}getKey(t){return`workflow:${t}`}getLockKey(t){return`workflow:${t}:lock`}async acquireLock(t,e=10){let s=this.getLockKey(t),n=W();return await l.set(s,n,"EX",e,"NX")==="OK"?n:null}async releaseLock(t,e){await l.eval(P,1,this.getLockKey(t),e)}async renewLock(t,e,s){return await l.eval(D,1,this.getLockKey(t),e,s)===1}async get(t){let e=await l.hgetall(this.getKey(t));return!e||Object.keys(e).length===0?null:{workflowId:e.workflowId,name:e.name,version:e.version,status:e.status,step:parseInt(e.step,10),input:m(e.input),state:m(e.state),result:e.result?m(e.result):void 0,error:e.error,parentId:e.parentId,subWorkflowId:e.subWorkflowId,awaitingSignal:e.awaitingSignal||e.awaitingEvent,createdAt:e.createdAt?parseInt(e.createdAt,10):0,updatedAt:e.updatedAt?parseInt(e.updatedAt,10):0}}async create(t){let e=Date.now(),s={...t,step:0,state:{},createdAt:e,updatedAt:e},n={...s,input:h(s.input),state:h(s.state)};n.version===void 0&&delete n.version;let r=l.pipeline();r.hset(this.getKey(s.workflowId),n),await r.exec()}async updateState(t,e,s,n){if(await l.eval(A,2,this.getLockKey(t),this.getKey(t),n,h(e),Date.now(),s)===0)throw new Error(`Lock lost for workflow ${t}`)}async updateStatus(t,e,s={}){await l.hset(this.getKey(t),{status:e,...s,updatedAt:Date.now()})}async incrementStep(t,e){let s=await l.eval(_,2,this.getLockKey(t),this.getKey(t),e);if(s===-1)throw new Error(`Lock lost for workflow ${t}`);return s}async applyRetention(t){if(this.retention){let e=v(this.retention)/1e3;e>0&&await l.expire(this.getKey(t),e)}}async complete(t,e,s){if(await l.eval(F,2,this.getLockKey(t),this.getKey(t),s,h(e??null),c.COMPLETED)===0)throw new Error(`Lock lost for workflow ${t}`);await this.applyRetention(t)}async fail(t,e,s,n=c.FAILED){s?await l.eval($,2,this.getLockKey(t),this.getKey(t),s,e.message,n)===0&&console.warn(`Could not fail workflow ${t} safely: Lock lost.`):await l.hset(this.getKey(t),{status:n,error:e.message}),await this.applyRetention(t)}async scheduleSleep(t,e){await this.updateStatus(t,c.SLEEPING),await l.zadd(I,e,t)}async getWorkflowsToWake(t=100){let e=Date.now();return await l.eval(G,1,I,t,e)}async enqueueTask(t){await l.lpush(E,h(t))}async resumeForCatch(t,e,s,n){if(await l.eval(A,2,this.getLockKey(t),this.getKey(t),n,h(e),Date.now(),s)===0)throw new Error(`Lock lost for workflow ${t}`);await l.hset(this.getKey(t),{status:c.RUNNING})}async moveToDLQ(t,e){let s={...t,failedAt:Date.now(),error:e.message,stack:e.stack};await l.lpush(x,h(s))}async scheduleTaskRetry(t,e){let s=Date.now()+e;await l.zadd(L,s,h(t))}async moveDueTasksToQueue(t=100){return await l.eval(U,2,L,E,Date.now(),t)}},b=class{durableFns=new Map;repo;workerId=W();isRunning=!1;schedulerInterval=null;heartbeatInterval=null;sourceRoot;pollingInterval;logger;maxTaskRetries=3;constructor(t){this.sourceRoot=t.sourceRoot,this.repo=new K(t.retention),this.pollingInterval=t.pollingInterval||5e3,this.logger=t.logger||Y}async getState(t){let e=await this.repo.get(t);return 
e?{workflowId:e.workflowId,name:e.name,version:e.version,status:e.status,step:e.step,input:e.input,output:e.result,state:e.state,error:e.error,createdAt:e.createdAt,updatedAt:e.updatedAt}:null}async start(t,e,s){let n=e.workflowId||W();if(e.workflowId){let r=await this.repo.get(e.workflowId);if(r&&r.status!==c.COMPLETED&&r.status!==c.FAILED)throw new Error(`Workflow with ID '${e.workflowId}' already exists and is in a running state (${r.status}).`)}return this.logger.info(`[RUNTIME] Iniciando workflow '${t.name}' v${t.version} con ID: ${n}`),await this.repo.create({workflowId:n,name:t.name,version:t.version,status:c.RUNNING,input:e.input,parentId:s}),setImmediate(()=>{this._executeStep(n,t).catch(r=>{this.logger.error("Error fatal en ejecuci\xF3n inicial",{error:r,workflowId:n})})}),{workflowId:n,unsubscribe:async()=>{}}}async scheduleExecution(t,e,s,n){setImmediate(()=>{this._executeStep(t,e,s,n).catch(r=>{this.logger.error("Error no manejado en scheduleExecution",{error:r,workflowId:t})})})}async _executeStep(t,e,s,n){let r=await this.repo.acquireLock(t);if(!r)return;let o=setInterval(()=>{this.repo.renewLock(t,r,10).catch(a=>this.logger.warn(`Error renovando lock para ${t}`,{error:a}))},5e3);try{if(n)throw n;let a=await this.repo.get(t);if(!a)return;if(a.status===c.CANCELLING)throw new S(a.error||"Workflow cancelled");if(a.status!==c.RUNNING)return;let p=a.version==="undefined"?void 0:a.version,d=e.version==="undefined"?void 0:e.version;if(String(p??"")!==String(d??"")){let w=new Error(`Version mismatch: DB=${p}, Code=${d}`);await this.repo.fail(t,w,r,c.VERSION_MISMATCH);return}let u={workflowId:t,step:a.step,input:a.input,state:a.state,result:s,log:(w,T)=>this.logger.info(w,{...T,workflowId:t,step:a.step})},g=await e.execute(u);await this.repo.updateState(t,u.state,u.step,r),await this.handleInstruction(g,u,a.name,r)&&(await this.repo.incrementStep(t,r),this.scheduleExecution(t,e,void 0))}catch(a){let p=a instanceof Error?a:new Error(String(a));this.logger.error("Error en workflow",{workflowId:t,error:p.message}),await this.handleFailure(t,p,e,r)}finally{clearInterval(o),await this.repo.releaseLock(t,r)}}async handleInstruction(t,e,s,n){let{workflowId:r}=e;switch(t.type){case"SCHEDULE_TASK":return await this.repo.enqueueTask({workflowId:r,durableFunctionName:s,...t}),!1;case"SCHEDULE_SLEEP":{let o=v(t.duration);if(typeof o!="number")throw new Error(`Invalid time value provided to bSleep: "${t.duration}"`);let a=Date.now()+o;return await this.repo.scheduleSleep(r,a),!1}case"WAIT_FOR_SIGNAL":return await this.repo.updateStatus(r,c.AWAITING_SIGNAL,{awaitingSignal:t.signalName}),await l.sadd(`signals:awaiting:${t.signalName}`,r),!1;case"EXECUTE_SUBWORKFLOW":{let o=this.durableFns.get(t.workflowName);if(!o)throw new Error(`Sub-workflow '${t.workflowName}' no encontrado.`);let{workflowId:a}=await this.start(o,{input:t.input},r);return await this.repo.updateStatus(r,c.AWAITING_SUBWORKFLOW,{subWorkflowId:a}),!1}case"EMIT_EVENT":{let o=`event:${r}`,a=h({eventName:t.eventName,payload:t.payload});return await l.publish(o,a),!0}case"COMPLETE":{let o=`event:${r}`,a=h({eventName:"workflow:completed",payload:t.result});return await l.publish(o,a),await this.repo.complete(r,t.result,n),await this.resumeParentWorkflow(r),!1}}}async handleFailure(t,e,s,n){let r=n;if(!r&&(r=await this.repo.acquireLock(t,20),!r)){this.logger.warn(`No se pudo adquirir lock para fallo en ${t}`);return}try{if(e instanceof S){await this.repo.fail(t,e,r,c.CANCELLED);let u=await this.repo.get(t);u?.subWorkflowId&&await 
this.cancel(u.subWorkflowId,`Parent workflow ${t} was cancelled`);return}let o=await this.repo.get(t);if(!o||o.status===c.FAILED||o.status===c.COMPLETED)return;let a=o.state.tryCatchStack;if(a&&a.length>0){let g=a.pop()?.catchStep;if(g!==void 0){this.logger.info(`Capturando error en step ${g}`,{workflowId:t}),await this.repo.resumeForCatch(t,o.state,g,r),this.scheduleExecution(t,s,{name:e.name,message:e.message,stack:e.stack});return}}let p=`event:${t}`,d=h({eventName:"workflow:failed",payload:{message:e.message}});await l.publish(p,d),await this.repo.fail(t,e,r),await this.propagateFailureToParent(t,e)}finally{!n&&r&&await this.repo.releaseLock(t,r)}}async resumeParentWorkflow(t){let e=await this.repo.get(t);if(!e?.parentId)return;let s=e.parentId,n=await this.repo.get(s);if(!n||n.status!==c.AWAITING_SUBWORKFLOW||n.subWorkflowId!==t)return;let r=this.durableFns.get(n.name);if(!r){await this.repo.fail(s,new Error(`Definici\xF3n del workflow '${n.name}' no encontrada.`),null);return}await this.repo.updateStatus(s,c.RUNNING,{subWorkflowId:""});let o=await this.repo.acquireLock(s);if(o)try{await this.repo.incrementStep(s,o),this.scheduleExecution(s,r,e.result)}finally{await this.repo.releaseLock(s,o)}else throw this.logger.warn(`Could not lock parent ${s} to resume. Retrying later...`),new Error(`Temporary Lock Failure: Could not acquire parent lock for ${s}`)}async propagateFailureToParent(t,e){let s=await this.repo.get(t);if(!s?.parentId)return;let n=s.parentId,r=await this.repo.get(n);if(!r||r.status!==c.AWAITING_SUBWORKFLOW||r.subWorkflowId!==t)return;let o=this.durableFns.get(r.name);if(!o){await this.repo.fail(n,new Error(`Definici\xF3n del workflow '${r.name}' no encontrada al propagar fallo.`),null);return}await this.repo.updateStatus(n,c.RUNNING,{subWorkflowId:""});let a=new Error(`Sub-workflow '${s.name}' (${t}) fall\xF3: ${e.message}`);a.stack=e.stack,this.scheduleExecution(n,o,void 0,a)}async signal(t,e,s){let n=null;for(let r=0;r<3&&(n=await this.repo.acquireLock(t),!n);r++)await new Promise(o=>setTimeout(o,50));if(!n)return this.logger.warn("Lock timeout en signal",{workflowId:t});try{let r=await this.repo.get(t);if(!r)return this.logger.warn("Se\xF1al para workflow inexistente",{workflowId:t});if(r.status!==c.AWAITING_SIGNAL||r.awaitingSignal!==e)return this.logger.warn("Workflow no esperaba esta se\xF1al",{workflowId:t,expected:r.awaitingSignal,received:e});let o=this.durableFns.get(r.name);if(!o){await this.repo.fail(t,new Error(`Funci\xF3n durable '${r.name}' no encontrada.`),n);return}await this.repo.updateStatus(t,c.RUNNING,{awaitingSignal:""}),await l.srem(`signals:awaiting:${e}`,t),await this.repo.incrementStep(t,n),this.scheduleExecution(t,o,s)}catch(r){let o=r instanceof Error?r:new Error(String(r)),a=(await this.repo.get(t))?.name||"",p=this.durableFns.get(a);await this.handleFailure(t,o,p,n)}finally{n&&await this.repo.releaseLock(t,n)}}async cancel(t,e){let s=await this.repo.acquireLock(t);if(!s)return await new Promise(n=>setTimeout(n,100)),this.cancel(t,e);try{let n=await this.repo.get(t);if(!n||[c.COMPLETED,c.FAILED,c.CANCELLED].includes(n.status))return;if(await this.repo.updateStatus(t,c.CANCELLING,{error:e}),n.status===c.SLEEPING){await l.zrem(I,t);let r=this.durableFns.get(n.name);this.scheduleExecution(t,r)}if(n.status===c.AWAITING_SIGNAL){let r=this.durableFns.get(n.name);this.scheduleExecution(t,r)}}finally{await this.repo.releaseLock(t,s)}}startScheduler(){if(this.schedulerInterval)return;this.logger.info(`Scheduler iniciado 
(${this.pollingInterval}ms)`);let t=async()=>{await this.checkSleepers(),await this.checkDelayedTasks(),await this.reapDeadWorkers()};this.schedulerInterval=setInterval(t,this.pollingInterval)}async checkDelayedTasks(){try{let t=await this.repo.moveDueTasksToQueue(50);t>0&&this.logger.debug(`Scheduler movi\xF3 ${t} tareas diferidas a la cola activa`)}catch(t){this.logger.error("Error chequeando tareas diferidas",{error:t})}}async checkSleepers(){let e=await this.repo.getWorkflowsToWake(50);e.length!==0&&await Promise.all(e.map(async s=>{let n=await this.repo.acquireLock(s);if(n)try{let r=await this.repo.get(s);if(r){let o=this.durableFns.get(r.name);o&&(this.logger.info("Despertando workflow",{workflowId:s}),await this.repo.updateStatus(s,c.RUNNING),await this.repo.incrementStep(s,n),this.scheduleExecution(s,o,void 0))}}finally{await this.repo.releaseLock(s,n)}}))}async reapDeadWorkers(){let t="0";do{let[e,s]=await l.sscan(k,t,"COUNT",100);t=e;for(let n of s){if(await l.exists(`${R}${n}`))continue;this.logger.warn(`Worker muerto ${n}. Recuperando tareas.`);let r=`${E}:processing:${n}`,o=await l.rpoplpush(r,E);for(;o;)o=await l.rpoplpush(r,E);await l.del(r),await l.srem(k,n)}}while(t!=="0")}startHeartbeat(){let t=`${R}${this.workerId}`,e=Math.max(Math.ceil(this.pollingInterval*3/1e3),5),s=()=>{this.isRunning&&l.set(t,Date.now().toString(),"EX",e).catch(()=>{})};this.heartbeatInterval=setInterval(s,this.pollingInterval),s()}startWorker(){if(this.isRunning)return;this.isRunning=!0;let t=`${E}:processing:${this.workerId}`;this.logger.info(`Worker ${this.workerId} iniciado`),this.startHeartbeat(),(async()=>{for(await l.sadd(k,this.workerId);this.isRunning;)try{let s=await y.brpoplpush(E,t,2);if(!s)continue;let n=m(s);this.logger.debug(`Ejecutando tarea: ${n.exportName}`,{workflowId:n.workflowId});try{let r;n.modulePath.startsWith("virtual:")?r=await import(n.modulePath):r=await import(V(this.sourceRoot,n.modulePath));let o=r[n.exportName];if(typeof o!="function")throw new Error(`'${n.exportName}' no es una funci\xF3n.`);let a=await o(...n.args),p=this.durableFns.get(n.durableFunctionName);if(p){let d=await this.repo.acquireLock(n.workflowId);if(d)try{await this.repo.incrementStep(n.workflowId,d),this.scheduleExecution(n.workflowId,p,a)}finally{await this.repo.releaseLock(n.workflowId,d)}else this.logger.warn(`No se pudo adquirir lock para avanzar workflow ${n.workflowId} tras tarea`,{task:n.exportName})}await l.lrem(t,1,s)}catch(r){let o=r instanceof Error?r:new Error(String(r));this.logger.error(`Fallo en tarea ${n.exportName}`,{workflowId:n.workflowId,error:o.message});let a=this.durableFns.get(n.durableFunctionName),p=a?.retryOptions||{},d=p.maxAttempts??3,u=(n.attempts||0)+1;if(n.attempts=u,u<=d){let g=p.initialInterval?v(p.initialInterval):1e3,f=p.backoffCoefficient??2,w=p.maxInterval?v(p.maxInterval):36e5,T=g*Math.pow(f,u-1);T>w&&(T=w),this.logger.warn(`Reintentando tarea en ${v(T)} (intento ${u}/${d===1/0?"Inf":d})`,{workflowId:n.workflowId}),T>0?await this.repo.scheduleTaskRetry(n,T):await l.lpush(E,h(n)),await l.lrem(t,1,s)}else this.logger.error("Reintentos agotados. 
Moviendo a DLQ.",{workflowId:n.workflowId}),await this.repo.moveToDLQ(n,o),a?await this.handleFailure(n.workflowId,o,a,null):await this.repo.fail(n.workflowId,new Error(`Def missing for ${n.durableFunctionName}`),null),await l.lrem(t,1,s)}}catch(s){if(!this.isRunning)break;this.logger.error("Error infraestructura worker",{error:s}),await new Promise(n=>setTimeout(n,5e3))}})()}run(t){this.durableFns=t,this.startWorker(),this.startScheduler()}async stop(){this.isRunning=!1,this.schedulerInterval&&clearInterval(this.schedulerInterval),this.heartbeatInterval&&clearInterval(this.heartbeatInterval),await l.srem(k,this.workerId),this.logger.info("Runtime detenido")}};var H=i=>({...i,__isDurable:!0});var B={info:(i,t)=>console.log(`[INFO] ${i}`,t||""),error:(i,t)=>console.error(`[ERROR] ${i}`,t||""),warn:(i,t)=>console.warn(`[WARN] ${i}`,t||""),debug:(i,t)=>console.debug(`[DEBUG] ${i}`,t||"")},Q=i=>{if(!i.startsWith("on")||i.length<=2)return null;let t=i.slice(2);return t.charAt(0).toLowerCase()+t.slice(1)};function Rt(i){let t=i.logger||B;t.info("--- Inicializando Sistema Durable ---"),N({commandClient:i.redisClient,blockingClient:i.blockingRedisClient});let e=new b({sourceRoot:i.sourceRoot,retention:i.retention,pollingInterval:i.pollingInterval,logger:t});e.run(i.durableFunctions);let s=new q(i.redisClient.options),n=new Map;s.psubscribe("event:*",o=>{o&&t.error("Error fatal al suscribirse a los canales de eventos:",{error:o})}),s.on("pmessage",(o,a,p)=>{let d=n.get(a);if(d&&d.length>0)try{let u=m(p),g={name:u.eventName||u.signalName,payload:u.payload};[...d].forEach(f=>f(g))}catch(u){t.error(`Error al parsear evento en ${a}`,{error:u})}});let r=(o,a)=>{let p=`event:${a}`,d=u=>(n.has(p)||n.set(p,[]),n.get(p)?.push(u),()=>{let g=n.get(p);if(g){let f=g.indexOf(u);f>-1&&g.splice(f,1),g.length===0&&n.delete(p)}});return{workflowId:a,signal:async(u,g)=>{await e.signal(a,u,g)},on:async(u,g)=>{let f=C(a),w=await i.redisClient.hgetall(f);return u==="workflow:completed"&&w.status===c.COMPLETED?(g(m(w.result||"null")),{unsubscribe:()=>{}}):u==="workflow:failed"&&w.status===c.FAILED?(g({message:w.error||"Unknown"}),{unsubscribe:()=>{}}):{unsubscribe:d(O=>{O.name===u&&g(O.payload)})}},subscribe:async u=>({unsubscribe:d(f=>{u(f)})})}};return{start:async(o,a)=>{let p=a.workflowId||z(),d=[],u=`event:${p}`,g={};if(Object.keys(a).forEach(f=>{let w=Q(f);w&&typeof a[f]=="function"&&(g[w]=a[f])}),Object.keys(g).length>0){n.has(u)||n.set(u,[]);let f=w=>{let T=g[w.name];T&&T(w.payload)};n.get(u)?.push(f),d.push(()=>{let w=n.get(u);if(w){let T=w.indexOf(f);T>-1&&w.splice(T,1)}})}return await e.start(o,{workflowId:p,input:a.input}),{workflowId:p,unsubscribe:async()=>{d.forEach(f=>f())}}},stop:()=>{e.stop(),s.quit().catch(()=>{})},runtime:e,cancel:(o,a)=>e.cancel(o,a),getState:o=>e.getState(o),getHandle:(o,a)=>r(o,a)}}export{S as WorkflowCancellationError,H as bDurable,Rt as bDurableInitialize};
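
The locking change above is the core of the 1.0.7 rewrite: acquireLock now stores a random token (SET ... EX ... NX) and returns it, and release, renewal and every state write go through Lua scripts that first compare the stored value against that token, so a worker whose lock already expired cannot clobber a lock another worker has since taken. What follows is a minimal sketch of the acquire/release half of that pattern against a plain ioredis client. The Lua script is the compare-and-delete script from the new bundle; the helper names and signatures are illustrative, not part of the package's public API.

import { randomUUID } from "crypto";
import Redis from "ioredis";

// Compare-and-delete script from the new bundle: only delete the lock key
// if it still holds the token we stored when we acquired it.
const RELEASE_IF_OWNED = `
if redis.call("get", KEYS[1]) == ARGV[1] then
  return redis.call("del", KEYS[1])
else
  return 0
end
`;

// Acquire: store a per-acquisition token with NX + EX; the token later proves ownership.
async function acquireLock(redis: Redis, lockKey: string, ttlSeconds = 10): Promise<string | null> {
  const token = randomUUID();
  const ok = await redis.set(lockKey, token, "EX", ttlSeconds, "NX");
  return ok === "OK" ? token : null;
}

// Release: a worker whose lock already expired cannot delete a lock now owned by another worker,
// because the GET/ARGV comparison in the script fails and nothing is deleted.
async function releaseLock(redis: Redis, lockKey: string, token: string): Promise<boolean> {
  const deleted = (await redis.eval(RELEASE_IF_OWNED, 1, lockKey, token)) as number;
  return deleted === 1;
}

The same idea extends to the other scripts in the bundle (renew via EXPIRE, state writes via HSET, step increments via HINCRBY), each guarded by the same token comparison.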
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@bobtail.software/b-durable",
3
- "version": "1.0.5",
3
+ "version": "1.0.7",
4
4
  "main": "dist/index.mjs",
5
5
  "types": "dist/index.d.mts",
6
6
  "description": "A system for creating durable, resilient, and type-safe workflows in JavaScript/TypeScript.",
@@ -25,6 +25,7 @@
25
25
  "ms": "^2.1.3",
26
26
  "pino": "^10.1.0",
27
27
  "prettier": "^3.6.2",
28
+ "superjson": "^2.2.5",
28
29
  "ts-morph": "^27.0.2"
29
30
  },
30
31
  "devDependencies": {