@blokjs/trigger-worker 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,501 @@
+ /**
+  * WorkerTrigger - Background job processing for Blok workflows
+  *
+  * Extends TriggerBase to support long-running background jobs:
+  * - Concurrency controls (max N concurrent jobs)
+  * - Retry logic with exponential backoff
+  * - Job timeouts
+  * - Job priority and delay scheduling
+  * - Dead letter queue support
+  *
+  * Pattern:
+  * 1. loadNodes() - Load available nodes into NodeMap
+  * 2. loadWorkflows() - Load workflows with worker triggers
+  * 3. listen() - Connect to job backend and start processing
+  * 4. For each job:
+  *    - Create context with this.createContext()
+  *    - Populate ctx.request with job data
+  *    - Execute workflow via this.run(ctx)
+  *    - Ack on success, retry or DLQ on failure
+  */
+
+ import type { HelperResponse, WorkerTriggerOpts } from "@blokjs/helper";
+ import {
+   DefaultLogger,
+   type GlobalOptions,
+   type BlokService,
+   NodeMap,
+   TriggerBase,
+   type TriggerResponse,
+ } from "@blokjs/runner";
+ import type { Context, RequestContext } from "@blokjs/shared";
+ import { type Span, SpanStatusCode, metrics, trace } from "@opentelemetry/api";
+ import { v4 as uuid } from "uuid";
+
+ /**
+  * Job received from worker queue
+  */
+ export interface WorkerJob {
+   /** Unique job ID */
+   id: string;
+   /** Job data payload */
+   data: unknown;
+   /** Job metadata headers */
+   headers: Record<string, string>;
+   /** Queue name this job belongs to */
+   queue: string;
+   /** Job priority (higher = more important) */
+   priority: number;
+   /** Number of attempts made so far */
+   attempts: number;
+   /** Maximum retry attempts */
+   maxRetries: number;
+   /** Timestamp when job was created */
+   createdAt: Date;
+   /** Delay before processing (ms) */
+   delay?: number;
+   /** Job timeout (ms) */
+   timeout?: number;
+   /** Original raw job from provider */
+   raw: unknown;
+   /** Mark job as completed */
+   complete: () => Promise<void>;
+   /** Mark job as failed (optionally requeue) */
+   fail: (error: Error, requeue?: boolean) => Promise<void>;
+ }
+
+ /**
+  * Worker adapter interface - implemented by each job backend
+  */
+ export interface WorkerAdapter {
+   /** Provider name (e.g., "bullmq", "in-memory") */
+   readonly provider: string;
+
+   /** Connect to the job backend */
+   connect(): Promise<void>;
+
+   /** Disconnect from the job backend */
+   disconnect(): Promise<void>;
+
+   /**
+    * Start processing jobs from a queue
+    * @param config Worker trigger configuration
+    * @param handler Callback for each job
+    */
+   process(config: WorkerTriggerOpts, handler: (job: WorkerJob) => Promise<void>): Promise<void>;
+
+   /**
+    * Add a job to a queue (for programmatic dispatching)
+    * @param queue Queue name
+    * @param data Job payload
+    * @param opts Job options
+    */
+   addJob(
+     queue: string,
+     data: unknown,
+     opts?: {
+       priority?: number;
+       delay?: number;
+       retries?: number;
+       timeout?: number;
+       jobId?: string;
+     },
+   ): Promise<string>;
+
+   /** Stop processing a specific queue */
+   stopProcessing(queue: string): Promise<void>;
+
+   /** Check if connected */
+   isConnected(): boolean;
+
+   /** Health check */
+   healthCheck(): Promise<boolean>;
+
+   /** Get queue stats */
+   getQueueStats(queue: string): Promise<WorkerQueueStats>;
+ }
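
Editor's note: the interface above is the full contract a job backend has to satisfy. As a rough, hypothetical illustration (not part of the package), a toy in-memory adapter could look like the sketch below. It assumes the package entry point re-exports WorkerAdapter, WorkerJob and WorkerQueueStats, and that WorkerTriggerOpts carries the queue name as config.queue, which is how the trigger code later in this file reads it.

import type { WorkerTriggerOpts } from "@blokjs/helper";
import type { WorkerAdapter, WorkerJob, WorkerQueueStats } from "@blokjs/trigger-worker";

// Toy adapter: keeps jobs in process memory and polls each queue on an interval.
export class InMemoryWorkerAdapter implements WorkerAdapter {
  readonly provider = "in-memory";
  private connected = false;
  private queues = new Map<string, WorkerJob[]>();
  private stats = new Map<string, WorkerQueueStats>();
  private timers = new Map<string, ReturnType<typeof setInterval>>();

  async connect(): Promise<void> { this.connected = true; }
  async disconnect(): Promise<void> {
    for (const timer of this.timers.values()) clearInterval(timer);
    this.timers.clear();
    this.connected = false;
  }
  isConnected(): boolean { return this.connected; }
  async healthCheck(): Promise<boolean> { return this.connected; }

  async addJob(
    queue: string,
    data: unknown,
    opts?: { priority?: number; delay?: number; retries?: number; timeout?: number; jobId?: string },
  ): Promise<string> {
    const stats = this.statsFor(queue);
    const job: WorkerJob = {
      id: opts?.jobId ?? `job-${Date.now()}-${Math.random().toString(36).slice(2)}`,
      data,
      headers: {},
      queue,
      priority: opts?.priority ?? 0,
      attempts: 0,
      maxRetries: opts?.retries ?? 0,
      createdAt: new Date(),
      delay: opts?.delay,
      timeout: opts?.timeout,
      raw: data,
      complete: async () => {
        stats.completed += 1;
      },
      // The trigger decides whether to requeue; the adapter just obeys.
      fail: async (_error, requeue) => {
        if (requeue) {
          job.attempts += 1;
          this.jobsFor(queue).push(job);
        } else {
          stats.failed += 1;
        }
      },
    };
    this.jobsFor(queue).push(job);
    return job.id;
  }

  async process(config: WorkerTriggerOpts, handler: (job: WorkerJob) => Promise<void>): Promise<void> {
    // One job per tick; the trigger's handleJob() does its own error handling.
    const timer = setInterval(() => {
      const job = this.jobsFor(config.queue).shift();
      if (job) void handler(job);
    }, 100);
    this.timers.set(config.queue, timer);
  }

  async stopProcessing(queue: string): Promise<void> {
    const timer = this.timers.get(queue);
    if (timer) clearInterval(timer);
    this.timers.delete(queue);
  }

  async getQueueStats(queue: string): Promise<WorkerQueueStats> {
    return { ...this.statsFor(queue), waiting: this.jobsFor(queue).length };
  }

  private jobsFor(queue: string): WorkerJob[] {
    const jobs = this.queues.get(queue) ?? [];
    this.queues.set(queue, jobs);
    return jobs;
  }

  private statsFor(queue: string): WorkerQueueStats {
    const s = this.stats.get(queue) ?? { waiting: 0, active: 0, completed: 0, failed: 0, delayed: 0 };
    this.stats.set(queue, s);
    return s;
  }
}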
+
+ /**
+  * Queue statistics
+  */
+ export interface WorkerQueueStats {
+   /** Number of jobs waiting to be processed */
+   waiting: number;
+   /** Number of jobs currently being processed */
+   active: number;
+   /** Number of completed jobs */
+   completed: number;
+   /** Number of failed jobs */
+   failed: number;
+   /** Number of delayed jobs */
+   delayed: number;
+ }
+
+ /**
+  * Workflow model with worker trigger configuration
+  */
+ interface WorkerWorkflowModel {
+   path: string;
+   config: {
+     name: string;
+     version: string;
+     trigger?: {
+       worker?: WorkerTriggerOpts;
+       [key: string]: unknown;
+     };
+     [key: string]: unknown;
+   };
+ }
+
+ /**
+  * WorkerTrigger - Abstract base class for worker-based triggers
+  *
+  * Provides background job processing with:
+  * - Configurable concurrency per queue
+  * - Automatic retries with exponential backoff
+  * - Job timeouts with automatic failure
+  * - Priority-based job ordering
+  * - Delayed job scheduling
+  * - Queue statistics and monitoring
+  */
+ export abstract class WorkerTrigger extends TriggerBase {
+   protected nodeMap: GlobalOptions = {} as GlobalOptions;
+   protected readonly tracer = trace.getTracer(
+     process.env.PROJECT_NAME || "trigger-worker-workflow",
+     process.env.PROJECT_VERSION || "0.0.1",
+   );
+   protected readonly logger = new DefaultLogger();
+   protected abstract adapter: WorkerAdapter;
+
+   /** Active queues being processed */
+   protected activeQueues: Set<string> = new Set();
+
+   // Subclasses provide these
+   protected abstract nodes: Record<string, BlokService<unknown>>;
+   protected abstract workflows: Record<string, HelperResponse>;
+
+   constructor() {
+     super();
+     this.loadNodes();
+     this.loadWorkflows();
+   }
+
+   /**
+    * Load nodes into the node map
+    */
+   loadNodes(): void {
+     this.nodeMap.nodes = new NodeMap();
+     const nodeKeys = Object.keys(this.nodes);
+     for (const key of nodeKeys) {
+       this.nodeMap.nodes.addNode(key, this.nodes[key]);
+     }
+   }
+
+   /**
+    * Load workflows into the workflow map
+    */
+   loadWorkflows(): void {
+     this.nodeMap.workflows = this.workflows;
+   }
+
+   /**
+    * Start the worker processor - main entry point
+    */
+   async listen(): Promise<number> {
+     const startTime = this.startCounter();
+
+     try {
+       // Connect to job backend
+       await this.adapter.connect();
+       this.logger.log(`Connected to ${this.adapter.provider} worker system`);
+
+       // Register health dependency
+       this.registerHealthDependency(`worker-${this.adapter.provider}`, async () => {
+         const healthy = await this.adapter.healthCheck();
+         return {
+           status: healthy ? ("healthy" as const) : ("unhealthy" as const),
+           lastChecked: Date.now(),
+           message: healthy ? "Connected" : "Connection lost",
+         };
+       });
+
+       // Find all workflows with worker triggers
+       const workerWorkflows = this.getWorkerWorkflows();
+
+       if (workerWorkflows.length === 0) {
+         this.logger.log("No workflows with worker triggers found");
+         return this.endCounter(startTime);
+       }
+
+       // Start processing each queue
+       for (const workflow of workerWorkflows) {
+         const config = workflow.config.trigger?.worker as WorkerTriggerOpts;
+         this.logger.log(
+           `Starting worker for queue: ${config.queue} (concurrency=${config.concurrency}, retries=${config.retries})`,
+         );
+
+         this.activeQueues.add(config.queue);
+
+         await this.adapter.process(config, async (job) => {
+           await this.handleJob(job, workflow, config);
+         });
+       }
+
+       this.logger.log(`Worker trigger started. Processing ${workerWorkflows.length} queue(s)`);
+
+       // Enable HMR in development mode
+       if (process.env.BLOK_HMR === "true" || process.env.NODE_ENV === "development") {
+         await this.enableHotReload();
+       }
+
+       return this.endCounter(startTime);
+     } catch (error) {
+       this.logger.error(`Failed to start worker trigger: ${(error as Error).message}`);
+       throw error;
+     }
+   }
+
+   /**
+    * Stop all workers and disconnect
+    */
+   async stop(): Promise<void> {
+     for (const queue of this.activeQueues) {
+       await this.adapter.stopProcessing(queue);
+       this.logger.log(`Stopped processing queue: ${queue}`);
+     }
+     this.activeQueues.clear();
+     await this.adapter.disconnect();
+     this.destroyMonitoring();
+     this.logger.log("Worker trigger stopped");
+   }
+
+   protected override async onHmrWorkflowChange(): Promise<void> {
+     this.logger.log("[HMR] Worker workflow changed, reloading...");
+     await this.waitForInFlightRequests();
+     await this.stop();
+     this.loadWorkflows();
+     await this.listen();
+   }
+
+   /**
+    * Dispatch a job to a worker queue
+    */
+   async dispatch(
+     queue: string,
+     data: unknown,
+     opts?: {
+       priority?: number;
+       delay?: number;
+       retries?: number;
+       timeout?: number;
+       jobId?: string;
+     },
+   ): Promise<string> {
+     return this.adapter.addJob(queue, data, opts);
+   }
+
+   /**
+    * Get statistics for a queue
+    */
+   async getQueueStats(queue: string): Promise<WorkerQueueStats> {
+     return this.adapter.getQueueStats(queue);
+   }
+
+   /**
+    * Get list of active queues
+    */
+   getActiveQueues(): string[] {
+     return Array.from(this.activeQueues);
+   }
+
+   /**
+    * Get all workflows that have worker triggers
+    */
+   protected getWorkerWorkflows(): WorkerWorkflowModel[] {
+     const workflows: WorkerWorkflowModel[] = [];
+
+     for (const [path, workflow] of Object.entries(this.nodeMap.workflows || {})) {
+       const workflowConfig = (workflow as unknown as { _config: WorkerWorkflowModel["config"] })._config;
+
+       if (workflowConfig?.trigger) {
+         const triggerType = Object.keys(workflowConfig.trigger)[0];
+
+         if (triggerType === "worker" && workflowConfig.trigger.worker) {
+           workflows.push({
+             path,
+             config: workflowConfig,
+           });
+         }
+       }
+     }
+
+     return workflows;
+   }
+
+   /**
+    * Handle an incoming job
+    */
+   protected async handleJob(job: WorkerJob, workflow: WorkerWorkflowModel, config: WorkerTriggerOpts): Promise<void> {
+     const jobId = job.id || uuid();
+     const defaultMeter = metrics.getMeter("default");
+     const workerJobs = defaultMeter.createCounter("worker_jobs_processed", {
+       description: "Worker jobs processed",
+     });
+     const workerErrors = defaultMeter.createCounter("worker_jobs_failed", {
+       description: "Worker job failures",
+     });
+     const workerRetries = defaultMeter.createCounter("worker_jobs_retried", {
+       description: "Worker job retries",
+     });
+
+     await this.tracer.startActiveSpan(`worker:${config.queue}`, async (span: Span) => {
+       try {
+         const start = performance.now();
+
+         // Initialize configuration for this workflow
+         await this.configuration.init(workflow.path, this.nodeMap);
+
+         // Create context
+         const ctx: Context = this.createContext(undefined, workflow.path, jobId);
+
+         // Populate request with job data
+         ctx.request = {
+           body: job.data,
+           headers: job.headers,
+           query: {},
+           params: {
+             queue: job.queue,
+             jobId: job.id,
+             attempt: String(job.attempts),
+             priority: String(job.priority),
+           },
+         } as unknown as RequestContext;
+
+         // Store worker metadata in context
+         if (!ctx.vars) ctx.vars = {};
+         ctx.vars["_worker_job"] = {
+           id: job.id,
+           queue: job.queue,
+           attempts: String(job.attempts),
+           maxRetries: String(job.maxRetries),
+           priority: String(job.priority),
+           createdAt: job.createdAt.toISOString(),
+           delay: String(job.delay ?? 0),
+           timeout: String(job.timeout ?? 0),
+         };
+
+         ctx.logger.log(
+           `Processing job ${jobId} from ${config.queue} (attempt ${job.attempts + 1}/${job.maxRetries + 1})`,
+         );
+
+         // Execute workflow with timeout if configured
+         let response: TriggerResponse;
+         if (config.timeout && config.timeout > 0) {
+           response = await this.executeWithTimeout(ctx, config.timeout);
+         } else {
+           response = await this.run(ctx);
+         }
+
+         const end = performance.now();
+
+         // Set span attributes
+         span.setAttribute("success", true);
+         span.setAttribute("job_id", jobId);
+         span.setAttribute("queue", config.queue);
+         span.setAttribute("attempts", job.attempts);
+         span.setAttribute("elapsed_ms", end - start);
+         span.setStatus({ code: SpanStatusCode.OK });
+
+         // Record metrics
+         workerJobs.add(1, {
+           env: process.env.NODE_ENV,
+           queue: config.queue,
+           workflow_name: this.configuration.name,
+           success: "true",
+         });
+
+         ctx.logger.log(`Job completed in ${(end - start).toFixed(2)}ms: ${jobId}`);
+
+         // Mark job as completed
+         await job.complete();
+       } catch (error) {
+         const errorMessage = (error as Error).message;
+         const shouldRetry = job.attempts < job.maxRetries;
+
+         // Set span error
+         span.setAttribute("success", false);
+         span.setAttribute("will_retry", shouldRetry);
+         span.recordException(error as Error);
+         span.setStatus({ code: SpanStatusCode.ERROR, message: errorMessage });
+
+         if (shouldRetry) {
+           // Retry with exponential backoff
+           const backoffMs = this.calculateBackoff(job.attempts, config.delay);
+           workerRetries.add(1, {
+             env: process.env.NODE_ENV,
+             queue: config.queue,
+             workflow_name: this.configuration?.name || "unknown",
+             attempt: String(job.attempts + 1),
+           });
+
+           this.logger.error(
+             `Job ${jobId} failed (attempt ${job.attempts + 1}/${job.maxRetries + 1}), retrying in ${backoffMs}ms: ${errorMessage}`,
+           );
+
+           await job.fail(error as Error, true);
+         } else {
+           // Max retries exhausted - send to DLQ
+           workerErrors.add(1, {
+             env: process.env.NODE_ENV,
+             queue: config.queue,
+             workflow_name: this.configuration?.name || "unknown",
+           });
+
+           this.logger.error(
+             `Job ${jobId} permanently failed after ${job.attempts + 1} attempts: ${errorMessage}`,
+             (error as Error).stack,
+           );
+
+           await job.fail(error as Error, false);
+         }
+       } finally {
+         span.end();
+       }
+     });
+   }
+
+   /**
+    * Execute workflow with a timeout
+    */
+   protected async executeWithTimeout(ctx: Context, timeoutMs: number): Promise<TriggerResponse> {
+     return new Promise<TriggerResponse>((resolve, reject) => {
+       const timer = setTimeout(() => {
+         reject(new Error(`Job timed out after ${timeoutMs}ms`));
+       }, timeoutMs);
+
+       this.run(ctx)
+         .then((result) => {
+           clearTimeout(timer);
+           resolve(result);
+         })
+         .catch((error) => {
+           clearTimeout(timer);
+           reject(error);
+         });
+     });
+   }
+
+   /**
+    * Calculate exponential backoff delay
+    * Formula: min(baseDelay * 2^attempt, 30000) + jitter
+    */
+   protected calculateBackoff(attempt: number, baseDelay?: number): number {
+     const base = baseDelay ?? 1000;
+     const maxDelay = 30000; // 30 seconds max
+     const exponential = Math.min(base * Math.pow(2, attempt), maxDelay);
+     const jitter = Math.random() * exponential * 0.1; // 10% jitter
+     return Math.floor(exponential + jitter);
+   }
+ }
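
Editor's note: working through calculateBackoff() with the default base of 1000 ms (used when the trigger config sets no delay), where attempt is job.attempts at the time of failure, the capped exponential term plus up to 10% jitter gives roughly:

- attempt 0: 1000-1100 ms
- attempt 1: 2000-2200 ms
- attempt 2: 4000-4400 ms
- attempt 3: 8000-8800 ms
- attempt 4: 16000-17600 ms
- attempt 5 and later: capped at 30000 ms, so 30000-33000 ms

Note that handleJob() only logs this value; actually honoring the delay when a job is requeued is left to the adapter's fail(error, true) implementation.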
+
+ export default WorkerTrigger;
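
Editor's note: to make the pattern described in the file header concrete, here is a minimal, hypothetical wiring sketch. It is not from the package: it assumes the entry point exposes WorkerTrigger as the default export along with the WorkerAdapter type, reuses the InMemoryWorkerAdapter sketched earlier (imagined to live in a local module), and leaves the node and workflow maps empty, so listen() would only log that no worker workflows were found. A real project would supply its generated nodes and workflows instead.

import type { HelperResponse } from "@blokjs/helper";
import type { BlokService } from "@blokjs/runner";
import WorkerTrigger, { type WorkerAdapter } from "@blokjs/trigger-worker";
import { InMemoryWorkerAdapter } from "./in-memory-adapter"; // hypothetical module holding the sketch above

// Hypothetical concrete trigger.
class ExampleWorker extends WorkerTrigger {
  protected adapter: WorkerAdapter = new InMemoryWorkerAdapter();

  // Declared as getters so the values already exist when the base constructor
  // calls loadNodes()/loadWorkflows(), before subclass field initializers run.
  protected get nodes(): Record<string, BlokService<unknown>> {
    return {}; // a real project would return its node map here
  }
  protected get workflows(): Record<string, HelperResponse> {
    return {}; // a real project would return its workflow map here
  }
}

async function main(): Promise<void> {
  const worker = new ExampleWorker();
  await worker.listen(); // connects the adapter and starts any worker-triggered workflows

  // Jobs can also be dispatched programmatically through the adapter.
  const jobId = await worker.dispatch("emails", { to: "user@example.com" }, { priority: 5, retries: 3 });
  console.log(jobId, await worker.getQueueStats("emails"));

  await worker.stop();
}

main().catch(console.error);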