@open-mercato/queue 0.4.2-canary-c02407ff85

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
package/build.mjs ADDED
@@ -0,0 +1,61 @@
+ import * as esbuild from 'esbuild'
+ import { glob } from 'glob'
+ import { readFileSync, writeFileSync, existsSync } from 'node:fs'
+ import { dirname, join } from 'node:path'
+
+ const entryPoints = await glob('src/**/*.ts', {
+   ignore: ['**/__tests__/**', '**/*.test.ts']
+ })
+
+ // Plugin to add .js extension to relative imports
+ const addJsExtension = {
+   name: 'add-js-extension',
+   setup(build) {
+     build.onEnd(async (result) => {
+       if (result.errors.length > 0) return
+       const outputFiles = await glob('dist/**/*.js')
+       for (const file of outputFiles) {
+         const fileDir = dirname(file)
+         let content = readFileSync(file, 'utf-8')
+         // Add .js to relative imports that don't have an extension
+         content = content.replace(
+           /from\s+["'](\.[^"']+)["']/g,
+           (match, path) => {
+             if (path.endsWith('.js') || path.endsWith('.json')) return match
+             // Check if it's a directory with index.js
+             const resolvedPath = join(fileDir, path)
+             if (existsSync(resolvedPath) && existsSync(join(resolvedPath, 'index.js'))) {
+               return `from "${path}/index.js"`
+             }
+             return `from "${path}.js"`
+           }
+         )
+         content = content.replace(
+           /import\s*\(\s*["'](\.[^"']+)["']\s*\)/g,
+           (match, path) => {
+             if (path.endsWith('.js') || path.endsWith('.json')) return match
+             // Check if it's a directory with index.js
+             const resolvedPath = join(fileDir, path)
+             if (existsSync(resolvedPath) && existsSync(join(resolvedPath, 'index.js'))) {
+               return `import("${path}/index.js")`
+             }
+             return `import("${path}.js")`
+           }
+         )
+         writeFileSync(file, content)
+       }
+     })
+   }
+ }
+
+ await esbuild.build({
+   entryPoints,
+   outdir: 'dist',
+   format: 'esm',
+   platform: 'node',
+   target: 'node18',
+   sourcemap: true,
+   plugins: [addJsExtension],
+ })
+
+ console.log('queue built successfully')
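Editor's note: the build pass above rewrites extensionless relative specifiers in the emitted ESM so Node can resolve them at runtime. A minimal sketch of the same rule applied in isolation (the sample input and simplified callback are illustrative; the real pass also handles directory imports and dynamic import() as shown above):

// Hypothetical check of the "from" rewrite rule, using the same regex as build.mjs.
const sample = `import { createQueue } from './factory'`
const rewritten = sample.replace(
  /from\s+["'](\.[^"']+)["']/g,
  (match: string, p: string) =>
    p.endsWith('.js') || p.endsWith('.json') ? match : `from "${p}.js"`
)
console.log(rewritten) // import { createQueue } from "./factory.js"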
package/dist/factory.js ADDED
@@ -0,0 +1,12 @@
+ import { createLocalQueue } from "./strategies/local.js";
+ import { createAsyncQueue } from "./strategies/async.js";
+ function createQueue(name, strategy, options) {
+   if (strategy === "async") {
+     return createAsyncQueue(name, options);
+   }
+   return createLocalQueue(name, options);
+ }
+ export {
+   createQueue
+ };
+ //# sourceMappingURL=factory.js.map
package/dist/factory.js.map ADDED
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../src/factory.ts"],
+ "sourcesContent": ["import type { Queue, LocalQueueOptions, AsyncQueueOptions } from './types'\nimport { createLocalQueue } from './strategies/local'\nimport { createAsyncQueue } from './strategies/async'\n\n/**\n * Creates a queue instance with the specified strategy.\n *\n * @template T - The payload type for jobs in this queue\n * @param name - Unique name for the queue\n * @param strategy - Queue strategy: 'local' for file-based, 'async' for BullMQ\n * @param options - Strategy-specific options\n * @returns A Queue instance\n *\n * @example\n * ```typescript\n * // Local file-based queue\n * const localQueue = createQueue<MyJobData>('my-queue', 'local')\n *\n * // BullMQ-based queue\n * const asyncQueue = createQueue<MyJobData>('my-queue', 'async', {\n * connection: { url: 'redis://localhost:6379' },\n * concurrency: 5\n * })\n * ```\n */\nexport function createQueue<T = unknown>(\n name: string,\n strategy: 'local',\n options?: LocalQueueOptions\n): Queue<T>\n\nexport function createQueue<T = unknown>(\n name: string,\n strategy: 'async',\n options?: AsyncQueueOptions\n): Queue<T>\n\n// General overload for dynamic strategy (union type)\nexport function createQueue<T = unknown>(\n name: string,\n strategy: 'local' | 'async',\n options?: LocalQueueOptions | AsyncQueueOptions\n): Queue<T>\n\nexport function createQueue<T = unknown>(\n name: string,\n strategy: 'local' | 'async',\n options?: LocalQueueOptions | AsyncQueueOptions\n): Queue<T> {\n if (strategy === 'async') {\n return createAsyncQueue<T>(name, options as AsyncQueueOptions)\n }\n\n return createLocalQueue<T>(name, options as LocalQueueOptions)\n}\n"],
+ "mappings": "AACA,SAAS,wBAAwB;AACjC,SAAS,wBAAwB;AA0C1B,SAAS,YACd,MACA,UACA,SACU;AACV,MAAI,aAAa,SAAS;AACxB,WAAO,iBAAoB,MAAM,OAA4B;AAAA,EAC/D;AAEA,SAAO,iBAAoB,MAAM,OAA4B;AAC/D;",
+ "names": []
+ }
package/dist/index.js ADDED
@@ -0,0 +1,10 @@
+ export * from "./types.js";
+ import { createQueue } from "./factory.js";
+ export * from "./worker/registry.js";
+ import { runWorker, createRoutedHandler } from "./worker/runner.js";
+ export {
+   createQueue,
+   createRoutedHandler,
+   runWorker
+ };
+ //# sourceMappingURL=index.js.map
package/dist/index.js.map ADDED
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../src/index.ts"],
+ "sourcesContent": ["/**\n * @open-mercato/queue\n *\n * Multi-strategy job queue package supporting local (file-based) and async (BullMQ) strategies.\n *\n * @example\n * ```typescript\n * import { createQueue } from '@open-mercato/queue'\n *\n * // Create a local queue\n * const queue = createQueue<{ userId: string }>('my-queue', 'local')\n *\n * // Enqueue a job\n * await queue.enqueue({ userId: '123' })\n *\n * // Process jobs\n * await queue.process(async (job, ctx) => {\n * console.log(`Processing job ${ctx.jobId}:`, job.payload)\n * })\n * ```\n */\n\nexport * from './types'\nexport { createQueue } from './factory'\n\n// Worker utilities\nexport * from './worker/registry'\nexport { runWorker, createRoutedHandler } from './worker/runner'\n"],
+ "mappings": "AAsBA,cAAc;AACd,SAAS,mBAAmB;AAG5B,cAAc;AACd,SAAS,WAAW,2BAA2B;",
+ "names": []
+ }
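Editor's note: together with the JSDoc embedded in the source map above, a minimal consumer-side sketch of the public entry point could look like the following (queue name and payload type are illustrative):

import { createQueue } from '@open-mercato/queue'

type EmailJob = { to: string; subject: string }

// 'local' stores jobs as JSON files under .queue/<name>/ (see the local strategy below);
// 'async' would require Redis and the optional bullmq peer dependency.
const queue = createQueue<EmailJob>('emails', 'local')

await queue.enqueue({ to: 'user@example.com', subject: 'Welcome' })

// Without a limit this starts a continuous polling worker (sentinel result of -1/-1).
await queue.process(async (job, ctx) => {
  console.log(`job ${ctx.jobId} on ${ctx.queueName}:`, job.payload)
})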
package/dist/strategies/async.js ADDED
@@ -0,0 +1,125 @@
+ function resolveConnection(options) {
+   const url = options?.url ?? process.env.REDIS_URL ?? process.env.QUEUE_REDIS_URL;
+   if (url) {
+     return url;
+   }
+   if (options?.host) {
+     return {
+       host: options.host,
+       port: options.port ?? 6379,
+       password: options.password
+     };
+   }
+   return { host: "localhost", port: 6379 };
+ }
+ function createAsyncQueue(name, options) {
+   const connection = resolveConnection(options?.connection);
+   const concurrency = options?.concurrency ?? 1;
+   let bullQueue = null;
+   let bullWorker = null;
+   let bullmqModule = null;
+   async function getBullMQ() {
+     if (!bullmqModule) {
+       try {
+         bullmqModule = await import("bullmq");
+       } catch {
+         throw new Error(
+           "BullMQ is required for async queue strategy. Install it with: npm install bullmq"
+         );
+       }
+     }
+     return bullmqModule;
+   }
+   async function getQueue() {
+     if (!bullQueue) {
+       const { Queue: BullQueueClass } = await getBullMQ();
+       bullQueue = new BullQueueClass(name, { connection });
+     }
+     return bullQueue;
+   }
+   async function enqueue(data) {
+     const queue = await getQueue();
+     const jobData = {
+       id: crypto.randomUUID(),
+       payload: data,
+       createdAt: (/* @__PURE__ */ new Date()).toISOString()
+     };
+     const job = await queue.add(jobData.id, jobData, {
+       removeOnComplete: true,
+       removeOnFail: 1e3
+       // Keep last 1000 failed jobs
+     });
+     return job.id ?? jobData.id;
+   }
+   async function process2(handler) {
+     const { Worker } = await getBullMQ();
+     bullWorker = new Worker(
+       name,
+       async (job) => {
+         const jobData = job.data;
+         await handler(jobData, {
+           jobId: job.id ?? jobData.id,
+           attemptNumber: job.attemptsMade + 1,
+           queueName: name
+         });
+       },
+       {
+         connection,
+         concurrency
+       }
+     );
+     bullWorker.on("completed", (job) => {
+       const jobWithId = job;
+       console.log(`[queue:${name}] Job ${jobWithId.id} completed`);
+     });
+     bullWorker.on("failed", (job, err) => {
+       const jobWithId = job;
+       const error = err;
+       console.error(`[queue:${name}] Job ${jobWithId?.id} failed:`, error.message);
+     });
+     bullWorker.on("error", (err) => {
+       const error = err;
+       console.error(`[queue:${name}] Worker error:`, error.message);
+     });
+     console.log(`[queue:${name}] Worker started with concurrency ${concurrency}`);
+     return { processed: -1, failed: -1, lastJobId: void 0 };
+   }
+   async function clear() {
+     const queue = await getQueue();
+     await queue.obliterate({ force: true });
+     return { removed: -1 };
+   }
+   async function close() {
+     if (bullWorker) {
+       await bullWorker.close();
+       bullWorker = null;
+     }
+     if (bullQueue) {
+       await bullQueue.close();
+       bullQueue = null;
+     }
+   }
+   async function getJobCounts() {
+     const queue = await getQueue();
+     const counts = await queue.getJobCounts("waiting", "active", "completed", "failed");
+     return {
+       waiting: counts.waiting ?? 0,
+       active: counts.active ?? 0,
+       completed: counts.completed ?? 0,
+       failed: counts.failed ?? 0
+     };
+   }
+   return {
+     name,
+     strategy: "async",
+     enqueue,
+     process: process2,
+     clear,
+     close,
+     getJobCounts
+   };
+ }
+ export {
+   createAsyncQueue
+ };
+ //# sourceMappingURL=async.js.map
package/dist/strategies/async.js.map ADDED
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../src/strategies/async.ts"],
+ "sourcesContent": ["import type { Queue, QueuedJob, JobHandler, AsyncQueueOptions, ProcessResult } from '../types'\n\n// BullMQ interface types - we define the shape we use to maintain type safety\n// while keeping bullmq as an optional peer dependency\ntype ConnectionOptions = { host?: string; port?: number; password?: string } | string\n\ninterface BullQueueInterface<T> {\n add: (name: string, data: T, opts?: { removeOnComplete?: boolean; removeOnFail?: number }) => Promise<{ id?: string }>\n obliterate: (opts?: { force?: boolean }) => Promise<void>\n close: () => Promise<void>\n getJobCounts: (...states: string[]) => Promise<Record<string, number>>\n}\n\ninterface BullWorkerInterface {\n on: (event: string, handler: (...args: unknown[]) => void) => void\n close: () => Promise<void>\n}\n\ninterface BullMQModule {\n Queue: new <T>(name: string, opts: { connection: ConnectionOptions }) => BullQueueInterface<T>\n Worker: new <T>(\n name: string,\n processor: (job: { id?: string; data: T; attemptsMade: number }) => Promise<void>,\n opts: { connection: ConnectionOptions; concurrency: number }\n ) => BullWorkerInterface\n}\n\n/**\n * Resolves Redis connection options from various sources.\n */\nfunction resolveConnection(options?: AsyncQueueOptions['connection']): ConnectionOptions {\n // Priority: explicit options > environment variables\n const url = options?.url ?? process.env.REDIS_URL ?? process.env.QUEUE_REDIS_URL\n\n if (url) {\n return url\n }\n\n if (options?.host) {\n return {\n host: options.host,\n port: options.port ?? 6379,\n password: options.password,\n }\n }\n\n // Default to localhost\n return { host: 'localhost', port: 6379 }\n}\n\n/**\n * Creates a BullMQ-based async queue.\n *\n * This strategy provides:\n * - Persistent job storage in Redis\n * - Automatic retries with exponential backoff\n * - Concurrent job processing\n * - Job prioritization and scheduling\n *\n * @template T - The payload type for jobs\n * @param name - Queue name\n * @param options - Async queue options\n */\nexport function createAsyncQueue<T = unknown>(\n name: string,\n options?: AsyncQueueOptions\n): Queue<T> {\n const connection = resolveConnection(options?.connection)\n const concurrency = options?.concurrency ?? 1\n\n let bullQueue: BullQueueInterface<QueuedJob<T>> | null = null\n let bullWorker: BullWorkerInterface | null = null\n let bullmqModule: BullMQModule | null = null\n\n // -------------------------------------------------------------------------\n // Lazy BullMQ initialization\n // -------------------------------------------------------------------------\n\n async function getBullMQ(): Promise<BullMQModule> {\n if (!bullmqModule) {\n try {\n bullmqModule = await import('bullmq') as unknown as BullMQModule\n } catch {\n throw new Error(\n 'BullMQ is required for async queue strategy. 
Install it with: npm install bullmq'\n )\n }\n }\n return bullmqModule\n }\n\n async function getQueue(): Promise<BullQueueInterface<QueuedJob<T>>> {\n if (!bullQueue) {\n const { Queue: BullQueueClass } = await getBullMQ()\n bullQueue = new BullQueueClass<QueuedJob<T>>(name, { connection })\n }\n return bullQueue\n }\n\n // -------------------------------------------------------------------------\n // Queue Implementation\n // -------------------------------------------------------------------------\n\n async function enqueue(data: T): Promise<string> {\n const queue = await getQueue()\n const jobData: QueuedJob<T> = {\n id: crypto.randomUUID(),\n payload: data,\n createdAt: new Date().toISOString(),\n }\n\n const job = await queue.add(jobData.id, jobData, {\n removeOnComplete: true,\n removeOnFail: 1000, // Keep last 1000 failed jobs\n })\n\n return job.id ?? jobData.id\n }\n\n async function process(handler: JobHandler<T>): Promise<ProcessResult> {\n const { Worker } = await getBullMQ()\n\n // Create worker that processes jobs\n bullWorker = new Worker<QueuedJob<T>>(\n name,\n async (job) => {\n const jobData = job.data\n await handler(jobData, {\n jobId: job.id ?? jobData.id,\n attemptNumber: job.attemptsMade + 1,\n queueName: name,\n })\n },\n {\n connection,\n concurrency,\n }\n )\n\n // Set up event handlers\n bullWorker.on('completed', (job) => {\n const jobWithId = job as { id?: string }\n console.log(`[queue:${name}] Job ${jobWithId.id} completed`)\n })\n\n bullWorker.on('failed', (job, err) => {\n const jobWithId = job as { id?: string } | undefined\n const error = err as Error\n console.error(`[queue:${name}] Job ${jobWithId?.id} failed:`, error.message)\n })\n\n bullWorker.on('error', (err) => {\n const error = err as Error\n console.error(`[queue:${name}] Worker error:`, error.message)\n })\n\n console.log(`[queue:${name}] Worker started with concurrency ${concurrency}`)\n\n // For async strategy, return a sentinel result indicating worker mode\n // processed=-1 signals that this is a continuous worker, not a batch process\n return { processed: -1, failed: -1, lastJobId: undefined }\n }\n\n async function clear(): Promise<{ removed: number }> {\n const queue = await getQueue()\n\n // Obliterate removes all jobs from the queue\n await queue.obliterate({ force: true })\n\n return { removed: -1 } // BullMQ obliterate doesn't return count\n }\n\n async function close(): Promise<void> {\n if (bullWorker) {\n await bullWorker.close()\n bullWorker = null\n }\n if (bullQueue) {\n await bullQueue.close()\n bullQueue = null\n }\n }\n\n async function getJobCounts(): Promise<{\n waiting: number\n active: number\n completed: number\n failed: number\n }> {\n const queue = await getQueue()\n const counts = await queue.getJobCounts('waiting', 'active', 'completed', 'failed')\n return {\n waiting: counts.waiting ?? 0,\n active: counts.active ?? 0,\n completed: counts.completed ?? 0,\n failed: counts.failed ?? 0,\n }\n }\n\n return {\n name,\n strategy: 'async',\n enqueue,\n process,\n clear,\n close,\n getJobCounts,\n }\n}\n"],
+ "mappings": "AA8BA,SAAS,kBAAkB,SAA8D;AAEvF,QAAM,MAAM,SAAS,OAAO,QAAQ,IAAI,aAAa,QAAQ,IAAI;AAEjE,MAAI,KAAK;AACP,WAAO;AAAA,EACT;AAEA,MAAI,SAAS,MAAM;AACjB,WAAO;AAAA,MACL,MAAM,QAAQ;AAAA,MACd,MAAM,QAAQ,QAAQ;AAAA,MACtB,UAAU,QAAQ;AAAA,IACpB;AAAA,EACF;AAGA,SAAO,EAAE,MAAM,aAAa,MAAM,KAAK;AACzC;AAeO,SAAS,iBACd,MACA,SACU;AACV,QAAM,aAAa,kBAAkB,SAAS,UAAU;AACxD,QAAM,cAAc,SAAS,eAAe;AAE5C,MAAI,YAAqD;AACzD,MAAI,aAAyC;AAC7C,MAAI,eAAoC;AAMxC,iBAAe,YAAmC;AAChD,QAAI,CAAC,cAAc;AACjB,UAAI;AACF,uBAAe,MAAM,OAAO,QAAQ;AAAA,MACtC,QAAQ;AACN,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,iBAAe,WAAsD;AACnE,QAAI,CAAC,WAAW;AACd,YAAM,EAAE,OAAO,eAAe,IAAI,MAAM,UAAU;AAClD,kBAAY,IAAI,eAA6B,MAAM,EAAE,WAAW,CAAC;AAAA,IACnE;AACA,WAAO;AAAA,EACT;AAMA,iBAAe,QAAQ,MAA0B;AAC/C,UAAM,QAAQ,MAAM,SAAS;AAC7B,UAAM,UAAwB;AAAA,MAC5B,IAAI,OAAO,WAAW;AAAA,MACtB,SAAS;AAAA,MACT,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IACpC;AAEA,UAAM,MAAM,MAAM,MAAM,IAAI,QAAQ,IAAI,SAAS;AAAA,MAC/C,kBAAkB;AAAA,MAClB,cAAc;AAAA;AAAA,IAChB,CAAC;AAED,WAAO,IAAI,MAAM,QAAQ;AAAA,EAC3B;AAEA,iBAAeA,SAAQ,SAAgD;AACrE,UAAM,EAAE,OAAO,IAAI,MAAM,UAAU;AAGnC,iBAAa,IAAI;AAAA,MACf;AAAA,MACA,OAAO,QAAQ;AACb,cAAM,UAAU,IAAI;AACpB,cAAM,QAAQ,SAAS;AAAA,UACrB,OAAO,IAAI,MAAM,QAAQ;AAAA,UACzB,eAAe,IAAI,eAAe;AAAA,UAClC,WAAW;AAAA,QACb,CAAC;AAAA,MACH;AAAA,MACA;AAAA,QACE;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAGA,eAAW,GAAG,aAAa,CAAC,QAAQ;AAClC,YAAM,YAAY;AAClB,cAAQ,IAAI,UAAU,IAAI,SAAS,UAAU,EAAE,YAAY;AAAA,IAC7D,CAAC;AAED,eAAW,GAAG,UAAU,CAAC,KAAK,QAAQ;AACpC,YAAM,YAAY;AAClB,YAAM,QAAQ;AACd,cAAQ,MAAM,UAAU,IAAI,SAAS,WAAW,EAAE,YAAY,MAAM,OAAO;AAAA,IAC7E,CAAC;AAED,eAAW,GAAG,SAAS,CAAC,QAAQ;AAC9B,YAAM,QAAQ;AACd,cAAQ,MAAM,UAAU,IAAI,mBAAmB,MAAM,OAAO;AAAA,IAC9D,CAAC;AAED,YAAQ,IAAI,UAAU,IAAI,qCAAqC,WAAW,EAAE;AAI5E,WAAO,EAAE,WAAW,IAAI,QAAQ,IAAI,WAAW,OAAU;AAAA,EAC3D;AAEA,iBAAe,QAAsC;AACnD,UAAM,QAAQ,MAAM,SAAS;AAG7B,UAAM,MAAM,WAAW,EAAE,OAAO,KAAK,CAAC;AAEtC,WAAO,EAAE,SAAS,GAAG;AAAA,EACvB;AAEA,iBAAe,QAAuB;AACpC,QAAI,YAAY;AACd,YAAM,WAAW,MAAM;AACvB,mBAAa;AAAA,IACf;AACA,QAAI,WAAW;AACb,YAAM,UAAU,MAAM;AACtB,kBAAY;AAAA,IACd;AAAA,EACF;AAEA,iBAAe,eAKZ;AACD,UAAM,QAAQ,MAAM,SAAS;AAC7B,UAAM,SAAS,MAAM,MAAM,aAAa,WAAW,UAAU,aAAa,QAAQ;AAClF,WAAO;AAAA,MACL,SAAS,OAAO,WAAW;AAAA,MAC3B,QAAQ,OAAO,UAAU;AAAA,MACzB,WAAW,OAAO,aAAa;AAAA,MAC/B,QAAQ,OAAO,UAAU;AAAA,IAC3B;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA,UAAU;AAAA,IACV;AAAA,IACA,SAAAA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;",
+ "names": ["process"]
+ }
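Editor's note: a hedged sketch of using the async strategy, based on resolveConnection above: an explicit url wins, then REDIS_URL / QUEUE_REDIS_URL, then host/port, falling back to localhost:6379 (the queue name, payload, and env handling here are illustrative, and bullmq must be installed):

import { createQueue } from '@open-mercato/queue'

// Explicit connection.url takes precedence over the REDIS_URL / QUEUE_REDIS_URL env vars.
const queue = createQueue<{ type: string }>('events', 'async', {
  connection: { url: process.env.REDIS_URL },
  concurrency: 5,
})

await queue.enqueue({ type: 'ping' })
console.log(await queue.getJobCounts()) // { waiting, active, completed, failed }
await queue.close()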
package/dist/strategies/local.js ADDED
@@ -0,0 +1,199 @@
+ import fs from "node:fs";
+ import path from "node:path";
+ import crypto from "node:crypto";
+ const DEFAULT_POLL_INTERVAL = 1e3;
+ function createLocalQueue(name, options) {
+   const baseDir = options?.baseDir ?? path.resolve(".queue");
+   const queueDir = path.join(baseDir, name);
+   const queueFile = path.join(queueDir, "queue.json");
+   const stateFile = path.join(queueDir, "state.json");
+   const concurrency = options?.concurrency ?? 1;
+   const pollInterval = options?.pollInterval ?? DEFAULT_POLL_INTERVAL;
+   let pollingTimer = null;
+   let isProcessing = false;
+   let activeHandler = null;
+   function ensureDir() {
+     try {
+       fs.mkdirSync(queueDir, { recursive: true });
+     } catch (e) {
+       const error = e;
+       if (error.code !== "EEXIST") throw error;
+     }
+     try {
+       fs.writeFileSync(queueFile, "[]", { encoding: "utf8", flag: "wx" });
+     } catch (e) {
+       const error = e;
+       if (error.code !== "EEXIST") throw error;
+     }
+     try {
+       fs.writeFileSync(stateFile, "{}", { encoding: "utf8", flag: "wx" });
+     } catch (e) {
+       const error = e;
+       if (error.code !== "EEXIST") throw error;
+     }
+   }
+   function readQueue() {
+     ensureDir();
+     try {
+       const content = fs.readFileSync(queueFile, "utf8");
+       return JSON.parse(content);
+     } catch (error) {
+       const e = error;
+       if (e.code === "ENOENT") {
+         return [];
+       }
+       console.error(`[queue:${name}] Failed to read queue file:`, e.message);
+       throw new Error(`Queue file corrupted or unreadable: ${e.message}`);
+     }
+   }
+   function writeQueue(jobs) {
+     ensureDir();
+     fs.writeFileSync(queueFile, JSON.stringify(jobs, null, 2), "utf8");
+   }
+   function readState() {
+     ensureDir();
+     try {
+       const content = fs.readFileSync(stateFile, "utf8");
+       return JSON.parse(content);
+     } catch {
+       return {};
+     }
+   }
+   function writeState(state) {
+     ensureDir();
+     fs.writeFileSync(stateFile, JSON.stringify(state, null, 2), "utf8");
+   }
+   function generateId() {
+     return crypto.randomUUID();
+   }
+   async function enqueue(data) {
+     const jobs = readQueue();
+     const job = {
+       id: generateId(),
+       payload: data,
+       createdAt: (/* @__PURE__ */ new Date()).toISOString()
+     };
+     jobs.push(job);
+     writeQueue(jobs);
+     return job.id;
+   }
+   async function processBatch(handler, options2) {
+     const state = readState();
+     const jobs = readQueue();
+     const lastProcessedIndex = state.lastProcessedId ? jobs.findIndex((j) => j.id === state.lastProcessedId) : -1;
+     const pendingJobs = jobs.slice(lastProcessedIndex + 1);
+     const jobsToProcess = options2?.limit ? pendingJobs.slice(0, options2.limit) : pendingJobs;
+     let processed = 0;
+     let failed = 0;
+     let lastJobId;
+     const jobIdsToRemove = /* @__PURE__ */ new Set();
+     for (const job of jobsToProcess) {
+       try {
+         await Promise.resolve(
+           handler(job, {
+             jobId: job.id,
+             attemptNumber: 1,
+             queueName: name
+           })
+         );
+         processed++;
+         lastJobId = job.id;
+         jobIdsToRemove.add(job.id);
+         console.log(`[queue:${name}] Job ${job.id} completed`);
+       } catch (error) {
+         console.error(`[queue:${name}] Job ${job.id} failed:`, error);
+         failed++;
+         lastJobId = job.id;
+         jobIdsToRemove.add(job.id);
+       }
+     }
+     if (jobIdsToRemove.size > 0) {
+       const updatedJobs = jobs.filter((j) => !jobIdsToRemove.has(j.id));
+       writeQueue(updatedJobs);
+       const newState = {
+         lastProcessedId: lastJobId,
+         completedCount: (state.completedCount ?? 0) + processed,
+         failedCount: (state.failedCount ?? 0) + failed
+       };
+       writeState(newState);
+     }
+     return { processed, failed, lastJobId };
+   }
+   async function pollAndProcess() {
+     if (isProcessing || !activeHandler) return;
+     isProcessing = true;
+     try {
+       await processBatch(activeHandler);
+     } catch (error) {
+       console.error(`[queue:${name}] Polling error:`, error);
+     } finally {
+       isProcessing = false;
+     }
+   }
+   async function process(handler, options2) {
+     if (options2?.limit) {
+       return processBatch(handler, options2);
+     }
+     activeHandler = handler;
+     await processBatch(handler);
+     pollingTimer = setInterval(() => {
+       pollAndProcess().catch((err) => {
+         console.error(`[queue:${name}] Poll cycle error:`, err);
+       });
+     }, pollInterval);
+     console.log(`[queue:${name}] Worker started with concurrency ${concurrency}`);
+     return { processed: -1, failed: -1, lastJobId: void 0 };
+   }
+   async function clear() {
+     const jobs = readQueue();
+     const removed = jobs.length;
+     writeQueue([]);
+     const state = readState();
+     writeState({
+       completedCount: state.completedCount,
+       failedCount: state.failedCount
+     });
+     return { removed };
+   }
+   async function close() {
+     if (pollingTimer) {
+       clearInterval(pollingTimer);
+       pollingTimer = null;
+     }
+     activeHandler = null;
+     const SHUTDOWN_TIMEOUT = 5e3;
+     const startTime = Date.now();
+     while (isProcessing) {
+       if (Date.now() - startTime > SHUTDOWN_TIMEOUT) {
+         console.warn(`[queue:${name}] Force closing after ${SHUTDOWN_TIMEOUT}ms timeout`);
+         break;
+       }
+       await new Promise((resolve) => setTimeout(resolve, 50));
+     }
+   }
+   async function getJobCounts() {
+     const state = readState();
+     const jobs = readQueue();
+     return {
+       waiting: jobs.length,
+       // All jobs in queue are waiting (processed ones are removed)
+       active: 0,
+       // Local strategy doesn't track active jobs
+       completed: state.completedCount ?? 0,
+       failed: state.failedCount ?? 0
+     };
+   }
+   return {
+     name,
+     strategy: "local",
+     enqueue,
+     process,
+     clear,
+     close,
+     getJobCounts
+   };
+ }
+ export {
+   createLocalQueue
+ };
+ //# sourceMappingURL=local.js.map
package/dist/strategies/local.js.map ADDED
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../src/strategies/local.ts"],
+ "sourcesContent": ["import fs from 'node:fs'\nimport path from 'node:path'\nimport crypto from 'node:crypto'\nimport type { Queue, QueuedJob, JobHandler, LocalQueueOptions, ProcessOptions, ProcessResult } from '../types'\n\ntype LocalState = {\n lastProcessedId?: string\n completedCount?: number\n failedCount?: number\n}\n\ntype StoredJob<T> = QueuedJob<T>\n\n/** Default polling interval in milliseconds */\nconst DEFAULT_POLL_INTERVAL = 1000\n\n/**\n * Creates a file-based local queue.\n *\n * Jobs are stored in JSON files within a directory structure:\n * - `.queue/<name>/queue.json` - Array of queued jobs\n * - `.queue/<name>/state.json` - Processing state (last processed ID)\n *\n * **Limitations:**\n * - Jobs are processed sequentially (concurrency option is for logging/compatibility only)\n * - Not suitable for production or multi-process environments\n * - No retry mechanism for failed jobs\n *\n * @template T - The payload type for jobs\n * @param name - Queue name (used for directory naming)\n * @param options - Local queue options\n */\nexport function createLocalQueue<T = unknown>(\n name: string,\n options?: LocalQueueOptions\n): Queue<T> {\n const baseDir = options?.baseDir ?? path.resolve('.queue')\n const queueDir = path.join(baseDir, name)\n const queueFile = path.join(queueDir, 'queue.json')\n const stateFile = path.join(queueDir, 'state.json')\n // Note: concurrency is stored for logging/compatibility but jobs are processed sequentially\n const concurrency = options?.concurrency ?? 1\n const pollInterval = options?.pollInterval ?? DEFAULT_POLL_INTERVAL\n\n // Worker state for continuous polling\n let pollingTimer: ReturnType<typeof setInterval> | null = null\n let isProcessing = false\n let activeHandler: JobHandler<T> | null = null\n\n // -------------------------------------------------------------------------\n // File Operations\n // -------------------------------------------------------------------------\n\n function ensureDir(): void {\n // Use atomic operations to handle race conditions\n try {\n fs.mkdirSync(queueDir, { recursive: true })\n } catch (e: unknown) {\n const error = e as NodeJS.ErrnoException\n if (error.code !== 'EEXIST') throw error\n }\n\n // Initialize queue file with exclusive create flag\n try {\n fs.writeFileSync(queueFile, '[]', { encoding: 'utf8', flag: 'wx' })\n } catch (e: unknown) {\n const error = e as NodeJS.ErrnoException\n if (error.code !== 'EEXIST') throw error\n }\n\n // Initialize state file with exclusive create flag\n try {\n fs.writeFileSync(stateFile, '{}', { encoding: 'utf8', flag: 'wx' })\n } catch (e: unknown) {\n const error = e as NodeJS.ErrnoException\n if (error.code !== 'EEXIST') throw error\n }\n }\n\n function readQueue(): StoredJob<T>[] {\n ensureDir()\n try {\n const content = fs.readFileSync(queueFile, 'utf8')\n return JSON.parse(content) as StoredJob<T>[]\n } catch (error: unknown) {\n const e = error as NodeJS.ErrnoException\n if (e.code === 'ENOENT') {\n return []\n }\n console.error(`[queue:${name}] Failed to read queue file:`, e.message)\n throw new Error(`Queue file corrupted or unreadable: ${e.message}`)\n }\n }\n\n function writeQueue(jobs: StoredJob<T>[]): void {\n ensureDir()\n fs.writeFileSync(queueFile, JSON.stringify(jobs, null, 2), 'utf8')\n }\n\n function readState(): LocalState {\n ensureDir()\n try {\n const content = fs.readFileSync(stateFile, 'utf8')\n return JSON.parse(content) as LocalState\n } catch {\n return {}\n }\n }\n\n function writeState(state: LocalState): void {\n ensureDir()\n 
fs.writeFileSync(stateFile, JSON.stringify(state, null, 2), 'utf8')\n }\n\n function generateId(): string {\n return crypto.randomUUID()\n }\n\n // -------------------------------------------------------------------------\n // Queue Implementation\n // -------------------------------------------------------------------------\n\n async function enqueue(data: T): Promise<string> {\n const jobs = readQueue()\n const job: StoredJob<T> = {\n id: generateId(),\n payload: data,\n createdAt: new Date().toISOString(),\n }\n jobs.push(job)\n writeQueue(jobs)\n return job.id\n }\n\n /**\n * Process pending jobs in a single batch (internal helper).\n */\n async function processBatch(\n handler: JobHandler<T>,\n options?: ProcessOptions\n ): Promise<ProcessResult> {\n const state = readState()\n const jobs = readQueue()\n\n // Find jobs that haven't been processed yet\n const lastProcessedIndex = state.lastProcessedId\n ? jobs.findIndex((j) => j.id === state.lastProcessedId)\n : -1\n\n const pendingJobs = jobs.slice(lastProcessedIndex + 1)\n const jobsToProcess = options?.limit\n ? pendingJobs.slice(0, options.limit)\n : pendingJobs\n\n let processed = 0\n let failed = 0\n let lastJobId: string | undefined\n const jobIdsToRemove = new Set<string>()\n\n for (const job of jobsToProcess) {\n try {\n await Promise.resolve(\n handler(job, {\n jobId: job.id,\n attemptNumber: 1,\n queueName: name,\n })\n )\n processed++\n lastJobId = job.id\n jobIdsToRemove.add(job.id)\n console.log(`[queue:${name}] Job ${job.id} completed`)\n } catch (error) {\n console.error(`[queue:${name}] Job ${job.id} failed:`, error)\n failed++\n lastJobId = job.id\n jobIdsToRemove.add(job.id) // Remove failed jobs too (matching async strategy)\n }\n }\n\n // Remove processed jobs from queue (matching async removeOnComplete behavior)\n if (jobIdsToRemove.size > 0) {\n const updatedJobs = jobs.filter((j) => !jobIdsToRemove.has(j.id))\n writeQueue(updatedJobs)\n\n // Update state with running counts\n const newState: LocalState = {\n lastProcessedId: lastJobId,\n completedCount: (state.completedCount ?? 0) + processed,\n failedCount: (state.failedCount ?? 
0) + failed,\n }\n writeState(newState)\n }\n\n return { processed, failed, lastJobId }\n }\n\n /**\n * Poll for and process new jobs.\n */\n async function pollAndProcess(): Promise<void> {\n // Skip if already processing to avoid concurrent file access\n if (isProcessing || !activeHandler) return\n\n isProcessing = true\n try {\n await processBatch(activeHandler)\n } catch (error) {\n console.error(`[queue:${name}] Polling error:`, error)\n } finally {\n isProcessing = false\n }\n }\n\n async function process(\n handler: JobHandler<T>,\n options?: ProcessOptions\n ): Promise<ProcessResult> {\n // If limit is specified, do a single batch (backward compatibility)\n if (options?.limit) {\n return processBatch(handler, options)\n }\n\n // Start continuous polling mode (like BullMQ Worker)\n activeHandler = handler\n\n // Process any pending jobs immediately\n await processBatch(handler)\n\n // Start polling interval for new jobs\n pollingTimer = setInterval(() => {\n pollAndProcess().catch((err) => {\n console.error(`[queue:${name}] Poll cycle error:`, err)\n })\n }, pollInterval)\n\n console.log(`[queue:${name}] Worker started with concurrency ${concurrency}`)\n\n // Return sentinel value indicating continuous worker mode (like async strategy)\n return { processed: -1, failed: -1, lastJobId: undefined }\n }\n\n async function clear(): Promise<{ removed: number }> {\n const jobs = readQueue()\n const removed = jobs.length\n writeQueue([])\n // Reset state but preserve counts for historical tracking\n const state = readState()\n writeState({\n completedCount: state.completedCount,\n failedCount: state.failedCount,\n })\n return { removed }\n }\n\n async function close(): Promise<void> {\n // Stop polling timer\n if (pollingTimer) {\n clearInterval(pollingTimer)\n pollingTimer = null\n }\n activeHandler = null\n\n // Wait for any in-progress processing to complete (with timeout)\n const SHUTDOWN_TIMEOUT = 5000\n const startTime = Date.now()\n\n while (isProcessing) {\n if (Date.now() - startTime > SHUTDOWN_TIMEOUT) {\n console.warn(`[queue:${name}] Force closing after ${SHUTDOWN_TIMEOUT}ms timeout`)\n break\n }\n await new Promise((resolve) => setTimeout(resolve, 50))\n }\n }\n\n async function getJobCounts(): Promise<{\n waiting: number\n active: number\n completed: number\n failed: number\n }> {\n const state = readState()\n const jobs = readQueue()\n\n return {\n waiting: jobs.length, // All jobs in queue are waiting (processed ones are removed)\n active: 0, // Local strategy doesn't track active jobs\n completed: state.completedCount ?? 0,\n failed: state.failedCount ?? 0,\n }\n }\n\n return {\n name,\n strategy: 'local',\n enqueue,\n process,\n clear,\n close,\n getJobCounts,\n }\n}\n"],
+ "mappings": "AAAA,OAAO,QAAQ;AACf,OAAO,UAAU;AACjB,OAAO,YAAY;AAYnB,MAAM,wBAAwB;AAkBvB,SAAS,iBACd,MACA,SACU;AACV,QAAM,UAAU,SAAS,WAAW,KAAK,QAAQ,QAAQ;AACzD,QAAM,WAAW,KAAK,KAAK,SAAS,IAAI;AACxC,QAAM,YAAY,KAAK,KAAK,UAAU,YAAY;AAClD,QAAM,YAAY,KAAK,KAAK,UAAU,YAAY;AAElD,QAAM,cAAc,SAAS,eAAe;AAC5C,QAAM,eAAe,SAAS,gBAAgB;AAG9C,MAAI,eAAsD;AAC1D,MAAI,eAAe;AACnB,MAAI,gBAAsC;AAM1C,WAAS,YAAkB;AAEzB,QAAI;AACF,SAAG,UAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AAAA,IAC5C,SAAS,GAAY;AACnB,YAAM,QAAQ;AACd,UAAI,MAAM,SAAS,SAAU,OAAM;AAAA,IACrC;AAGA,QAAI;AACF,SAAG,cAAc,WAAW,MAAM,EAAE,UAAU,QAAQ,MAAM,KAAK,CAAC;AAAA,IACpE,SAAS,GAAY;AACnB,YAAM,QAAQ;AACd,UAAI,MAAM,SAAS,SAAU,OAAM;AAAA,IACrC;AAGA,QAAI;AACF,SAAG,cAAc,WAAW,MAAM,EAAE,UAAU,QAAQ,MAAM,KAAK,CAAC;AAAA,IACpE,SAAS,GAAY;AACnB,YAAM,QAAQ;AACd,UAAI,MAAM,SAAS,SAAU,OAAM;AAAA,IACrC;AAAA,EACF;AAEA,WAAS,YAA4B;AACnC,cAAU;AACV,QAAI;AACF,YAAM,UAAU,GAAG,aAAa,WAAW,MAAM;AACjD,aAAO,KAAK,MAAM,OAAO;AAAA,IAC3B,SAAS,OAAgB;AACvB,YAAM,IAAI;AACV,UAAI,EAAE,SAAS,UAAU;AACvB,eAAO,CAAC;AAAA,MACV;AACA,cAAQ,MAAM,UAAU,IAAI,gCAAgC,EAAE,OAAO;AACrE,YAAM,IAAI,MAAM,uCAAuC,EAAE,OAAO,EAAE;AAAA,IACpE;AAAA,EACF;AAEA,WAAS,WAAW,MAA4B;AAC9C,cAAU;AACV,OAAG,cAAc,WAAW,KAAK,UAAU,MAAM,MAAM,CAAC,GAAG,MAAM;AAAA,EACnE;AAEA,WAAS,YAAwB;AAC/B,cAAU;AACV,QAAI;AACF,YAAM,UAAU,GAAG,aAAa,WAAW,MAAM;AACjD,aAAO,KAAK,MAAM,OAAO;AAAA,IAC3B,QAAQ;AACN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAEA,WAAS,WAAW,OAAyB;AAC3C,cAAU;AACV,OAAG,cAAc,WAAW,KAAK,UAAU,OAAO,MAAM,CAAC,GAAG,MAAM;AAAA,EACpE;AAEA,WAAS,aAAqB;AAC5B,WAAO,OAAO,WAAW;AAAA,EAC3B;AAMA,iBAAe,QAAQ,MAA0B;AAC/C,UAAM,OAAO,UAAU;AACvB,UAAM,MAAoB;AAAA,MACxB,IAAI,WAAW;AAAA,MACf,SAAS;AAAA,MACT,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IACpC;AACA,SAAK,KAAK,GAAG;AACb,eAAW,IAAI;AACf,WAAO,IAAI;AAAA,EACb;AAKA,iBAAe,aACb,SACAA,UACwB;AACxB,UAAM,QAAQ,UAAU;AACxB,UAAM,OAAO,UAAU;AAGvB,UAAM,qBAAqB,MAAM,kBAC7B,KAAK,UAAU,CAAC,MAAM,EAAE,OAAO,MAAM,eAAe,IACpD;AAEJ,UAAM,cAAc,KAAK,MAAM,qBAAqB,CAAC;AACrD,UAAM,gBAAgBA,UAAS,QAC3B,YAAY,MAAM,GAAGA,SAAQ,KAAK,IAClC;AAEJ,QAAI,YAAY;AAChB,QAAI,SAAS;AACb,QAAI;AACJ,UAAM,iBAAiB,oBAAI,IAAY;AAEvC,eAAW,OAAO,eAAe;AAC/B,UAAI;AACF,cAAM,QAAQ;AAAA,UACZ,QAAQ,KAAK;AAAA,YACX,OAAO,IAAI;AAAA,YACX,eAAe;AAAA,YACf,WAAW;AAAA,UACb,CAAC;AAAA,QACH;AACA;AACA,oBAAY,IAAI;AAChB,uBAAe,IAAI,IAAI,EAAE;AACzB,gBAAQ,IAAI,UAAU,IAAI,SAAS,IAAI,EAAE,YAAY;AAAA,MACvD,SAAS,OAAO;AACd,gBAAQ,MAAM,UAAU,IAAI,SAAS,IAAI,EAAE,YAAY,KAAK;AAC5D;AACA,oBAAY,IAAI;AAChB,uBAAe,IAAI,IAAI,EAAE;AAAA,MAC3B;AAAA,IACF;AAGA,QAAI,eAAe,OAAO,GAAG;AAC3B,YAAM,cAAc,KAAK,OAAO,CAAC,MAAM,CAAC,eAAe,IAAI,EAAE,EAAE,CAAC;AAChE,iBAAW,WAAW;AAGtB,YAAM,WAAuB;AAAA,QAC3B,iBAAiB;AAAA,QACjB,iBAAiB,MAAM,kBAAkB,KAAK;AAAA,QAC9C,cAAc,MAAM,eAAe,KAAK;AAAA,MAC1C;AACA,iBAAW,QAAQ;AAAA,IACrB;AAEA,WAAO,EAAE,WAAW,QAAQ,UAAU;AAAA,EACxC;AAKA,iBAAe,iBAAgC;AAE7C,QAAI,gBAAgB,CAAC,cAAe;AAEpC,mBAAe;AACf,QAAI;AACF,YAAM,aAAa,aAAa;AAAA,IAClC,SAAS,OAAO;AACd,cAAQ,MAAM,UAAU,IAAI,oBAAoB,KAAK;AAAA,IACvD,UAAE;AACA,qBAAe;AAAA,IACjB;AAAA,EACF;AAEA,iBAAe,QACb,SACAA,UACwB;AAExB,QAAIA,UAAS,OAAO;AAClB,aAAO,aAAa,SAASA,QAAO;AAAA,IACtC;AAGA,oBAAgB;AAGhB,UAAM,aAAa,OAAO;AAG1B,mBAAe,YAAY,MAAM;AAC/B,qBAAe,EAAE,MAAM,CAAC,QAAQ;AAC9B,gBAAQ,MAAM,UAAU,IAAI,uBAAuB,GAAG;AAAA,MACxD,CAAC;AAAA,IACH,GAAG,YAAY;AAEf,YAAQ,IAAI,UAAU,IAAI,qCAAqC,WAAW,EAAE;AAG5E,WAAO,EAAE,WAAW,IAAI,QAAQ,IAAI,WAAW,OAAU;AAAA,EAC3D;AAEA,iBAAe,QAAsC;AACnD,UAAM,OAAO,UAAU;AACvB,UAAM,UAAU,KAAK;AACrB,eAAW,CAAC,CAAC;AAEb,UAAM,QAAQ,UAAU;AACxB,eAAW;AAAA,MACT,gBAAgB,MAAM;AAAA,MACtB,aAAa,MAAM;AAAA,IACrB,CAAC;AACD,WAAO,EAAE,QAAQ;AAAA,EACnB;AAEA,iBAAe,QAAuB;AAEpC,QAAI,cAAc;AAChB,oBAAc,YAAY;AAC1B,qBAAe;AAAA,IACjB;AACA,oBAAgB;AAGhB,UAAM,mBAAmB;AACzB,UAAM,YAAY,KAAK,IAAI;AAE3B,WAAO
,cAAc;AACnB,UAAI,KAAK,IAAI,IAAI,YAAY,kBAAkB;AAC7C,gBAAQ,KAAK,UAAU,IAAI,yBAAyB,gBAAgB,YAAY;AAChF;AAAA,MACF;AACA,YAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,EAAE,CAAC;AAAA,IACxD;AAAA,EACF;AAEA,iBAAe,eAKZ;AACD,UAAM,QAAQ,UAAU;AACxB,UAAM,OAAO,UAAU;AAEvB,WAAO;AAAA,MACL,SAAS,KAAK;AAAA;AAAA,MACd,QAAQ;AAAA;AAAA,MACR,WAAW,MAAM,kBAAkB;AAAA,MACnC,QAAQ,MAAM,eAAe;AAAA,IAC/B;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA,UAAU;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;",
+ "names": ["options"]
+ }
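Editor's note: a small sketch of the local strategy's one-shot batch mode, assuming Queue.process forwards the optional ProcessOptions shown in the implementation above (baseDir and payload are illustrative):

import { createQueue } from '@open-mercato/queue'

// By default files live under .queue/<name>/queue.json and state.json.
const queue = createQueue<{ n: number }>('demo', 'local', { baseDir: '/tmp/.queue' })
await queue.enqueue({ n: 1 })
await queue.enqueue({ n: 2 })

// Passing a limit runs a single batch and resolves with real counts,
// instead of starting the interval-based poller.
const result = await queue.process(async (job) => console.log(job.payload), { limit: 10 })
console.log(result) // e.g. { processed: 2, failed: 0, lastJobId: '...' }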
package/dist/types.js ADDED
@@ -0,0 +1 @@
+ //# sourceMappingURL=types.js.map
package/dist/types.js.map ADDED
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": [],
+ "sourcesContent": [],
+ "mappings": "",
+ "names": []
+ }
package/dist/worker/registry.js ADDED
@@ -0,0 +1,41 @@
+ const workers = /* @__PURE__ */ new Map();
+ function registerWorker(worker) {
+   if (workers.has(worker.id)) {
+     console.warn(`[worker-registry] Worker "${worker.id}" already registered, overwriting`);
+   }
+   workers.set(worker.id, worker);
+ }
+ function registerModuleWorkers(list) {
+   for (const worker of list) {
+     registerWorker(worker);
+   }
+ }
+ function getWorkers() {
+   return Array.from(workers.values());
+ }
+ function getWorkersByQueue(queue) {
+   return Array.from(workers.values()).filter((w) => w.queue === queue);
+ }
+ function getWorker(id) {
+   return workers.get(id);
+ }
+ function getRegisteredQueues() {
+   const queues = /* @__PURE__ */ new Set();
+   for (const worker of workers.values()) {
+     queues.add(worker.queue);
+   }
+   return Array.from(queues);
+ }
+ function clearWorkers() {
+   workers.clear();
+ }
+ export {
+   clearWorkers,
+   getRegisteredQueues,
+   getWorker,
+   getWorkers,
+   getWorkersByQueue,
+   registerModuleWorkers,
+   registerWorker
+ };
+ //# sourceMappingURL=registry.js.map
package/dist/worker/registry.js.map ADDED
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../src/worker/registry.ts"],
+ "sourcesContent": ["/**\n * Worker Registry\n *\n * Provides registration and lookup for auto-discovered queue workers.\n * Workers are registered during bootstrap and accessed by the CLI worker command.\n */\n\nimport type { WorkerDescriptor } from '../types'\n\nconst workers: Map<string, WorkerDescriptor> = new Map()\n\n/**\n * Register a single worker.\n * @param worker - The worker descriptor to register\n */\nexport function registerWorker(worker: WorkerDescriptor): void {\n if (workers.has(worker.id)) {\n console.warn(`[worker-registry] Worker \"${worker.id}\" already registered, overwriting`)\n }\n workers.set(worker.id, worker)\n}\n\n/**\n * Register multiple workers at once (typically from module discovery).\n * @param list - Array of worker descriptors to register\n */\nexport function registerModuleWorkers(list: WorkerDescriptor[]): void {\n for (const worker of list) {\n registerWorker(worker)\n }\n}\n\n/**\n * Get all registered workers.\n * @returns Array of all worker descriptors\n */\nexport function getWorkers(): WorkerDescriptor[] {\n return Array.from(workers.values())\n}\n\n/**\n * Get workers registered for a specific queue.\n * @param queue - The queue name to filter by\n * @returns Array of workers for the specified queue\n */\nexport function getWorkersByQueue(queue: string): WorkerDescriptor[] {\n return Array.from(workers.values()).filter((w) => w.queue === queue)\n}\n\n/**\n * Get a specific worker by ID.\n * @param id - The worker ID to look up\n * @returns The worker descriptor if found, undefined otherwise\n */\nexport function getWorker(id: string): WorkerDescriptor | undefined {\n return workers.get(id)\n}\n\n/**\n * Get all unique queue names that have registered workers.\n * @returns Array of queue names\n */\nexport function getRegisteredQueues(): string[] {\n const queues = new Set<string>()\n for (const worker of workers.values()) {\n queues.add(worker.queue)\n }\n return Array.from(queues)\n}\n\n/**\n * Clear all registered workers (useful for testing).\n */\nexport function clearWorkers(): void {\n workers.clear()\n}\n"],
+ "mappings": "AASA,MAAM,UAAyC,oBAAI,IAAI;AAMhD,SAAS,eAAe,QAAgC;AAC7D,MAAI,QAAQ,IAAI,OAAO,EAAE,GAAG;AAC1B,YAAQ,KAAK,6BAA6B,OAAO,EAAE,mCAAmC;AAAA,EACxF;AACA,UAAQ,IAAI,OAAO,IAAI,MAAM;AAC/B;AAMO,SAAS,sBAAsB,MAAgC;AACpE,aAAW,UAAU,MAAM;AACzB,mBAAe,MAAM;AAAA,EACvB;AACF;AAMO,SAAS,aAAiC;AAC/C,SAAO,MAAM,KAAK,QAAQ,OAAO,CAAC;AACpC;AAOO,SAAS,kBAAkB,OAAmC;AACnE,SAAO,MAAM,KAAK,QAAQ,OAAO,CAAC,EAAE,OAAO,CAAC,MAAM,EAAE,UAAU,KAAK;AACrE;AAOO,SAAS,UAAU,IAA0C;AAClE,SAAO,QAAQ,IAAI,EAAE;AACvB;AAMO,SAAS,sBAAgC;AAC9C,QAAM,SAAS,oBAAI,IAAY;AAC/B,aAAW,UAAU,QAAQ,OAAO,GAAG;AACrC,WAAO,IAAI,OAAO,KAAK;AAAA,EACzB;AACA,SAAO,MAAM,KAAK,MAAM;AAC1B;AAKO,SAAS,eAAqB;AACnC,UAAQ,MAAM;AAChB;",
+ "names": []
+ }
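Editor's note: a brief sketch of how the registry above could be used during bootstrap. Only id and queue are read by the registry code shown here; the remaining WorkerDescriptor fields are not visible in this diff, so the cast below is purely illustrative:

import { registerWorker, getWorkersByQueue, getRegisteredQueues, type WorkerDescriptor } from '@open-mercato/queue'

// Illustrative descriptor: id/queue are real fields used above, the rest is assumed.
const worker = { id: 'events.send-email', queue: 'events' } as WorkerDescriptor

registerWorker(worker)
console.log(getRegisteredQueues())       // ['events']
console.log(getWorkersByQueue('events')) // [worker]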
package/dist/worker/runner.js ADDED
@@ -0,0 +1,57 @@
+ import { createQueue } from "../factory.js";
+ async function runWorker(options) {
+   const {
+     queueName,
+     handler,
+     connection,
+     concurrency = 1,
+     gracefulShutdown = true,
+     background = false,
+     strategy: strategyOption
+   } = options;
+   const strategy = strategyOption ?? (process.env.QUEUE_STRATEGY === "async" ? "async" : "local");
+   console.log(`[worker] Starting worker for queue "${queueName}" (strategy: ${strategy})...`);
+   const queue = createQueue(queueName, strategy, {
+     connection,
+     concurrency
+   });
+   if (gracefulShutdown) {
+     const shutdown = async (signal) => {
+       console.log(`[worker] Received ${signal}, shutting down gracefully...`);
+       try {
+         await queue.close();
+         console.log("[worker] Worker closed successfully");
+         process.exit(0);
+       } catch (error) {
+         console.error("[worker] Error during shutdown:", error);
+         process.exit(1);
+       }
+     };
+     process.on("SIGTERM", () => shutdown("SIGTERM"));
+     process.on("SIGINT", () => shutdown("SIGINT"));
+   }
+   await queue.process(handler);
+   console.log(`[worker] Worker running with concurrency ${concurrency}`);
+   if (background) {
+     return;
+   }
+   console.log("[worker] Press Ctrl+C to stop");
+   await new Promise(() => {
+   });
+ }
+ function createRoutedHandler(handlers) {
+   return async (job, ctx) => {
+     const type = job.payload.type;
+     const handler = handlers[type];
+     if (!handler) {
+       console.warn(`[worker] No handler registered for job type "${type}"`);
+       return;
+     }
+     await handler(job, ctx);
+   };
+ }
+ export {
+   createRoutedHandler,
+   runWorker
+ };
+ //# sourceMappingURL=runner.js.map
package/dist/worker/runner.js.map ADDED
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../src/worker/runner.ts"],
+ "sourcesContent": ["import { createQueue } from '../factory'\nimport type { JobHandler, AsyncQueueOptions, QueueStrategyType } from '../types'\n\n/**\n * Options for running a queue worker.\n */\nexport type WorkerRunnerOptions<T = unknown> = {\n /** Name of the queue to process */\n queueName: string\n /** Handler function to process each job */\n handler: JobHandler<T>\n /** Redis connection options (only used for async strategy) */\n connection?: AsyncQueueOptions['connection']\n /** Number of concurrent jobs to process */\n concurrency?: number\n /** Whether to set up graceful shutdown handlers */\n gracefulShutdown?: boolean\n /** If true, don't block - return immediately after starting processing (for multi-queue mode) */\n background?: boolean\n /** Queue strategy to use. Defaults to QUEUE_STRATEGY env var or 'local' */\n strategy?: QueueStrategyType\n}\n\n/**\n * Runs a queue worker that processes jobs continuously.\n *\n * This function:\n * 1. Creates an async queue instance\n * 2. Starts a BullMQ worker\n * 3. Sets up graceful shutdown on SIGTERM/SIGINT\n * 4. Keeps the process running until shutdown\n *\n * @template T - The job payload type\n * @param options - Worker configuration\n *\n * @example\n * ```typescript\n * import { runWorker } from '@open-mercato/queue/worker'\n *\n * await runWorker({\n * queueName: 'events',\n * handler: async (job, ctx) => {\n * console.log(`Processing ${ctx.jobId}:`, job.payload)\n * },\n * connection: { url: process.env.REDIS_URL },\n * concurrency: 5,\n * })\n * ```\n */\nexport async function runWorker<T = unknown>(\n options: WorkerRunnerOptions<T>\n): Promise<void> {\n const {\n queueName,\n handler,\n connection,\n concurrency = 1,\n gracefulShutdown = true,\n background = false,\n strategy: strategyOption,\n } = options\n\n // Determine queue strategy from option, env var, or default to 'local'\n const strategy: QueueStrategyType = strategyOption\n ?? (process.env.QUEUE_STRATEGY === 'async' ? 'async' : 'local')\n\n console.log(`[worker] Starting worker for queue \"${queueName}\" (strategy: ${strategy})...`)\n\n const queue = createQueue<T>(queueName, strategy, {\n connection,\n concurrency,\n })\n\n // Set up graceful shutdown\n if (gracefulShutdown) {\n const shutdown = async (signal: string) => {\n console.log(`[worker] Received ${signal}, shutting down gracefully...`)\n try {\n await queue.close()\n console.log('[worker] Worker closed successfully')\n process.exit(0)\n } catch (error) {\n console.error('[worker] Error during shutdown:', error)\n process.exit(1)\n }\n }\n\n process.on('SIGTERM', () => shutdown('SIGTERM'))\n process.on('SIGINT', () => shutdown('SIGINT'))\n }\n\n // Start processing\n await queue.process(handler)\n\n console.log(`[worker] Worker running with concurrency ${concurrency}`)\n\n if (background) {\n // Return immediately for multi-queue mode\n return\n }\n\n console.log('[worker] Press Ctrl+C to stop')\n\n // Keep the process alive (single-queue mode)\n await new Promise(() => {\n // This promise never resolves, keeping the worker running\n })\n}\n\n/**\n * Creates a worker handler that routes jobs to specific handlers based on job type.\n *\n * @template T - Base job payload type (must include a 'type' field)\n * @param handlers - Map of job types to their handlers\n *\n * @example\n * ```typescript\n * const handler = createRoutedHandler({\n * 'user.created': async (job) => { ... },\n * 'order.placed': async (job) => { ... 
},\n * })\n *\n * await runWorker({ queueName: 'events', handler })\n * ```\n */\nexport function createRoutedHandler<T extends { type: string }>(\n handlers: Record<string, JobHandler<T>>\n): JobHandler<T> {\n return async (job, ctx) => {\n const type = job.payload.type\n const handler = handlers[type]\n\n if (!handler) {\n console.warn(`[worker] No handler registered for job type \"${type}\"`)\n return\n }\n\n await handler(job, ctx)\n }\n}\n"],
+ "mappings": "AAAA,SAAS,mBAAmB;AAiD5B,eAAsB,UACpB,SACe;AACf,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA,cAAc;AAAA,IACd,mBAAmB;AAAA,IACnB,aAAa;AAAA,IACb,UAAU;AAAA,EACZ,IAAI;AAGJ,QAAM,WAA8B,mBAC9B,QAAQ,IAAI,mBAAmB,UAAU,UAAU;AAEzD,UAAQ,IAAI,uCAAuC,SAAS,gBAAgB,QAAQ,MAAM;AAE1F,QAAM,QAAQ,YAAe,WAAW,UAAU;AAAA,IAChD;AAAA,IACA;AAAA,EACF,CAAC;AAGD,MAAI,kBAAkB;AACpB,UAAM,WAAW,OAAO,WAAmB;AACzC,cAAQ,IAAI,qBAAqB,MAAM,+BAA+B;AACtE,UAAI;AACF,cAAM,MAAM,MAAM;AAClB,gBAAQ,IAAI,qCAAqC;AACjD,gBAAQ,KAAK,CAAC;AAAA,MAChB,SAAS,OAAO;AACd,gBAAQ,MAAM,mCAAmC,KAAK;AACtD,gBAAQ,KAAK,CAAC;AAAA,MAChB;AAAA,IACF;AAEA,YAAQ,GAAG,WAAW,MAAM,SAAS,SAAS,CAAC;AAC/C,YAAQ,GAAG,UAAU,MAAM,SAAS,QAAQ,CAAC;AAAA,EAC/C;AAGA,QAAM,MAAM,QAAQ,OAAO;AAE3B,UAAQ,IAAI,4CAA4C,WAAW,EAAE;AAErE,MAAI,YAAY;AAEd;AAAA,EACF;AAEA,UAAQ,IAAI,+BAA+B;AAG3C,QAAM,IAAI,QAAQ,MAAM;AAAA,EAExB,CAAC;AACH;AAkBO,SAAS,oBACd,UACe;AACf,SAAO,OAAO,KAAK,QAAQ;AACzB,UAAM,OAAO,IAAI,QAAQ;AACzB,UAAM,UAAU,SAAS,IAAI;AAE7B,QAAI,CAAC,SAAS;AACZ,cAAQ,KAAK,gDAAgD,IAAI,GAAG;AACpE;AAAA,IACF;AAEA,UAAM,QAAQ,KAAK,GAAG;AAAA,EACxB;AACF;",
+ "names": []
+ }
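Editor's note: the JSDoc in the source map above already sketches worker usage; restated as one compact TypeScript example (queue name, job types, and env handling are illustrative):

import { runWorker, createRoutedHandler } from '@open-mercato/queue'

// Route jobs whose payload carries a `type` field to per-type handlers.
const handler = createRoutedHandler<{ type: string }>({
  'user.created': async (job, ctx) => console.log('user.created', ctx.jobId, job.payload),
  'order.placed': async (job, ctx) => console.log('order.placed', ctx.jobId, job.payload),
})

// Strategy falls back to QUEUE_STRATEGY or 'local'; connection is only used by 'async'.
await runWorker({
  queueName: 'events',
  handler,
  connection: { url: process.env.REDIS_URL },
  concurrency: 5,
})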
@@ -0,0 +1,19 @@
+ /** @type {import('jest').Config} */
+ module.exports = {
+   preset: 'ts-jest',
+   testEnvironment: 'node',
+   rootDir: '.',
+   moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json'],
+   transform: {
+     '^.+\\.(t|j)sx?$': [
+       'ts-jest',
+       {
+         tsconfig: {
+           jsx: 'react-jsx',
+         },
+       },
+     ],
+   },
+   testMatch: ['<rootDir>/src/**/__tests__/**/*.test.(ts|tsx)'],
+   passWithNoTests: true,
+ }