@open-mercato/queue 0.4.2-canary-c02407ff85
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/build.mjs +61 -0
- package/dist/factory.js +12 -0
- package/dist/factory.js.map +7 -0
- package/dist/index.js +10 -0
- package/dist/index.js.map +7 -0
- package/dist/strategies/async.js +125 -0
- package/dist/strategies/async.js.map +7 -0
- package/dist/strategies/local.js +199 -0
- package/dist/strategies/local.js.map +7 -0
- package/dist/types.js +1 -0
- package/dist/types.js.map +7 -0
- package/dist/worker/registry.js +41 -0
- package/dist/worker/registry.js.map +7 -0
- package/dist/worker/runner.js +57 -0
- package/dist/worker/runner.js.map +7 -0
- package/jest.config.cjs +19 -0
- package/package.json +53 -0
- package/src/__tests__/local.strategy.test.ts +192 -0
- package/src/factory.ts +55 -0
- package/src/index.ts +28 -0
- package/src/strategies/async.ts +209 -0
- package/src/strategies/local.ts +303 -0
- package/src/types.ts +238 -0
- package/src/worker/__tests__/registry.test.ts +176 -0
- package/src/worker/registry.ts +76 -0
- package/src/worker/runner.ts +140 -0
- package/tsconfig.json +9 -0
- package/watch.mjs +6 -0
package/package.json
ADDED
@@ -0,0 +1,53 @@
+{
+  "name": "@open-mercato/queue",
+  "version": "0.4.2-canary-c02407ff85",
+  "description": "Multi-strategy job queue with local and BullMQ support",
+  "type": "module",
+  "main": "./dist/index.js",
+  "scripts": {
+    "build": "node build.mjs",
+    "watch": "node watch.mjs",
+    "test": "jest --config jest.config.cjs",
+    "typecheck": "tsc --noEmit"
+  },
+  "exports": {
+    ".": {
+      "types": "./src/index.ts",
+      "default": "./dist/index.js"
+    },
+    "./worker": {
+      "types": "./src/worker/runner.ts",
+      "default": "./dist/worker/runner.js"
+    },
+    "./*": {
+      "types": [
+        "./src/*.ts"
+      ],
+      "default": "./dist/*.js"
+    },
+    "./*/*": {
+      "types": [
+        "./src/*/*.ts"
+      ],
+      "default": "./dist/*/*.js"
+    }
+  },
+  "peerDependencies": {
+    "bullmq": "^5.0.0"
+  },
+  "peerDependenciesMeta": {
+    "bullmq": {
+      "optional": true
+    }
+  },
+  "devDependencies": {
+    "@types/jest": "^30.0.0",
+    "@types/node": "^20.0.0",
+    "jest": "^30.2.0",
+    "ts-jest": "^29.4.6"
+  },
+  "publishConfig": {
+    "access": "public"
+  },
+  "stableVersion": "0.4.1"
+}
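
The `exports` map above is what consumers resolve against: the root entry maps to `./dist/index.js` and the `./worker` subpath maps to `./dist/worker/runner.js`. A minimal consumer sketch under that assumption (the payload shape and queue name are illustrative):

// Hypothetical consumer code; assumes the published package is installed.
import { createQueue } from '@open-mercato/queue'       // "." -> ./dist/index.js
import { runWorker } from '@open-mercato/queue/worker'  // "./worker" -> ./dist/worker/runner.js

// Any serializable payload type works; this one is made up for the example.
const queue = createQueue<{ orderId: string }>('orders', 'local')
await queue.enqueue({ orderId: 'A-1001' })
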
package/src/__tests__/local.strategy.test.ts
ADDED
@@ -0,0 +1,192 @@
+import fs from 'node:fs'
+import os from 'node:os'
+import path from 'node:path'
+
+import { createQueue } from '../factory'
+import type { QueuedJob } from '../types'
+
+function readJson(p: string) { return JSON.parse(fs.readFileSync(p, 'utf8')) }
+
+describe('Queue - local strategy', () => {
+  const origCwd = process.cwd()
+  let tmp: string
+
+  beforeEach(() => {
+    tmp = fs.mkdtempSync(path.join(os.tmpdir(), 'queue-test-'))
+    process.chdir(tmp)
+  })
+
+  afterEach(() => {
+    process.chdir(origCwd)
+    try { fs.rmSync(tmp, { recursive: true, force: true }) } catch {}
+  })
+
+  test('enqueue adds job to queue file', async () => {
+    const queue = createQueue<{ value: number }>('test-queue', 'local')
+    const queuePath = path.join('.queue', 'test-queue', 'queue.json')
+
+    const jobId = await queue.enqueue({ value: 42 })
+
+    expect(typeof jobId).toBe('string')
+    expect(jobId.length).toBeGreaterThan(0)
+
+    const jobs = readJson(queuePath)
+    expect(jobs.length).toBe(1)
+    expect(jobs[0].payload).toEqual({ value: 42 })
+    expect(jobs[0].id).toBe(jobId)
+
+    await queue.close()
+  })
+
+  test('process executes handler for each job', async () => {
+    const queue = createQueue<{ value: number }>('test-queue', 'local')
+    const processed: QueuedJob<{ value: number }>[] = []
+
+    await queue.enqueue({ value: 1 })
+    await queue.enqueue({ value: 2 })
+    await queue.enqueue({ value: 3 })
+
+    // Use limit to trigger batch mode (without limit, enters continuous polling mode)
+    const result = await queue.process((job) => {
+      processed.push(job)
+    }, { limit: 10 })
+
+    expect(result).toBeDefined()
+    expect(result!.processed).toBe(3)
+    expect(result!.failed).toBe(0)
+    expect(processed.length).toBe(3)
+    expect(processed.map(j => j.payload.value)).toEqual([1, 2, 3])
+
+    await queue.close()
+  })
+
+  test('process with limit only processes specified number of jobs', async () => {
+    const queue = createQueue<{ value: number }>('test-queue', 'local')
+    const processed: number[] = []
+
+    await queue.enqueue({ value: 1 })
+    await queue.enqueue({ value: 2 })
+    await queue.enqueue({ value: 3 })
+
+    const result = await queue.process(
+      (job) => { processed.push(job.payload.value) },
+      { limit: 2 }
+    )
+
+    expect(result!.processed).toBe(2)
+    expect(processed).toEqual([1, 2])
+
+    // Process remaining (use limit to stay in batch mode)
+    const result2 = await queue.process(
+      (job) => { processed.push(job.payload.value) },
+      { limit: 10 }
+    )
+
+    expect(result2!.processed).toBe(1)
+    expect(processed).toEqual([1, 2, 3])
+
+    await queue.close()
+  })
+
+  test('clear removes all jobs from queue', async () => {
+    const queue = createQueue<{ value: number }>('test-queue', 'local')
+    const queuePath = path.join('.queue', 'test-queue', 'queue.json')
+
+    await queue.enqueue({ value: 1 })
+    await queue.enqueue({ value: 2 })
+
+    const before = readJson(queuePath)
+    expect(before.length).toBe(2)
+
+    const result = await queue.clear()
+    expect(result.removed).toBe(2)
+
+    const after = readJson(queuePath)
+    expect(after.length).toBe(0)
+
+    await queue.close()
+  })
+
+  test('getJobCounts returns correct counts', async () => {
+    const queue = createQueue<{ value: number }>('test-queue', 'local')
+
+    await queue.enqueue({ value: 1 })
+    await queue.enqueue({ value: 2 })
+    await queue.enqueue({ value: 3 })
+
+    const counts = await queue.getJobCounts()
+    expect(counts.waiting).toBe(3)
+    expect(counts.completed).toBe(0)
+
+    await queue.process(() => {}, { limit: 1 })
+
+    const counts2 = await queue.getJobCounts()
+    expect(counts2.waiting).toBe(2)
+    expect(counts2.completed).toBe(1)
+
+    await queue.close()
+  })
+
+  test('queue name is used for directory', async () => {
+    const queue = createQueue('my-custom-queue', 'local')
+    const queueDir = path.join('.queue', 'my-custom-queue')
+
+    await queue.enqueue({ data: 'test' })
+
+    expect(fs.existsSync(queueDir)).toBe(true)
+    expect(fs.existsSync(path.join(queueDir, 'queue.json'))).toBe(true)
+    expect(fs.existsSync(path.join(queueDir, 'state.json'))).toBe(true)
+
+    await queue.close()
+  })
+
+  test('custom baseDir option is respected', async () => {
+    const customDir = path.join(tmp, 'custom-queue-dir')
+    const queue = createQueue('test', 'local', { baseDir: customDir })
+
+    await queue.enqueue({ data: 'test' })
+
+    expect(fs.existsSync(path.join(customDir, 'test', 'queue.json'))).toBe(true)
+
+    await queue.close()
+  })
+
+  test('handler errors are caught and counted as failures', async () => {
+    const queue = createQueue<{ shouldFail: boolean }>('test-queue', 'local')
+
+    await queue.enqueue({ shouldFail: false })
+    await queue.enqueue({ shouldFail: true })
+    await queue.enqueue({ shouldFail: false })
+
+    // Use limit to trigger batch mode (without limit, enters continuous polling mode)
+    const result = await queue.process((job) => {
+      if (job.payload.shouldFail) {
+        throw new Error('Intentional test error')
+      }
+    }, { limit: 10 })
+
+    expect(result!.processed).toBe(2)
+    expect(result!.failed).toBe(1)
+
+    await queue.close()
+  })
+
+  test('job context contains correct information', async () => {
+    const queue = createQueue<{ value: number }>('context-test', 'local')
+    let capturedContext: any = null
+
+    const jobId = await queue.enqueue({ value: 42 })
+
+    // Use limit to trigger batch mode
+    await queue.process((job, ctx) => {
+      capturedContext = ctx
+    }, { limit: 10 })
+
+    expect(capturedContext).not.toBeNull()
+    expect(capturedContext.jobId).toBe(jobId)
+    expect(capturedContext.attemptNumber).toBe(1)
+    expect(capturedContext.queueName).toBe('context-test')
+
+    await queue.close()
+  })
+})
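
These tests pin down the local strategy's batch behavior: calling `process` with a `limit` drains up to that many jobs and returns a result, while omitting the limit keeps the queue polling continuously. A small sketch following the same pattern the tests use (the queue name and payload are illustrative):

import { createQueue } from '@open-mercato/queue'

const queue = createQueue<{ value: number }>('drain-example', 'local')
await queue.enqueue({ value: 1 })
await queue.enqueue({ value: 2 })

// Passing { limit } keeps process() in batch mode instead of continuous polling.
const result = await queue.process(async (job, ctx) => {
  console.log(`job ${ctx.jobId} (attempt ${ctx.attemptNumber}):`, job.payload.value)
}, { limit: 10 })

console.log(`processed=${result?.processed} failed=${result?.failed}`)
await queue.close()
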
package/src/factory.ts
ADDED
@@ -0,0 +1,55 @@
+import type { Queue, LocalQueueOptions, AsyncQueueOptions } from './types'
+import { createLocalQueue } from './strategies/local'
+import { createAsyncQueue } from './strategies/async'
+
+/**
+ * Creates a queue instance with the specified strategy.
+ *
+ * @template T - The payload type for jobs in this queue
+ * @param name - Unique name for the queue
+ * @param strategy - Queue strategy: 'local' for file-based, 'async' for BullMQ
+ * @param options - Strategy-specific options
+ * @returns A Queue instance
+ *
+ * @example
+ * ```typescript
+ * // Local file-based queue
+ * const localQueue = createQueue<MyJobData>('my-queue', 'local')
+ *
+ * // BullMQ-based queue
+ * const asyncQueue = createQueue<MyJobData>('my-queue', 'async', {
+ *   connection: { url: 'redis://localhost:6379' },
+ *   concurrency: 5
+ * })
+ * ```
+ */
+export function createQueue<T = unknown>(
+  name: string,
+  strategy: 'local',
+  options?: LocalQueueOptions
+): Queue<T>
+
+export function createQueue<T = unknown>(
+  name: string,
+  strategy: 'async',
+  options?: AsyncQueueOptions
+): Queue<T>
+
+// General overload for dynamic strategy (union type)
+export function createQueue<T = unknown>(
+  name: string,
+  strategy: 'local' | 'async',
+  options?: LocalQueueOptions | AsyncQueueOptions
+): Queue<T>
+
+export function createQueue<T = unknown>(
+  name: string,
+  strategy: 'local' | 'async',
+  options?: LocalQueueOptions | AsyncQueueOptions
+): Queue<T> {
+  if (strategy === 'async') {
+    return createAsyncQueue<T>(name, options as AsyncQueueOptions)
+  }
+
+  return createLocalQueue<T>(name, options as LocalQueueOptions)
+}
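
The third overload exists so callers can choose the strategy at runtime rather than hard-coding it at the call site. A hedged sketch, assuming the choice comes from an environment variable (the variable name is made up for this illustration):

import { createQueue } from '@open-mercato/queue'

// QUEUE_STRATEGY is a hypothetical env var used only for this example.
const strategy: 'local' | 'async' =
  process.env.QUEUE_STRATEGY === 'async' ? 'async' : 'local'

// Resolves against the union overload of createQueue.
const queue = createQueue<{ userId: string }>('notifications', strategy)
await queue.enqueue({ userId: '123' })
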
package/src/index.ts
ADDED
@@ -0,0 +1,28 @@
+/**
+ * @open-mercato/queue
+ *
+ * Multi-strategy job queue package supporting local (file-based) and async (BullMQ) strategies.
+ *
+ * @example
+ * ```typescript
+ * import { createQueue } from '@open-mercato/queue'
+ *
+ * // Create a local queue
+ * const queue = createQueue<{ userId: string }>('my-queue', 'local')
+ *
+ * // Enqueue a job
+ * await queue.enqueue({ userId: '123' })
+ *
+ * // Process jobs
+ * await queue.process(async (job, ctx) => {
+ *   console.log(`Processing job ${ctx.jobId}:`, job.payload)
+ * })
+ * ```
+ */
+
+export * from './types'
+export { createQueue } from './factory'
+
+// Worker utilities
+export * from './worker/registry'
+export { runWorker, createRoutedHandler } from './worker/runner'
package/src/strategies/async.ts
ADDED
@@ -0,0 +1,209 @@
+import type { Queue, QueuedJob, JobHandler, AsyncQueueOptions, ProcessResult } from '../types'
+
+// BullMQ interface types - we define the shape we use to maintain type safety
+// while keeping bullmq as an optional peer dependency
+type ConnectionOptions = { host?: string; port?: number; password?: string } | string
+
+interface BullQueueInterface<T> {
+  add: (name: string, data: T, opts?: { removeOnComplete?: boolean; removeOnFail?: number }) => Promise<{ id?: string }>
+  obliterate: (opts?: { force?: boolean }) => Promise<void>
+  close: () => Promise<void>
+  getJobCounts: (...states: string[]) => Promise<Record<string, number>>
+}
+
+interface BullWorkerInterface {
+  on: (event: string, handler: (...args: unknown[]) => void) => void
+  close: () => Promise<void>
+}
+
+interface BullMQModule {
+  Queue: new <T>(name: string, opts: { connection: ConnectionOptions }) => BullQueueInterface<T>
+  Worker: new <T>(
+    name: string,
+    processor: (job: { id?: string; data: T; attemptsMade: number }) => Promise<void>,
+    opts: { connection: ConnectionOptions; concurrency: number }
+  ) => BullWorkerInterface
+}
+
+/**
+ * Resolves Redis connection options from various sources.
+ */
+function resolveConnection(options?: AsyncQueueOptions['connection']): ConnectionOptions {
+  // Priority: explicit options > environment variables
+  const url = options?.url ?? process.env.REDIS_URL ?? process.env.QUEUE_REDIS_URL
+
+  if (url) {
+    return url
+  }
+
+  if (options?.host) {
+    return {
+      host: options.host,
+      port: options.port ?? 6379,
+      password: options.password,
+    }
+  }
+
+  // Default to localhost
+  return { host: 'localhost', port: 6379 }
+}
+
+/**
+ * Creates a BullMQ-based async queue.
+ *
+ * This strategy provides:
+ * - Persistent job storage in Redis
+ * - Automatic retries with exponential backoff
+ * - Concurrent job processing
+ * - Job prioritization and scheduling
+ *
+ * @template T - The payload type for jobs
+ * @param name - Queue name
+ * @param options - Async queue options
+ */
+export function createAsyncQueue<T = unknown>(
+  name: string,
+  options?: AsyncQueueOptions
+): Queue<T> {
+  const connection = resolveConnection(options?.connection)
+  const concurrency = options?.concurrency ?? 1
+
+  let bullQueue: BullQueueInterface<QueuedJob<T>> | null = null
+  let bullWorker: BullWorkerInterface | null = null
+  let bullmqModule: BullMQModule | null = null
+
+  // -------------------------------------------------------------------------
+  // Lazy BullMQ initialization
+  // -------------------------------------------------------------------------
+
+  async function getBullMQ(): Promise<BullMQModule> {
+    if (!bullmqModule) {
+      try {
+        bullmqModule = await import('bullmq') as unknown as BullMQModule
+      } catch {
+        throw new Error(
+          'BullMQ is required for async queue strategy. Install it with: npm install bullmq'
+        )
+      }
+    }
+    return bullmqModule
+  }
+
+  async function getQueue(): Promise<BullQueueInterface<QueuedJob<T>>> {
+    if (!bullQueue) {
+      const { Queue: BullQueueClass } = await getBullMQ()
+      bullQueue = new BullQueueClass<QueuedJob<T>>(name, { connection })
+    }
+    return bullQueue
+  }
+
+  // -------------------------------------------------------------------------
+  // Queue Implementation
+  // -------------------------------------------------------------------------
+
+  async function enqueue(data: T): Promise<string> {
+    const queue = await getQueue()
+    const jobData: QueuedJob<T> = {
+      id: crypto.randomUUID(),
+      payload: data,
+      createdAt: new Date().toISOString(),
+    }
+
+    const job = await queue.add(jobData.id, jobData, {
+      removeOnComplete: true,
+      removeOnFail: 1000, // Keep last 1000 failed jobs
+    })
+
+    return job.id ?? jobData.id
+  }
+
+  async function process(handler: JobHandler<T>): Promise<ProcessResult> {
+    const { Worker } = await getBullMQ()
+
+    // Create worker that processes jobs
+    bullWorker = new Worker<QueuedJob<T>>(
+      name,
+      async (job) => {
+        const jobData = job.data
+        await handler(jobData, {
+          jobId: job.id ?? jobData.id,
+          attemptNumber: job.attemptsMade + 1,
+          queueName: name,
+        })
+      },
+      {
+        connection,
+        concurrency,
+      }
+    )
+
+    // Set up event handlers
+    bullWorker.on('completed', (job) => {
+      const jobWithId = job as { id?: string }
+      console.log(`[queue:${name}] Job ${jobWithId.id} completed`)
+    })
+
+    bullWorker.on('failed', (job, err) => {
+      const jobWithId = job as { id?: string } | undefined
+      const error = err as Error
+      console.error(`[queue:${name}] Job ${jobWithId?.id} failed:`, error.message)
+    })
+
+    bullWorker.on('error', (err) => {
+      const error = err as Error
+      console.error(`[queue:${name}] Worker error:`, error.message)
+    })
+
+    console.log(`[queue:${name}] Worker started with concurrency ${concurrency}`)
+
+    // For async strategy, return a sentinel result indicating worker mode
+    // processed=-1 signals that this is a continuous worker, not a batch process
+    return { processed: -1, failed: -1, lastJobId: undefined }
+  }
+
+  async function clear(): Promise<{ removed: number }> {
+    const queue = await getQueue()
+
+    // Obliterate removes all jobs from the queue
+    await queue.obliterate({ force: true })
+
+    return { removed: -1 } // BullMQ obliterate doesn't return count
+  }
+
+  async function close(): Promise<void> {
+    if (bullWorker) {
+      await bullWorker.close()
+      bullWorker = null
+    }
+    if (bullQueue) {
+      await bullQueue.close()
+      bullQueue = null
+    }
+  }
+
+  async function getJobCounts(): Promise<{
+    waiting: number
+    active: number
+    completed: number
+    failed: number
+  }> {
+    const queue = await getQueue()
+    const counts = await queue.getJobCounts('waiting', 'active', 'completed', 'failed')
+    return {
+      waiting: counts.waiting ?? 0,
+      active: counts.active ?? 0,
+      completed: counts.completed ?? 0,
+      failed: counts.failed ?? 0,
+    }
+  }
+
+  return {
+    name,
+    strategy: 'async',
+    enqueue,
+    process,
+    clear,
+    close,
+    getJobCounts,
+  }
+}