@nicnocquee/dataqueue 1.16.0 → 1.17.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/index.test.ts CHANGED
@@ -38,23 +38,23 @@ describe('index integration', () => {
38
38
 
39
39
  it('should add a job and retrieve it', async () => {
40
40
  const jobId = await jobQueue.addJob({
41
- job_type: 'email',
41
+ jobType: 'email',
42
42
  payload: { to: 'test@example.com' },
43
43
  });
44
44
  expect(typeof jobId).toBe('number');
45
45
  const job = await jobQueue.getJob(jobId);
46
46
  expect(job).not.toBeNull();
47
- expect(job?.job_type).toBe('email');
47
+ expect(job?.jobType).toBe('email');
48
48
  expect(job?.payload).toEqual({ to: 'test@example.com' });
49
49
  });
50
50
 
51
51
  it('should get jobs by status', async () => {
52
52
  const jobId1 = await jobQueue.addJob({
53
- job_type: 'email',
53
+ jobType: 'email',
54
54
  payload: { to: 'a@example.com' },
55
55
  });
56
56
  const jobId2 = await jobQueue.addJob({
57
- job_type: 'sms',
57
+ jobType: 'sms',
58
58
  payload: { to: 'b@example.com' },
59
59
  });
60
60
  const jobs = await jobQueue.getJobsByStatus('pending');
@@ -66,7 +66,7 @@ describe('index integration', () => {
66
66
  it('should process a job with a registered handler', async () => {
67
67
  const handler = vi.fn(async (_payload, _signal) => {});
68
68
  const jobId = await jobQueue.addJob({
69
- job_type: 'test',
69
+ jobType: 'test',
70
70
  payload: { foo: 'bar' },
71
71
  });
72
72
  const processor = jobQueue.createProcessor(
@@ -87,7 +87,7 @@ describe('index integration', () => {
87
87
 
88
88
  it('should retry a failed job', async () => {
89
89
  const jobId = await jobQueue.addJob({
90
- job_type: 'email',
90
+ jobType: 'email',
91
91
  payload: { to: 'fail@example.com' },
92
92
  });
93
93
  // Manually mark as failed
@@ -103,7 +103,7 @@ describe('index integration', () => {
103
103
 
104
104
  it('should cleanup old completed jobs', async () => {
105
105
  const jobId = await jobQueue.addJob({
106
- job_type: 'email',
106
+ jobType: 'email',
107
107
  payload: { to: 'cleanup@example.com' },
108
108
  });
109
109
  // Mark as completed
@@ -123,7 +123,7 @@ describe('index integration', () => {
123
123
 
124
124
  it('should cancel a scheduled job', async () => {
125
125
  const jobId = await jobQueue.addJob({
126
- job_type: 'email',
126
+ jobType: 'email',
127
127
  payload: { to: 'cancelme@example.com' },
128
128
  });
129
129
  // Cancel the job
@@ -133,7 +133,7 @@ describe('index integration', () => {
133
133
 
134
134
  // Try to cancel a completed job (should not change status)
135
135
  const jobId2 = await jobQueue.addJob({
136
- job_type: 'email',
136
+ jobType: 'email',
137
137
  payload: { to: 'done@example.com' },
138
138
  });
139
139
  await pool.query(
@@ -148,20 +148,20 @@ describe('index integration', () => {
148
148
  it('should cancel all upcoming jobs via JobQueue API', async () => {
149
149
  // Add three pending jobs
150
150
  const jobId1 = await jobQueue.addJob({
151
- job_type: 'email',
151
+ jobType: 'email',
152
152
  payload: { to: 'cancelall1@example.com' },
153
153
  });
154
154
  const jobId2 = await jobQueue.addJob({
155
- job_type: 'email',
155
+ jobType: 'email',
156
156
  payload: { to: 'cancelall2@example.com' },
157
157
  });
158
158
  const jobId3 = await jobQueue.addJob({
159
- job_type: 'email',
159
+ jobType: 'email',
160
160
  payload: { to: 'cancelall3@example.com' },
161
161
  });
162
162
  // Add a completed job
163
163
  const jobId4 = await jobQueue.addJob({
164
- job_type: 'email',
164
+ jobType: 'email',
165
165
  payload: { to: 'done@example.com' },
166
166
  });
167
167
  await pool.query(
@@ -186,18 +186,18 @@ describe('index integration', () => {
186
186
  expect(completedJob?.status).toBe('completed');
187
187
  });
188
188
 
189
- it('should cancel all upcoming jobs by job_type', async () => {
189
+ it('should cancel all upcoming jobs by jobType', async () => {
190
190
  const jobId1 = await jobQueue.addJob({
191
- job_type: 'email',
191
+ jobType: 'email',
192
192
  payload: { to: 'a@example.com' },
193
193
  });
194
194
  const jobId2 = await jobQueue.addJob({
195
- job_type: 'sms',
195
+ jobType: 'sms',
196
196
  payload: { to: 'b@example.com' },
197
197
  });
198
198
  // Cancel only email jobs
199
199
  const cancelledCount = await jobQueue.cancelAllUpcomingJobs({
200
- job_type: 'email',
200
+ jobType: 'email',
201
201
  });
202
202
  expect(cancelledCount).toBeGreaterThanOrEqual(1);
203
203
  const job1 = await jobQueue.getJob(jobId1);
@@ -208,12 +208,12 @@ describe('index integration', () => {
208
208
 
209
209
  it('should cancel all upcoming jobs by priority', async () => {
210
210
  const jobId1 = await jobQueue.addJob({
211
- job_type: 'email',
211
+ jobType: 'email',
212
212
  payload: { to: 'a@example.com' },
213
213
  priority: 1,
214
214
  });
215
215
  const jobId2 = await jobQueue.addJob({
216
- job_type: 'email',
216
+ jobType: 'email',
217
217
  payload: { to: 'b@example.com' },
218
218
  priority: 2,
219
219
  });
@@ -228,20 +228,20 @@ describe('index integration', () => {
228
228
  expect(job2?.status).toBe('cancelled');
229
229
  });
230
230
 
231
- it('should cancel all upcoming jobs by run_at', async () => {
231
+ it('should cancel all upcoming jobs by runAt', async () => {
232
232
  const runAt = new Date(Date.now() + 60 * 60 * 1000); // 1 hour in future
233
233
  const jobId1 = await jobQueue.addJob({
234
- job_type: 'email',
234
+ jobType: 'email',
235
235
  payload: { to: 'a@example.com' },
236
- run_at: runAt,
236
+ runAt: runAt,
237
237
  });
238
238
  const jobId2 = await jobQueue.addJob({
239
- job_type: 'email',
239
+ jobType: 'email',
240
240
  payload: { to: 'b@example.com' },
241
241
  });
242
- // Cancel only jobs with run_at = runAt
242
+ // Cancel only jobs with runAt = runAt
243
243
  const cancelledCount = await jobQueue.cancelAllUpcomingJobs({
244
- run_at: runAt,
244
+ runAt: runAt,
245
245
  });
246
246
  expect(cancelledCount).toBeGreaterThanOrEqual(1);
247
247
  const job1 = await jobQueue.getJob(jobId1);
@@ -250,25 +250,25 @@ describe('index integration', () => {
250
250
  expect(job2?.status).toBe('pending');
251
251
  });
252
252
 
253
- it('should cancel all upcoming jobs by job_type and priority', async () => {
253
+ it('should cancel all upcoming jobs by jobType and priority', async () => {
254
254
  const jobId1 = await jobQueue.addJob({
255
- job_type: 'email',
255
+ jobType: 'email',
256
256
  payload: { to: 'a@example.com' },
257
257
  priority: 1,
258
258
  });
259
259
  const jobId2 = await jobQueue.addJob({
260
- job_type: 'email',
260
+ jobType: 'email',
261
261
  payload: { to: 'b@example.com' },
262
262
  priority: 2,
263
263
  });
264
264
  const jobId3 = await jobQueue.addJob({
265
- job_type: 'sms',
265
+ jobType: 'sms',
266
266
  payload: { to: 'c@example.com' },
267
267
  priority: 2,
268
268
  });
269
269
  // Cancel only email jobs with priority 2
270
270
  const cancelledCount = await jobQueue.cancelAllUpcomingJobs({
271
- job_type: 'email',
271
+ jobType: 'email',
272
272
  priority: 2,
273
273
  });
274
274
  expect(cancelledCount).toBeGreaterThanOrEqual(1);
package/src/index.ts CHANGED
@@ -62,7 +62,7 @@ export const initJobQueue = async <PayloadMap = any>(
62
62
  config.verbose ?? false,
63
63
  ),
64
64
  cancelAllUpcomingJobs: withLogContext(
65
- (filters?: { job_type?: string; priority?: number; run_at?: Date }) =>
65
+ (filters?: { jobType?: string; priority?: number; runAt?: Date }) =>
66
66
  cancelAllUpcomingJobs(pool, filters),
67
67
  config.verbose ?? false,
68
68
  ),
package/src/processor.test.ts CHANGED
@@ -51,7 +51,7 @@ describe('processor integration', () => {
51
51
  typeC: vi.fn(async () => {}),
52
52
  };
53
53
  const jobId = await queue.addJob<TestPayloadMap, 'test'>(pool, {
54
- job_type: 'test',
54
+ jobType: 'test',
55
55
  payload: { foo: 'bar' },
56
56
  });
57
57
  const job = await queue.getJob<TestPayloadMap, 'test'>(pool, jobId);
@@ -80,7 +80,7 @@ describe('processor integration', () => {
80
80
  typeC: vi.fn(async () => {}),
81
81
  };
82
82
  const jobId = await queue.addJob<TestPayloadMap, 'fail'>(pool, {
83
- job_type: 'fail',
83
+ jobType: 'fail',
84
84
  payload: {},
85
85
  });
86
86
  const job = await queue.getJob<TestPayloadMap, 'fail'>(pool, jobId);
@@ -88,8 +88,8 @@ describe('processor integration', () => {
88
88
  await processJobWithHandlers(pool, job!, handlers);
89
89
  const failed = await queue.getJob(pool, jobId);
90
90
  expect(failed?.status).toBe('failed');
91
- expect(failed?.error_history?.[0]?.message).toBe('fail!');
92
- expect(failed?.failure_reason).toBe('handler_error');
91
+ expect(failed?.errorHistory?.[0]?.message).toBe('fail!');
92
+ expect(failed?.failureReason).toBe('handler_error');
93
93
  });
94
94
 
95
95
  it('should mark job as failed if no handler registered', async () => {
@@ -106,7 +106,7 @@ describe('processor integration', () => {
106
106
  typeC: vi.fn(async () => {}),
107
107
  };
108
108
  const jobId = await queue.addJob<TestPayloadMap, 'missing'>(pool, {
109
- job_type: 'missing',
109
+ jobType: 'missing',
110
110
  payload: {},
111
111
  });
112
112
  const job = await queue.getJob<TestPayloadMap, 'missing'>(pool, jobId);
@@ -115,10 +115,10 @@ describe('processor integration', () => {
115
115
  await processJobWithHandlers(pool, job!, handlers);
116
116
  const failed = await queue.getJob(pool, jobId);
117
117
  expect(failed?.status).toBe('failed');
118
- expect(failed?.error_history?.[0]?.message).toContain(
118
+ expect(failed?.errorHistory?.[0]?.message).toContain(
119
119
  'No handler registered',
120
120
  );
121
- expect(failed?.failure_reason).toBe('no_handler');
121
+ expect(failed?.failureReason).toBe('no_handler');
122
122
  });
123
123
 
124
124
  it('should process a batch of jobs', async () => {
@@ -135,11 +135,11 @@ describe('processor integration', () => {
135
135
  };
136
136
  const ids = await Promise.all([
137
137
  queue.addJob<TestPayloadMap, 'batch'>(pool, {
138
- job_type: 'batch',
138
+ jobType: 'batch',
139
139
  payload: { i: 1 },
140
140
  }),
141
141
  queue.addJob<TestPayloadMap, 'batch'>(pool, {
142
- job_type: 'batch',
142
+ jobType: 'batch',
143
143
  payload: { i: 2 },
144
144
  }),
145
145
  ]);
@@ -172,7 +172,7 @@ describe('processor integration', () => {
172
172
  typeC: vi.fn(async () => {}),
173
173
  };
174
174
  await queue.addJob<TestPayloadMap, 'proc'>(pool, {
175
- job_type: 'proc',
175
+ jobType: 'proc',
176
176
  payload: { x: 1 },
177
177
  });
178
178
  const processor = createProcessor(pool, handlers, { pollInterval: 200 });
@@ -182,7 +182,7 @@ describe('processor integration', () => {
182
182
  processor.stop();
183
183
  expect(processor.isRunning()).toBe(false);
184
184
  const jobs = await queue.getJobsByStatus(pool, 'completed');
185
- expect(jobs.some((j) => j.job_type === 'proc')).toBe(true);
185
+ expect(jobs.some((j) => j.jobType === 'proc')).toBe(true);
186
186
  });
187
187
 
188
188
  it('should process only jobs of a specific job type with processBatch', async () => {
@@ -199,15 +199,15 @@ describe('processor integration', () => {
199
199
  typeC: vi.fn(async () => {}),
200
200
  };
201
201
  const idA1 = await queue.addJob<TestPayloadMap, 'typeA'>(pool, {
202
- job_type: 'typeA',
202
+ jobType: 'typeA',
203
203
  payload: { n: 1 },
204
204
  });
205
205
  const idA2 = await queue.addJob<TestPayloadMap, 'typeA'>(pool, {
206
- job_type: 'typeA',
206
+ jobType: 'typeA',
207
207
  payload: { n: 2 },
208
208
  });
209
209
  const idB1 = await queue.addJob<TestPayloadMap, 'typeB'>(pool, {
210
- job_type: 'typeB',
210
+ jobType: 'typeB',
211
211
  payload: { n: 3 },
212
212
  });
213
213
  // Only process typeA
@@ -246,15 +246,15 @@ describe('processor integration', () => {
246
246
  typeC: handlerC,
247
247
  };
248
248
  const idA = await queue.addJob<TestPayloadMap, 'typeA'>(pool, {
249
- job_type: 'typeA',
249
+ jobType: 'typeA',
250
250
  payload: { n: 1 },
251
251
  });
252
252
  const idB = await queue.addJob<TestPayloadMap, 'typeB'>(pool, {
253
- job_type: 'typeB',
253
+ jobType: 'typeB',
254
254
  payload: { n: 2 },
255
255
  });
256
256
  const idC = await queue.addJob<TestPayloadMap, 'typeC'>(pool, {
257
- job_type: 'typeC',
257
+ jobType: 'typeC',
258
258
  payload: { n: 3 },
259
259
  });
260
260
  // Only process typeA and typeC
@@ -293,11 +293,11 @@ describe('processor integration', () => {
293
293
  typeC: vi.fn(async () => {}),
294
294
  };
295
295
  const idA = await queue.addJob<TestPayloadMap, 'typeA'>(pool, {
296
- job_type: 'typeA',
296
+ jobType: 'typeA',
297
297
  payload: { n: 1 },
298
298
  });
299
299
  const idB = await queue.addJob<TestPayloadMap, 'typeB'>(pool, {
300
- job_type: 'typeB',
300
+ jobType: 'typeB',
301
301
  payload: { n: 2 },
302
302
  });
303
303
  const processor = createProcessor(pool, handlers, {
@@ -335,7 +335,7 @@ describe('concurrency option', () => {
335
335
  async function addJobs(n: number) {
336
336
  for (let i = 0; i < n; i++) {
337
337
  await queue.addJob<{ test: {} }, 'test'>(pool, {
338
- job_type: 'test',
338
+ jobType: 'test',
339
339
  payload: {},
340
340
  });
341
341
  }
@@ -444,7 +444,7 @@ describe('per-job timeout', () => {
444
444
  test: handler,
445
445
  };
446
446
  const jobId = await queue.addJob<{ test: {} }, 'test'>(pool, {
447
- job_type: 'test',
447
+ jobType: 'test',
448
448
  payload: {},
449
449
  timeoutMs: 50, // 50ms
450
450
  });
@@ -453,8 +453,8 @@ describe('per-job timeout', () => {
453
453
  await processJobWithHandlers(pool, job!, handlers);
454
454
  const failed = await queue.getJob(pool, jobId);
455
455
  expect(failed?.status).toBe('failed');
456
- expect(failed?.error_history?.[0]?.message).toContain('timed out');
457
- expect(failed?.failure_reason).toBe(FailureReason.Timeout);
456
+ expect(failed?.errorHistory?.[0]?.message).toContain('timed out');
457
+ expect(failed?.failureReason).toBe(FailureReason.Timeout);
458
458
  });
459
459
 
460
460
  it('should complete the job if handler finishes before timeoutMs', async () => {
@@ -465,7 +465,7 @@ describe('per-job timeout', () => {
465
465
  test: handler,
466
466
  };
467
467
  const jobId = await queue.addJob<{ test: {} }, 'test'>(pool, {
468
- job_type: 'test',
468
+ jobType: 'test',
469
469
  payload: {},
470
470
  timeoutMs: 200, // 200ms
471
471
  });
package/src/processor.ts CHANGED
@@ -27,25 +27,25 @@ export async function processJobWithHandlers<
27
27
  job: JobRecord<PayloadMap, T>,
28
28
  jobHandlers: JobHandlers<PayloadMap>,
29
29
  ): Promise<void> {
30
- const handler = jobHandlers[job.job_type];
30
+ const handler = jobHandlers[job.jobType];
31
31
 
32
32
  if (!handler) {
33
33
  await setPendingReasonForUnpickedJobs(
34
34
  pool,
35
- `No handler registered for job type: ${job.job_type}`,
36
- job.job_type,
35
+ `No handler registered for job type: ${job.jobType}`,
36
+ job.jobType,
37
37
  );
38
38
  await failJob(
39
39
  pool,
40
40
  job.id,
41
- new Error(`No handler registered for job type: ${job.job_type}`),
41
+ new Error(`No handler registered for job type: ${job.jobType}`),
42
42
  FailureReason.NoHandler,
43
43
  );
44
44
  return;
45
45
  }
46
46
 
47
47
  // Per-job timeout logic
48
- const timeoutMs = job.timeout_ms ?? undefined;
48
+ const timeoutMs = job.timeoutMs ?? undefined;
49
49
  let timeoutId: NodeJS.Timeout | undefined;
50
50
  const controller = new AbortController();
51
51
  try {