@nicnocquee/dataqueue 1.16.0 → 1.18.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/queue.ts CHANGED
@@ -37,27 +37,27 @@ export const recordJobEvent = async (
37
37
  export const addJob = async <PayloadMap, T extends keyof PayloadMap & string>(
38
38
  pool: Pool,
39
39
  {
40
- job_type,
40
+ jobType,
41
41
  payload,
42
- max_attempts = 3,
42
+ maxAttempts = 3,
43
43
  priority = 0,
44
- run_at = null,
44
+ runAt = null,
45
45
  timeoutMs = undefined,
46
46
  }: JobOptions<PayloadMap, T>,
47
47
  ): Promise<number> => {
48
48
  const client = await pool.connect();
49
49
  try {
50
50
  let result;
51
- if (run_at) {
51
+ if (runAt) {
52
52
  result = await client.query(
53
53
  `INSERT INTO job_queue
54
54
  (job_type, payload, max_attempts, priority, run_at, timeout_ms)
55
55
  VALUES ($1, $2, $3, $4, $5, $6)
56
56
  RETURNING id`,
57
- [job_type, payload, max_attempts, priority, run_at, timeoutMs ?? null],
57
+ [jobType, payload, maxAttempts, priority, runAt, timeoutMs ?? null],
58
58
  );
59
59
  log(
60
- `Added job ${result.rows[0].id}: payload ${JSON.stringify(payload)}, run_at ${run_at.toISOString()}, priority ${priority}, max_attempts ${max_attempts} job_type ${job_type}`,
60
+ `Added job ${result.rows[0].id}: payload ${JSON.stringify(payload)}, runAt ${runAt.toISOString()}, priority ${priority}, maxAttempts ${maxAttempts} jobType ${jobType}`,
61
61
  );
62
62
  } else {
63
63
  result = await client.query(
@@ -65,14 +65,14 @@ export const addJob = async <PayloadMap, T extends keyof PayloadMap & string>(
65
65
  (job_type, payload, max_attempts, priority, timeout_ms)
66
66
  VALUES ($1, $2, $3, $4, $5)
67
67
  RETURNING id`,
68
- [job_type, payload, max_attempts, priority, timeoutMs ?? null],
68
+ [jobType, payload, maxAttempts, priority, timeoutMs ?? null],
69
69
  );
70
70
  log(
71
- `Added job ${result.rows[0].id}: payload ${JSON.stringify(payload)}, priority ${priority}, max_attempts ${max_attempts} job_type ${job_type}`,
71
+ `Added job ${result.rows[0].id}: payload ${JSON.stringify(payload)}, priority ${priority}, maxAttempts ${maxAttempts} jobType ${jobType}`,
72
72
  );
73
73
  }
74
74
  await recordJobEvent(pool, result.rows[0].id, JobEventType.Added, {
75
- job_type,
75
+ jobType,
76
76
  payload,
77
77
  });
78
78
  return result.rows[0].id;
@@ -93,9 +93,10 @@ export const getJob = async <PayloadMap, T extends keyof PayloadMap & string>(
93
93
  ): Promise<JobRecord<PayloadMap, T> | null> => {
94
94
  const client = await pool.connect();
95
95
  try {
96
- const result = await client.query('SELECT * FROM job_queue WHERE id = $1', [
97
- id,
98
- ]);
96
+ const result = await client.query(
97
+ `SELECT id, job_type AS "jobType", payload, status, max_attempts AS "maxAttempts", attempts, priority, run_at AS "runAt", timeout_ms AS "timeoutMs", created_at AS "createdAt", updated_at AS "updatedAt", started_at AS "startedAt", completed_at AS "completedAt", last_failed_at AS "lastFailedAt", locked_at AS "lockedAt", locked_by AS "lockedBy", error_history AS "errorHistory", failure_reason AS "failureReason", next_attempt_at AS "nextAttemptAt", last_retried_at AS "lastRetriedAt", last_cancelled_at AS "lastCancelledAt", pending_reason AS "pendingReason" FROM job_queue WHERE id = $1`,
98
+ [id],
99
+ );
99
100
 
100
101
  if (result.rows.length === 0) {
101
102
  log(`Job ${id} not found`);
@@ -104,11 +105,13 @@ export const getJob = async <PayloadMap, T extends keyof PayloadMap & string>(
104
105
 
105
106
  log(`Found job ${id}`);
106
107
 
108
+ const job = result.rows[0] as JobRecord<PayloadMap, T>;
109
+
107
110
  return {
108
- ...result.rows[0],
109
- payload: result.rows[0].payload,
110
- timeout_ms: result.rows[0].timeout_ms,
111
- failure_reason: result.rows[0].failure_reason,
111
+ ...job,
112
+ payload: job.payload,
113
+ timeoutMs: job.timeoutMs,
114
+ failureReason: job.failureReason,
112
115
  };
113
116
  } catch (error) {
114
117
  log(`Error getting job ${id}: ${error}`);
@@ -133,17 +136,17 @@ export const getJobsByStatus = async <
133
136
  const client = await pool.connect();
134
137
  try {
135
138
  const result = await client.query(
136
- 'SELECT * FROM job_queue WHERE status = $1 ORDER BY created_at DESC LIMIT $2 OFFSET $3',
139
+ `SELECT id, job_type AS "jobType", payload, status, max_attempts AS "maxAttempts", attempts, priority, run_at AS "runAt", timeout_ms AS "timeoutMs", created_at AS "createdAt", updated_at AS "updatedAt", started_at AS "startedAt", completed_at AS "completedAt", last_failed_at AS "lastFailedAt", locked_at AS "lockedAt", locked_by AS "lockedBy", error_history AS "errorHistory", failure_reason AS "failureReason", next_attempt_at AS "nextAttemptAt", last_retried_at AS "lastRetriedAt", last_cancelled_at AS "lastCancelledAt", pending_reason AS "pendingReason" FROM job_queue WHERE status = $1 ORDER BY created_at DESC LIMIT $2 OFFSET $3`,
137
140
  [status, limit, offset],
138
141
  );
139
142
 
140
143
  log(`Found ${result.rows.length} jobs by status ${status}`);
141
144
 
142
- return result.rows.map((row) => ({
143
- ...row,
144
- payload: row.payload,
145
- timeout_ms: row.timeout_ms,
146
- failure_reason: row.failure_reason,
145
+ return result.rows.map((job) => ({
146
+ ...job,
147
+ payload: job.payload,
148
+ timeoutMs: job.timeoutMs,
149
+ failureReason: job.failureReason,
147
150
  }));
148
151
  } catch (error) {
149
152
  log(`Error getting jobs by status ${status}: ${error}`);
@@ -209,7 +212,7 @@ export const getNextBatch = async <
209
212
  LIMIT $2
210
213
  FOR UPDATE SKIP LOCKED
211
214
  )
212
- RETURNING *
215
+ RETURNING id, job_type AS "jobType", payload, status, max_attempts AS "maxAttempts", attempts, priority, run_at AS "runAt", timeout_ms AS "timeoutMs", created_at AS "createdAt", updated_at AS "updatedAt", started_at AS "startedAt", completed_at AS "completedAt", last_failed_at AS "lastFailedAt", locked_at AS "lockedAt", locked_by AS "lockedBy", error_history AS "errorHistory", failure_reason AS "failureReason", next_attempt_at AS "nextAttemptAt", last_retried_at AS "lastRetriedAt", last_cancelled_at AS "lastCancelledAt", pending_reason AS "pendingReason"
213
216
  `,
214
217
  params,
215
218
  );
@@ -224,10 +227,10 @@ export const getNextBatch = async <
224
227
  await recordJobEvent(pool, row.id, JobEventType.Processing);
225
228
  }
226
229
 
227
- return result.rows.map((row) => ({
228
- ...row,
229
- payload: row.payload,
230
- timeout_ms: row.timeout_ms,
230
+ return result.rows.map((job) => ({
231
+ ...job,
232
+ payload: job.payload,
233
+ timeoutMs: job.timeoutMs,
231
234
  }));
232
235
  } catch (error) {
233
236
  log(`Error getting next batch: ${error}`);
@@ -397,7 +400,7 @@ export const cancelJob = async (pool: Pool, jobId: number): Promise<void> => {
397
400
  */
398
401
  export const cancelAllUpcomingJobs = async (
399
402
  pool: Pool,
400
- filters?: { job_type?: string; priority?: number; run_at?: Date },
403
+ filters?: { jobType?: string; priority?: number; runAt?: Date },
401
404
  ): Promise<number> => {
402
405
  const client = await pool.connect();
403
406
  try {
@@ -408,17 +411,17 @@ export const cancelAllUpcomingJobs = async (
408
411
  const params: any[] = [];
409
412
  let paramIdx = 1;
410
413
  if (filters) {
411
- if (filters.job_type) {
414
+ if (filters.jobType) {
412
415
  query += ` AND job_type = $${paramIdx++}`;
413
- params.push(filters.job_type);
416
+ params.push(filters.jobType);
414
417
  }
415
418
  if (filters.priority !== undefined) {
416
419
  query += ` AND priority = $${paramIdx++}`;
417
420
  params.push(filters.priority);
418
421
  }
419
- if (filters.run_at) {
422
+ if (filters.runAt) {
420
423
  query += ` AND run_at = $${paramIdx++}`;
421
- params.push(filters.run_at);
424
+ params.push(filters.runAt);
422
425
  }
423
426
  }
424
427
  query += '\nRETURNING id';
@@ -447,14 +450,14 @@ export const getAllJobs = async <
447
450
  const client = await pool.connect();
448
451
  try {
449
452
  const result = await client.query(
450
- 'SELECT * FROM job_queue ORDER BY created_at DESC LIMIT $1 OFFSET $2',
453
+ `SELECT id, job_type AS "jobType", payload, status, max_attempts AS "maxAttempts", attempts, priority, run_at AS "runAt", timeout_ms AS "timeoutMs", created_at AS "createdAt", updated_at AS "updatedAt", started_at AS "startedAt", completed_at AS "completedAt", last_failed_at AS "lastFailedAt", locked_at AS "lockedAt", locked_by AS "lockedBy", error_history AS "errorHistory", failure_reason AS "failureReason", next_attempt_at AS "nextAttemptAt", last_retried_at AS "lastRetriedAt", last_cancelled_at AS "lastCancelledAt", pending_reason AS "pendingReason" FROM job_queue ORDER BY created_at DESC LIMIT $1 OFFSET $2`,
451
454
  [limit, offset],
452
455
  );
453
456
  log(`Found ${result.rows.length} jobs (all)`);
454
- return result.rows.map((row) => ({
455
- ...row,
456
- payload: row.payload,
457
- timeout_ms: row.timeout_ms,
457
+ return result.rows.map((job) => ({
458
+ ...job,
459
+ payload: job.payload,
460
+ timeoutMs: job.timeoutMs,
458
461
  }));
459
462
  } catch (error) {
460
463
  log(`Error getting all jobs: ${error}`);
@@ -528,7 +531,7 @@ export const reclaimStuckJobs = async (
528
531
  };
529
532
 
530
533
  /**
531
- * Get all events for a job, ordered by created_at ascending
534
+ * Get all events for a job, ordered by createdAt ascending
532
535
  */
533
536
  export const getJobEvents = async (
534
537
  pool: Pool,
@@ -537,10 +540,10 @@ export const getJobEvents = async (
537
540
  const client = await pool.connect();
538
541
  try {
539
542
  const res = await client.query(
540
- 'SELECT * FROM job_events WHERE job_id = $1 ORDER BY created_at ASC',
543
+ `SELECT id, job_id AS "jobId", event_type AS "eventType", metadata, created_at AS "createdAt" FROM job_events WHERE job_id = $1 ORDER BY created_at ASC`,
541
544
  [jobId],
542
545
  );
543
- return res.rows;
546
+ return res.rows as JobEvent[];
544
547
  } finally {
545
548
  client.release();
546
549
  }
package/src/types.ts CHANGED
@@ -4,11 +4,11 @@ import { Pool } from 'pg';
4
4
  export type JobType<PayloadMap> = keyof PayloadMap & string;
5
5
 
6
6
  export interface JobOptions<PayloadMap, T extends JobType<PayloadMap>> {
7
- job_type: T;
7
+ jobType: T;
8
8
  payload: PayloadMap[T];
9
- max_attempts?: number;
9
+ maxAttempts?: number;
10
10
  priority?: number;
11
- run_at?: Date | null;
11
+ runAt?: Date | null;
12
12
  /**
13
13
  * Timeout for this job in milliseconds. If not set, uses the processor default or unlimited.
14
14
  */
@@ -26,9 +26,9 @@ export enum JobEventType {
26
26
 
27
27
  export interface JobEvent {
28
28
  id: number;
29
- job_id: number;
30
- event_type: JobEventType;
31
- created_at: Date;
29
+ jobId: number;
30
+ eventType: JobEventType;
31
+ createdAt: Date;
32
32
  metadata: any;
33
33
  }
34
34
 
@@ -38,50 +38,57 @@ export enum FailureReason {
38
38
  NoHandler = 'no_handler',
39
39
  }
40
40
 
41
+ export type JobStatus =
42
+ | 'pending'
43
+ | 'processing'
44
+ | 'completed'
45
+ | 'failed'
46
+ | 'cancelled';
47
+
41
48
  export interface JobRecord<PayloadMap, T extends JobType<PayloadMap>> {
42
49
  id: number;
43
- job_type: T;
50
+ jobType: T;
44
51
  payload: PayloadMap[T];
45
- status: 'pending' | 'processing' | 'completed' | 'failed' | 'cancelled';
46
- created_at: Date;
47
- updated_at: Date;
48
- locked_at: Date | null;
49
- locked_by: string | null;
52
+ status: JobStatus;
53
+ createdAt: Date;
54
+ updatedAt: Date;
55
+ lockedAt: Date | null;
56
+ lockedBy: string | null;
50
57
  attempts: number;
51
- max_attempts: number;
52
- next_attempt_at: Date | null;
58
+ maxAttempts: number;
59
+ nextAttemptAt: Date | null;
53
60
  priority: number;
54
- run_at: Date;
55
- pending_reason?: string | null;
56
- error_history?: { message: string; timestamp: string }[];
61
+ runAt: Date;
62
+ pendingReason?: string | null;
63
+ errorHistory?: { message: string; timestamp: string }[];
57
64
  /**
58
65
  * Timeout for this job in milliseconds (null means no timeout).
59
66
  */
60
- timeout_ms?: number | null;
67
+ timeoutMs?: number | null;
61
68
  /**
62
69
  * The reason for the last failure, if any.
63
70
  */
64
- failure_reason?: FailureReason | null;
71
+ failureReason?: FailureReason | null;
65
72
  /**
66
73
  * The time the job was completed, if completed.
67
74
  */
68
- completed_at: Date | null;
75
+ completedAt: Date | null;
69
76
  /**
70
77
  * The time the job was first picked up for processing.
71
78
  */
72
- started_at: Date | null;
79
+ startedAt: Date | null;
73
80
  /**
74
81
  * The time the job was last retried.
75
82
  */
76
- last_retried_at: Date | null;
83
+ lastRetriedAt: Date | null;
77
84
  /**
78
85
  * The time the job last failed.
79
86
  */
80
- last_failed_at: Date | null;
87
+ lastFailedAt: Date | null;
81
88
  /**
82
89
  * The time the job was last cancelled.
83
90
  */
84
- last_cancelled_at: Date | null;
91
+ lastCancelledAt: Date | null;
85
92
  }
86
93
 
87
94
  export type JobHandler<PayloadMap, T extends keyof PayloadMap> = (
@@ -179,10 +186,10 @@ export interface JobQueue<PayloadMap> {
179
186
  * Get jobs by their status, with pagination.
180
187
  * - If no limit is provided, all jobs are returned.
181
188
  * - If no offset is provided, the first page is returned.
182
- * - The jobs are returned in descending order of created_at.
189
+ * - The jobs are returned in descending order of createdAt.
183
190
  */
184
191
  getJobsByStatus: <T extends JobType<PayloadMap>>(
185
- status: string,
192
+ status: JobStatus,
186
193
  limit?: number,
187
194
  offset?: number,
188
195
  ) => Promise<JobRecord<PayloadMap, T>[]>;
@@ -219,14 +226,14 @@ export interface JobQueue<PayloadMap> {
219
226
  * - If no filters are provided, all upcoming jobs are cancelled.
220
227
  * - If filters are provided, only jobs that match the filters are cancelled.
221
228
  * - The filters are:
222
- * - job_type: The job type to cancel.
229
+ * - jobType: The job type to cancel.
223
230
  * - priority: The priority of the job to cancel.
224
- * - run_at: The time the job is scheduled to run at.
231
+ * - runAt: The time the job is scheduled to run at.
225
232
  */
226
233
  cancelAllUpcomingJobs: (filters?: {
227
- job_type?: string;
234
+ jobType?: string;
228
235
  priority?: number;
229
- run_at?: Date;
236
+ runAt?: Date;
230
237
  }) => Promise<number>;
231
238
  /**
232
239
  * Create a job processor. Handlers must be provided per-processor.
package/src/utils.ts ADDED
@@ -0,0 +1,19 @@
1
+ export const toCamelCase = (str: string) => {
2
+ return str.replace(/_([a-z])/g, (_, letter) => letter.toUpperCase());
3
+ };
4
+
5
+ export const toSnakeCase = (str: string) => {
6
+ return str.replace(/([A-Z])/g, '_$1').toLowerCase();
7
+ };
8
+
9
+ export const objectKeysToCamelCase = (obj: Record<string, any>) => {
10
+ return Object.fromEntries(
11
+ Object.entries(obj).map(([key, value]) => [toCamelCase(key), value]),
12
+ );
13
+ };
14
+
15
+ export const objectKeysToSnakeCase = (obj: Record<string, any>) => {
16
+ return Object.fromEntries(
17
+ Object.entries(obj).map(([key, value]) => [toSnakeCase(key), value]),
18
+ );
19
+ };