@nicnocquee/dataqueue 1.25.0 → 1.26.0-beta.20260223202259

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/ai/build-docs-content.ts +96 -0
  2. package/ai/build-llms-full.ts +42 -0
  3. package/ai/docs-content.json +284 -0
  4. package/ai/rules/advanced.md +150 -0
  5. package/ai/rules/basic.md +159 -0
  6. package/ai/rules/react-dashboard.md +83 -0
  7. package/ai/skills/dataqueue-advanced/SKILL.md +370 -0
  8. package/ai/skills/dataqueue-core/SKILL.md +234 -0
  9. package/ai/skills/dataqueue-react/SKILL.md +189 -0
  10. package/dist/cli.cjs +1149 -14
  11. package/dist/cli.cjs.map +1 -1
  12. package/dist/cli.d.cts +66 -1
  13. package/dist/cli.d.ts +66 -1
  14. package/dist/cli.js +1146 -13
  15. package/dist/cli.js.map +1 -1
  16. package/dist/index.cjs +3236 -1237
  17. package/dist/index.cjs.map +1 -1
  18. package/dist/index.d.cts +697 -23
  19. package/dist/index.d.ts +697 -23
  20. package/dist/index.js +3235 -1238
  21. package/dist/index.js.map +1 -1
  22. package/dist/mcp-server.cjs +186 -0
  23. package/dist/mcp-server.cjs.map +1 -0
  24. package/dist/mcp-server.d.cts +32 -0
  25. package/dist/mcp-server.d.ts +32 -0
  26. package/dist/mcp-server.js +175 -0
  27. package/dist/mcp-server.js.map +1 -0
  28. package/migrations/1781200000004_create_cron_schedules_table.sql +33 -0
  29. package/migrations/1781200000005_add_retry_config_to_job_queue.sql +17 -0
  30. package/package.json +24 -21
  31. package/src/backend.ts +170 -5
  32. package/src/backends/postgres.ts +992 -63
  33. package/src/backends/redis-scripts.ts +358 -26
  34. package/src/backends/redis.test.ts +1532 -0
  35. package/src/backends/redis.ts +993 -35
  36. package/src/cli.test.ts +82 -6
  37. package/src/cli.ts +73 -10
  38. package/src/cron.test.ts +126 -0
  39. package/src/cron.ts +40 -0
  40. package/src/db-util.ts +1 -1
  41. package/src/index.test.ts +1034 -11
  42. package/src/index.ts +267 -39
  43. package/src/init-command.test.ts +449 -0
  44. package/src/init-command.ts +709 -0
  45. package/src/install-mcp-command.test.ts +216 -0
  46. package/src/install-mcp-command.ts +185 -0
  47. package/src/install-rules-command.test.ts +218 -0
  48. package/src/install-rules-command.ts +233 -0
  49. package/src/install-skills-command.test.ts +176 -0
  50. package/src/install-skills-command.ts +124 -0
  51. package/src/mcp-server.test.ts +162 -0
  52. package/src/mcp-server.ts +231 -0
  53. package/src/processor.ts +104 -113
  54. package/src/queue.test.ts +465 -0
  55. package/src/queue.ts +34 -252
  56. package/src/supervisor.test.ts +340 -0
  57. package/src/supervisor.ts +177 -0
  58. package/src/types.ts +476 -12
  59. package/LICENSE +0 -21
package/src/queue.test.ts CHANGED
@@ -141,6 +141,35 @@ describe('queue integration', () => {
141
141
  expect(job).toBeNull();
142
142
  });
143
143
 
144
+ it('should cleanup old completed jobs in batches', async () => {
145
+ // Add and complete 5 jobs
146
+ const ids: number[] = [];
147
+ for (let i = 0; i < 5; i++) {
148
+ const jobId = await queue.addJob<{ email: { to: string } }, 'email'>(
149
+ pool,
150
+ {
151
+ jobType: 'email',
152
+ payload: { to: `batch-${i}@example.com` },
153
+ },
154
+ );
155
+ await queue.getNextBatch(pool, 'worker-batch-cleanup', 1);
156
+ await queue.completeJob(pool, jobId);
157
+ ids.push(jobId);
158
+ }
159
+ // Manually backdate all 5
160
+ await pool.query(
161
+ `UPDATE job_queue SET updated_at = NOW() - INTERVAL '31 days' WHERE id = ANY($1::int[])`,
162
+ [ids],
163
+ );
164
+ // Cleanup with batchSize=2 so it takes multiple iterations
165
+ const deleted = await queue.cleanupOldJobs(pool, 30, 2);
166
+ expect(deleted).toBe(5);
167
+ for (const id of ids) {
168
+ const job = await queue.getJob(pool, id);
169
+ expect(job).toBeNull();
170
+ }
171
+ });
172
+
144
173
  it('should cancel a scheduled job', async () => {
145
174
  const jobId = await queue.addJob<{ email: { to: string } }, 'email'>(pool, {
146
175
  jobType: 'email',
@@ -1929,4 +1958,440 @@ describe('getJobs', () => {
1929
1958
  const batch3 = await queue.getNextBatch(pool, 'worker-1', 1);
1930
1959
  expect(batch3.length).toBe(0);
1931
1960
  });
1961
+
1962
+ // ── Configurable retry strategy tests ────────────────────────────────
1963
+
1964
+ it('uses legacy backoff when no retry config is set', async () => {
1965
+ // Setup
1966
+ const jobId = await queue.addJob<{ email: { to: string } }, 'email'>(pool, {
1967
+ jobType: 'email',
1968
+ payload: { to: 'legacy@example.com' },
1969
+ maxAttempts: 3,
1970
+ });
1971
+
1972
+ // Act
1973
+ await queue.getNextBatch(pool, 'worker-1', 1);
1974
+ await queue.failJob(pool, jobId, new Error('fail'));
1975
+
1976
+ // Assert — legacy formula: 2^1 * 60s = 120s from now
1977
+ const job = await queue.getJob(pool, jobId);
1978
+ expect(job?.nextAttemptAt).not.toBeNull();
1979
+ const delaySec =
1980
+ (job!.nextAttemptAt!.getTime() - job!.lastFailedAt!.getTime()) / 1000;
1981
+ expect(delaySec).toBeGreaterThanOrEqual(115);
1982
+ expect(delaySec).toBeLessThanOrEqual(125);
1983
+ });
1984
+
1985
+ it('uses fixed delay when retryBackoff is false', async () => {
1986
+ // Setup
1987
+ const jobId = await queue.addJob<{ email: { to: string } }, 'email'>(pool, {
1988
+ jobType: 'email',
1989
+ payload: { to: 'fixed@example.com' },
1990
+ maxAttempts: 3,
1991
+ retryDelay: 10,
1992
+ retryBackoff: false,
1993
+ });
1994
+
1995
+ // Act
1996
+ await queue.getNextBatch(pool, 'worker-1', 1);
1997
+ await queue.failJob(pool, jobId, new Error('fail'));
1998
+
1999
+ // Assert — fixed 10s delay
2000
+ const job = await queue.getJob(pool, jobId);
2001
+ expect(job?.nextAttemptAt).not.toBeNull();
2002
+ expect(job?.retryDelay).toBe(10);
2003
+ expect(job?.retryBackoff).toBe(false);
2004
+ const delaySec =
2005
+ (job!.nextAttemptAt!.getTime() - job!.lastFailedAt!.getTime()) / 1000;
2006
+ expect(delaySec).toBeGreaterThanOrEqual(9);
2007
+ expect(delaySec).toBeLessThanOrEqual(11);
2008
+ });
2009
+
2010
+ it('uses exponential backoff with custom retryDelay', async () => {
2011
+ // Setup
2012
+ const jobId = await queue.addJob<{ email: { to: string } }, 'email'>(pool, {
2013
+ jobType: 'email',
2014
+ payload: { to: 'expo@example.com' },
2015
+ maxAttempts: 3,
2016
+ retryDelay: 5,
2017
+ retryBackoff: true,
2018
+ });
2019
+
2020
+ // Act — attempt 1
2021
+ await queue.getNextBatch(pool, 'worker-1', 1);
2022
+ await queue.failJob(pool, jobId, new Error('fail'));
2023
+
2024
+ // Assert — exponential: 5 * 2^1 = 10s, with jitter [5, 10]
2025
+ const job = await queue.getJob(pool, jobId);
2026
+ expect(job?.nextAttemptAt).not.toBeNull();
2027
+ const delaySec =
2028
+ (job!.nextAttemptAt!.getTime() - job!.lastFailedAt!.getTime()) / 1000;
2029
+ expect(delaySec).toBeGreaterThanOrEqual(4);
2030
+ expect(delaySec).toBeLessThanOrEqual(11);
2031
+ });
2032
+
2033
+ it('caps exponential backoff with retryDelayMax', async () => {
2034
+ // Setup
2035
+ const jobId = await queue.addJob<{ email: { to: string } }, 'email'>(pool, {
2036
+ jobType: 'email',
2037
+ payload: { to: 'capped@example.com' },
2038
+ maxAttempts: 5,
2039
+ retryDelay: 100,
2040
+ retryBackoff: true,
2041
+ retryDelayMax: 30,
2042
+ });
2043
+
2044
+ // Act — attempt 1
2045
+ await queue.getNextBatch(pool, 'worker-1', 1);
2046
+ await queue.failJob(pool, jobId, new Error('fail'));
2047
+
2048
+ // Assert — 100 * 2^1 = 200s but capped at 30s, with jitter [15, 30]
2049
+ const job = await queue.getJob(pool, jobId);
2050
+ expect(job?.nextAttemptAt).not.toBeNull();
2051
+ expect(job?.retryDelayMax).toBe(30);
2052
+ const delaySec =
2053
+ (job!.nextAttemptAt!.getTime() - job!.lastFailedAt!.getTime()) / 1000;
2054
+ expect(delaySec).toBeGreaterThanOrEqual(14);
2055
+ expect(delaySec).toBeLessThanOrEqual(31);
2056
+ });
2057
+
2058
+ it('stores retry config on job record', async () => {
2059
+ // Setup
2060
+ const jobId = await queue.addJob<{ email: { to: string } }, 'email'>(pool, {
2061
+ jobType: 'email',
2062
+ payload: { to: 'config@example.com' },
2063
+ retryDelay: 30,
2064
+ retryBackoff: false,
2065
+ retryDelayMax: 120,
2066
+ });
2067
+
2068
+ // Act
2069
+ const job = await queue.getJob(pool, jobId);
2070
+
2071
+ // Assert
2072
+ expect(job?.retryDelay).toBe(30);
2073
+ expect(job?.retryBackoff).toBe(false);
2074
+ expect(job?.retryDelayMax).toBe(120);
2075
+ });
2076
+
2077
+ it('returns null retry config for jobs without it', async () => {
2078
+ // Setup
2079
+ const jobId = await queue.addJob<{ email: { to: string } }, 'email'>(pool, {
2080
+ jobType: 'email',
2081
+ payload: { to: 'noconfig@example.com' },
2082
+ });
2083
+
2084
+ // Act
2085
+ const job = await queue.getJob(pool, jobId);
2086
+
2087
+ // Assert
2088
+ expect(job?.retryDelay).toBeNull();
2089
+ expect(job?.retryBackoff).toBeNull();
2090
+ expect(job?.retryDelayMax).toBeNull();
2091
+ });
2092
+
2093
+ it('allows editing retry config via editJob', async () => {
2094
+ // Setup
2095
+ const jobId = await queue.addJob<{ email: { to: string } }, 'email'>(pool, {
2096
+ jobType: 'email',
2097
+ payload: { to: 'edit@example.com' },
2098
+ });
2099
+
2100
+ // Act
2101
+ await queue.editJob(pool, jobId, {
2102
+ retryDelay: 15,
2103
+ retryBackoff: false,
2104
+ retryDelayMax: 60,
2105
+ });
2106
+
2107
+ // Assert
2108
+ const job = await queue.getJob(pool, jobId);
2109
+ expect(job?.retryDelay).toBe(15);
2110
+ expect(job?.retryBackoff).toBe(false);
2111
+ expect(job?.retryDelayMax).toBe(60);
2112
+ });
2113
+ });
2114
+
2115
+ describe('queue.addJob with db option (BYOC)', () => {
2116
+ let pool: Pool;
2117
+ let dbName: string;
2118
+
2119
+ beforeEach(async () => {
2120
+ const setup = await createTestDbAndPool();
2121
+ pool = setup.pool;
2122
+ dbName = setup.dbName;
2123
+ });
2124
+
2125
+ afterEach(async () => {
2126
+ await pool.end();
2127
+ await destroyTestDb(dbName);
2128
+ });
2129
+
2130
+ it('rolls back the job when the transaction is rolled back', async () => {
2131
+ // Setup
2132
+ const client = await pool.connect();
2133
+ await client.query('BEGIN');
2134
+
2135
+ // Act
2136
+ const jobId = await queue.addJob<{ email: { to: string } }, 'email'>(
2137
+ pool,
2138
+ { jobType: 'email', payload: { to: 'rollback@example.com' } },
2139
+ { db: client },
2140
+ );
2141
+ await client.query('ROLLBACK');
2142
+ client.release();
2143
+
2144
+ // Assert
2145
+ const job = await queue.getJob(pool, jobId);
2146
+ expect(job).toBeNull();
2147
+ });
2148
+
2149
+ it('persists the job when the transaction is committed', async () => {
2150
+ // Setup
2151
+ const client = await pool.connect();
2152
+ await client.query('BEGIN');
2153
+
2154
+ // Act
2155
+ const jobId = await queue.addJob<{ email: { to: string } }, 'email'>(
2156
+ pool,
2157
+ { jobType: 'email', payload: { to: 'commit@example.com' } },
2158
+ { db: client },
2159
+ );
2160
+ await client.query('COMMIT');
2161
+ client.release();
2162
+
2163
+ // Assert
2164
+ const job = await queue.getJob(pool, jobId);
2165
+ expect(job).not.toBeNull();
2166
+ expect(job?.payload).toEqual({ to: 'commit@example.com' });
2167
+ });
2168
+ });
2169
+
2170
+ describe('addJobs batch insert', () => {
2171
+ let pool: Pool;
2172
+ let dbName: string;
2173
+
2174
+ beforeEach(async () => {
2175
+ const setup = await createTestDbAndPool();
2176
+ pool = setup.pool;
2177
+ dbName = setup.dbName;
2178
+ });
2179
+
2180
+ afterEach(async () => {
2181
+ await pool.end();
2182
+ await destroyTestDb(dbName);
2183
+ });
2184
+
2185
+ it('inserts multiple jobs and returns IDs in order', async () => {
2186
+ // Act
2187
+ const ids = await queue.addJobs<
2188
+ { email: { to: string }; report: { id: string } },
2189
+ 'email' | 'report'
2190
+ >(pool, [
2191
+ { jobType: 'email', payload: { to: 'a@test.com' } },
2192
+ { jobType: 'report', payload: { id: 'r1' } },
2193
+ { jobType: 'email', payload: { to: 'b@test.com' } },
2194
+ ]);
2195
+
2196
+ // Assert
2197
+ expect(ids).toHaveLength(3);
2198
+ expect(ids[0]).toBeLessThan(ids[1]);
2199
+ expect(ids[1]).toBeLessThan(ids[2]);
2200
+
2201
+ const job0 = await queue.getJob(pool, ids[0]);
2202
+ expect(job0?.jobType).toBe('email');
2203
+ expect(job0?.payload).toEqual({ to: 'a@test.com' });
2204
+
2205
+ const job1 = await queue.getJob(pool, ids[1]);
2206
+ expect(job1?.jobType).toBe('report');
2207
+ expect(job1?.payload).toEqual({ id: 'r1' });
2208
+
2209
+ const job2 = await queue.getJob(pool, ids[2]);
2210
+ expect(job2?.jobType).toBe('email');
2211
+ expect(job2?.payload).toEqual({ to: 'b@test.com' });
2212
+ });
2213
+
2214
+ it('returns empty array for empty input', async () => {
2215
+ // Act
2216
+ const ids = await queue.addJobs(pool, []);
2217
+
2218
+ // Assert
2219
+ expect(ids).toEqual([]);
2220
+ });
2221
+
2222
+ it('respects priority and runAt per job', async () => {
2223
+ // Setup
2224
+ const futureDate = new Date(Date.now() + 60_000);
2225
+
2226
+ // Act
2227
+ const ids = await queue.addJobs<{ task: { n: number } }, 'task'>(pool, [
2228
+ { jobType: 'task', payload: { n: 1 }, priority: 5 },
2229
+ { jobType: 'task', payload: { n: 2 }, priority: 10, runAt: futureDate },
2230
+ ]);
2231
+
2232
+ // Assert
2233
+ const job0 = await queue.getJob(pool, ids[0]);
2234
+ expect(job0?.priority).toBe(5);
2235
+
2236
+ const job1 = await queue.getJob(pool, ids[1]);
2237
+ expect(job1?.priority).toBe(10);
2238
+ expect(job1?.runAt.getTime()).toBeCloseTo(futureDate.getTime(), -3);
2239
+ });
2240
+
2241
+ it('handles idempotency keys for new jobs', async () => {
2242
+ // Act
2243
+ const ids = await queue.addJobs<{ task: { n: number } }, 'task'>(pool, [
2244
+ { jobType: 'task', payload: { n: 1 }, idempotencyKey: 'key-a' },
2245
+ { jobType: 'task', payload: { n: 2 }, idempotencyKey: 'key-b' },
2246
+ ]);
2247
+
2248
+ // Assert
2249
+ expect(ids).toHaveLength(2);
2250
+ expect(ids[0]).not.toBe(ids[1]);
2251
+
2252
+ const job0 = await queue.getJob(pool, ids[0]);
2253
+ expect(job0?.idempotencyKey).toBe('key-a');
2254
+
2255
+ const job1 = await queue.getJob(pool, ids[1]);
2256
+ expect(job1?.idempotencyKey).toBe('key-b');
2257
+ });
2258
+
2259
+ it('returns existing IDs for conflicting idempotency keys', async () => {
2260
+ // Setup — insert a job first
2261
+ const existingId = await queue.addJob<{ task: { n: number } }, 'task'>(
2262
+ pool,
2263
+ { jobType: 'task', payload: { n: 0 }, idempotencyKey: 'dup-key' },
2264
+ );
2265
+
2266
+ // Act — batch includes a duplicate key
2267
+ const ids = await queue.addJobs<{ task: { n: number } }, 'task'>(pool, [
2268
+ { jobType: 'task', payload: { n: 1 } },
2269
+ { jobType: 'task', payload: { n: 2 }, idempotencyKey: 'dup-key' },
2270
+ { jobType: 'task', payload: { n: 3 } },
2271
+ ]);
2272
+
2273
+ // Assert
2274
+ expect(ids).toHaveLength(3);
2275
+ expect(ids[1]).toBe(existingId);
2276
+ expect(ids[0]).not.toBe(existingId);
2277
+ expect(ids[2]).not.toBe(existingId);
2278
+ });
2279
+
2280
+ it('handles mix of keyed and non-keyed jobs', async () => {
2281
+ // Act
2282
+ const ids = await queue.addJobs<{ task: { n: number } }, 'task'>(pool, [
2283
+ { jobType: 'task', payload: { n: 1 } },
2284
+ { jobType: 'task', payload: { n: 2 }, idempotencyKey: 'mix-1' },
2285
+ { jobType: 'task', payload: { n: 3 } },
2286
+ { jobType: 'task', payload: { n: 4 }, idempotencyKey: 'mix-2' },
2287
+ { jobType: 'task', payload: { n: 5 } },
2288
+ ]);
2289
+
2290
+ // Assert
2291
+ expect(ids).toHaveLength(5);
2292
+ const uniqueIds = new Set(ids);
2293
+ expect(uniqueIds.size).toBe(5);
2294
+
2295
+ const job1 = await queue.getJob(pool, ids[1]);
2296
+ expect(job1?.idempotencyKey).toBe('mix-1');
2297
+
2298
+ const job3 = await queue.getJob(pool, ids[3]);
2299
+ expect(job3?.idempotencyKey).toBe('mix-2');
2300
+ });
2301
+
2302
+ it('records added events only for newly inserted jobs', async () => {
2303
+ // Setup — pre-insert a job with a known key
2304
+ const existingId = await queue.addJob<{ task: { n: number } }, 'task'>(
2305
+ pool,
2306
+ { jobType: 'task', payload: { n: 0 }, idempotencyKey: 'evt-key' },
2307
+ );
2308
+
2309
+ // Act
2310
+ const ids = await queue.addJobs<{ task: { n: number } }, 'task'>(pool, [
2311
+ { jobType: 'task', payload: { n: 1 } },
2312
+ { jobType: 'task', payload: { n: 2 }, idempotencyKey: 'evt-key' },
2313
+ ]);
2314
+
2315
+ // Assert — the new job should have an event from addJobs
2316
+ const events0 = await queue.getJobEvents(pool, ids[0]);
2317
+ const addedEvents0 = events0.filter(
2318
+ (e: JobEvent) => e.eventType === JobEventType.Added,
2319
+ );
2320
+ expect(addedEvents0).toHaveLength(1);
2321
+
2322
+ // The duplicate should only have the original event from addJob, not a second from addJobs
2323
+ const eventsExisting = await queue.getJobEvents(pool, existingId);
2324
+ const addedEventsExisting = eventsExisting.filter(
2325
+ (e: JobEvent) => e.eventType === JobEventType.Added,
2326
+ );
2327
+ expect(addedEventsExisting).toHaveLength(1);
2328
+ });
2329
+
2330
+ it('stores tags correctly per job', async () => {
2331
+ // Act
2332
+ const ids = await queue.addJobs<{ task: { n: number } }, 'task'>(pool, [
2333
+ { jobType: 'task', payload: { n: 1 }, tags: ['urgent', 'billing'] },
2334
+ { jobType: 'task', payload: { n: 2 }, tags: ['low-priority'] },
2335
+ { jobType: 'task', payload: { n: 3 } },
2336
+ ]);
2337
+
2338
+ // Assert
2339
+ const job0 = await queue.getJob(pool, ids[0]);
2340
+ expect(job0?.tags).toEqual(['urgent', 'billing']);
2341
+
2342
+ const job1 = await queue.getJob(pool, ids[1]);
2343
+ expect(job1?.tags).toEqual(['low-priority']);
2344
+
2345
+ const job2 = await queue.getJob(pool, ids[2]);
2346
+ expect(job2?.tags).toBeNull();
2347
+ });
2348
+
2349
+ it('works with transactional db option — commit', async () => {
2350
+ // Setup
2351
+ const client = await pool.connect();
2352
+ await client.query('BEGIN');
2353
+
2354
+ // Act
2355
+ const ids = await queue.addJobs<{ task: { n: number } }, 'task'>(
2356
+ pool,
2357
+ [
2358
+ { jobType: 'task', payload: { n: 1 } },
2359
+ { jobType: 'task', payload: { n: 2 } },
2360
+ ],
2361
+ { db: client },
2362
+ );
2363
+ await client.query('COMMIT');
2364
+ client.release();
2365
+
2366
+ // Assert
2367
+ expect(ids).toHaveLength(2);
2368
+ const job0 = await queue.getJob(pool, ids[0]);
2369
+ expect(job0).not.toBeNull();
2370
+ const job1 = await queue.getJob(pool, ids[1]);
2371
+ expect(job1).not.toBeNull();
2372
+ });
2373
+
2374
+ it('works with transactional db option — rollback', async () => {
2375
+ // Setup
2376
+ const client = await pool.connect();
2377
+ await client.query('BEGIN');
2378
+
2379
+ // Act
2380
+ const ids = await queue.addJobs<{ task: { n: number } }, 'task'>(
2381
+ pool,
2382
+ [
2383
+ { jobType: 'task', payload: { n: 1 } },
2384
+ { jobType: 'task', payload: { n: 2 } },
2385
+ ],
2386
+ { db: client },
2387
+ );
2388
+ await client.query('ROLLBACK');
2389
+ client.release();
2390
+
2391
+ // Assert — jobs should not exist after rollback
2392
+ const job0 = await queue.getJob(pool, ids[0]);
2393
+ expect(job0).toBeNull();
2394
+ const job1 = await queue.getJob(pool, ids[1]);
2395
+ expect(job1).toBeNull();
2396
+ });
1932
2397
  });