@pingpolls/redisq 0.2.0 → 1.0.0

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
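The headline change in this release is that the queue class is now a named export, `RedisQ`, instead of the default-exported `RedisQueue`. A minimal migration sketch for consumer code, assembled from the README examples in this diff (the queue options and worker body are illustrative):

```typescript
// Before (0.2.0): default export
// import RedisQueue from '@pingpolls/redisq';
// const queue = new RedisQueue({ host: '127.0.0.1', port: '6379', namespace: 'myapp' });

// After (1.0.0): named export
import { RedisQ } from '@pingpolls/redisq';

const queue = new RedisQ({
  host: '127.0.0.1',
  port: '6379', // the README passes the port as a string
  namespace: 'myapp'
});

// Worker callbacks keep the same shape; only the documented parameter name changed.
await queue.startWorker('emails', async (received) => {
  const email = JSON.parse(received.message);
  console.log(`Sending email to ${email.to}`);
  return { success: true }; // { success: false } requests a retry
});
```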
package/README.md CHANGED
@@ -26,10 +26,10 @@ bun install @pingpolls/redisq
 ## Quick Start
 
 ```typescript
-import RedisQueue from '@pingpolls/redisq';
+import { RedisQ } from '@pingpolls/redisq';
 
 // Initialize the queue
-const queue = new RedisQueue({
+const queue = new RedisQ({
   host: '127.0.0.1',
   port: '6379',
   namespace: 'myapp'
@@ -45,8 +45,8 @@ await queue.sendMessage({
 });
 
 // Start a worker to process messages
-await queue.startWorker('emails', async (message) => {
-  const email = JSON.parse(message.message);
+await queue.startWorker('emails', async (received) => {
+  const email = JSON.parse(received.message);
   console.log(`Sending email to ${email.to}`);
 
   // Process your message here
@@ -70,14 +70,12 @@ Batch queues (suffix `:batch`) collect messages over a time period and process t
 
 ### SvelteKit
 
-#### Option 1: Direct Import (Recommended for Simple Apps)
-
 Create a queue service in `src/lib/server/queue.ts`:
 
 ```typescript
-import RedisQueue from '@pingpolls/redisq';
+import { RedisQ } from '@pingpolls/redisq';
 
-export const queue = new RedisQueue({
+export const queue = new RedisQ({
   host: import.meta.env.REDIS_HOST || '127.0.0.1',
   port: import.meta.env.REDIS_PORT || '6379',
   namespace: 'sveltekit-app'
@@ -102,8 +100,8 @@ import { queue, initQueues } from '$lib/server/queue';
 await initQueues();
 
 // Start email worker
-queue.startWorker('emails', async (message) => {
-  const data = JSON.parse(message.message);
+queue.startWorker('emails', async (received) => {
+  const data = JSON.parse(received.message);
   await sendEmail(data);
   return { success: true };
 }, { silent: false });
@@ -160,148 +158,14 @@ export async function load() {
 }
 ```
 
-#### Option 2: Using event.locals (Recommended for Larger Apps)
-
-This approach provides better dependency injection and testing capabilities with SvelteKit 5.
-
-Create the queue service in `src/lib/server/queue.ts`:
-
-```typescript
-import RedisQueue from '@pingpolls/redisq';
-
-export function createQueue() {
-  return new RedisQueue({
-    host: import.meta.env.REDIS_HOST || '127.0.0.1',
-    port: import.meta.env.REDIS_PORT || '6379',
-    namespace: 'sveltekit-app'
-  });
-}
-
-export async function initQueues(queue: RedisQueue) {
-  await queue.createQueue({ qname: 'emails', maxRetries: 3 });
-  await queue.createQueue({
-    qname: 'analytics:batch',
-    every: 60,
-    maxRetries: 2
-  });
-}
-```
-
-Setup in `src/hooks.server.ts`:
-
-```typescript
-import type { Handle } from '@sveltejs/kit';
-import { createQueue, initQueues } from '$lib/server/queue';
-
-// Create single queue instance
-const queue = createQueue();
-await initQueues(queue);
-
-// Start workers
-queue.startWorker('emails', async (message) => {
-  const data = JSON.parse(message.message);
-  await sendEmail(data);
-  return { success: true };
-});
-
-queue.startWorker('analytics:batch', async (batch) => {
-  await saveToDB(batch.messages);
-  return { success: true };
-});
-
-export const handle: Handle = async ({ event, resolve }) => {
-  // Attach queue to event.locals
-  event.locals.queue = queue;
-
-  return resolve(event);
-};
-```
-
-Add TypeScript types in `src/app.d.ts`:
-
-```typescript
-import type RedisQueue from '@pingpolls/redisq';
-
-declare global {
-  namespace App {
-    interface Locals {
-      queue: RedisQueue;
-    }
-  }
-}
-
-export {};
-```
-
-Now use `event.locals.queue` in your routes:
-
-```typescript
-// src/routes/api/signup/+server.ts
-import { json } from '@sveltejs/kit';
-import type { RequestHandler } from './$types';
-
-export const POST: RequestHandler = async ({ request, locals }) => {
-  const { email, name } = await request.json();
-
-  // Access queue from locals
-  await locals.queue.sendMessage({
-    qname: 'emails',
-    message: JSON.stringify({ to: email, template: 'welcome', name })
-  });
-
-  await locals.queue.sendBatchMessage({
-    qname: 'analytics:batch',
-    batchId: 'signups',
-    message: JSON.stringify({ event: 'signup', email, timestamp: Date.now() })
-  });
-
-  return json({ success: true });
-};
-```
-
-```typescript
-// src/routes/orders/[id]/+page.server.ts
-import type { PageServerLoad, Actions } from './$types';
-
-export const load: PageServerLoad = async ({ params, locals }) => {
-  // Queue background task
-  await locals.queue.sendMessage({
-    qname: 'emails',
-    message: JSON.stringify({ type: 'order-confirmation', orderId: params.id })
-  });
-
-  return {
-    orderId: params.id
-  };
-};
-
-export const actions: Actions = {
-  cancel: async ({ params, locals }) => {
-    await locals.queue.sendMessage({
-      qname: 'emails',
-      message: JSON.stringify({ type: 'order-cancellation', orderId: params.id })
-    });
-
-    return { success: true };
-  }
-};
-```
-
-**Benefits of using `event.locals`:**
-- Single queue instance across your app
-- Better for dependency injection and testing
-- Cleaner imports in routes
-- Type-safe access with TypeScript
-- Follows SvelteKit 5 best practices
-
 ### Next.js (App Router)
 
 Create queue in `lib/queue.ts`:
 
 ```typescript
-import RedisQueue from '@pingpolls/redisq';
+import { RedisQ } from '@pingpolls/redisq';
 
-export const queue = new RedisQueue({
+export const queue = new RedisQ({
   host: process.env.REDIS_HOST!,
   port: process.env.REDIS_PORT!,
   namespace: 'nextjs-app'
@@ -317,8 +181,8 @@ import { queue } from '@/lib/queue';
 await queue.createQueue({ qname: 'notifications', maxRetries: 3 });
 
 // Start worker
-queue.startWorker('notifications', async (message) => {
-  const notification = JSON.parse(message.message);
+queue.startWorker('notifications', async (received) => {
+  const notification = JSON.parse(received.message);
   await sendPushNotification(notification);
   return { success: true };
 });
@@ -353,10 +217,10 @@ export async function POST(request: Request) {
 Create queue plugin in `server/plugins/queue.ts`:
 
 ```typescript
-import RedisQueue from '@pingpolls/redisq';
+import { RedisQ } from '@pingpolls/redisq';
 
 export default defineNitroPlugin(async (nitroApp) => {
-  const queue = new RedisQueue({
+  const queue = new RedisQ({
     host: process.env.REDIS_HOST || '127.0.0.1',
     port: process.env.REDIS_PORT || '6379',
     namespace: 'nuxt-app'
@@ -365,8 +229,8 @@ export default defineNitroPlugin(async (nitroApp) => {
   await queue.createQueue({ qname: 'tasks', maxRetries: 3 });
 
   // Start worker
-  queue.startWorker('tasks', async (message) => {
-    const task = JSON.parse(message.message);
+  queue.startWorker('tasks', async (received) => {
+    const task = JSON.parse(received.message);
     await processTask(task);
     return { success: true };
   });
@@ -396,11 +260,11 @@ export default defineEventHandler(async (event) => {
 
 ```typescript
 import { Hono } from 'hono';
-import RedisQueue from '@pingpolls/redisq';
+import { RedisQ } from '@pingpolls/redisq';
 
 const app = new Hono();
 
-const queue = new RedisQueue({
+const queue = new RedisQ({
   host: '127.0.0.1',
   port: '6379',
   namespace: 'hono-app'
@@ -415,8 +279,8 @@ await queue.createQueue({
 });
 
 // Start regular queue worker
-queue.startWorker('webhooks', async (message) => {
-  const webhook = JSON.parse(message.message);
+queue.startWorker('webhooks', async (received) => {
+  const webhook = JSON.parse(received.message);
   await fetch(webhook.url, {
     method: 'POST',
     body: JSON.stringify(webhook.data),
@@ -465,7 +329,7 @@ export default app;
 ### Constructor
 
 ```typescript
-new RedisQueue(options: QueueOptions)
+new RedisQ(options: QueueOptions)
 ```
 
 **Options:**
@@ -543,11 +407,11 @@ Starts a worker to process messages.
 
 ```typescript
 // Regular queue worker
-await queue.startWorker('my-queue', async (message) => {
-  console.log(message.id);
-  console.log(message.message);
-  console.log(message.attempt);
-  console.log(message.sent);
+await queue.startWorker('my-queue', async (received) => {
+  console.log(received.id);
+  console.log(received.message);
+  console.log(received.attempt);
+  console.log(received.sent);
 
   return { success: true }; // or { success: false } to retry
 }, {
@@ -722,11 +586,11 @@ REDIS_TLS=false
 
 ```typescript
 import { describe, test, expect } from 'bun:test';
-import RedisQueue from '@pingpolls/redisq';
+import { RedisQ } from '@pingpolls/redisq';
 
 describe('Queue Tests', () => {
  test('processes messages', async () => {
-    const queue = new RedisQueue({ host: '127.0.0.1', port: '6379' });
+    const queue = new RedisQ({ host: '127.0.0.1', port: '6379' });
 
    await queue.createQueue({ qname: 'test' });
 
@@ -757,16 +621,16 @@ Run the stress test to benchmark on your hardware:
 bun stress
 ```
 
-Individual Queue Performance:
-- **Tiny messages (100B)**: 47181 msg/s (p50: 49.97ms)
-- **Small messages (1KB)**: 33709 msg/s (p50: 69.05ms)
-- **Medium messages (10KB)**: 4056 msg/s (p50: 261.32ms)
-
-Overall:
-- **Throughput**: ~28,315 messages/second
-- **Latency (p50)**: 126.78 ms
-- **Latency (p95)**: 299.27 ms
-- **Latency (p99)**: 383.86 ms
+Individual Queue Performance:
+- **Tiny messages (100B)**: 49,302 msg/s (p50: 49.75ms)
+- **Small messages (1KB)**: 35,061 msg/s (p50: 74.34ms)
+- **Medium messages (10KB)**: 9,437 msg/s (p50: 213.58ms)
+
+💡 Overall (Averaged across all tests):
+- **Throughput**: ~31,267 messages/second
+- **Latency (p50)**: 112.56 ms
+- **Latency (p95)**: 312.11 ms
+- **Latency (p99)**: 676.23 ms
 
 Tested on Dockerized `redis:alpine` through WSL2 with 1 CPU and 1GB instance.
 
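The hunks above leave the batch-queue API itself untouched; only the SvelteKit "Option 2" walkthrough that demonstrated it was removed. For reference, a compact sketch of that batch flow, reassembled from the removed README examples (the queue name, interval, and `saveToDB` helper are illustrative, not part of the package):

```typescript
import { RedisQ } from '@pingpolls/redisq';

// Hypothetical persistence helper standing in for real storage logic.
const saveToDB = async (rows: unknown) => { /* persist the collected batch */ };

const queue = new RedisQ({ host: '127.0.0.1', port: '6379', namespace: 'myapp' });

// Batch queues use the ':batch' suffix and a collection interval (`every`).
await queue.createQueue({ qname: 'analytics:batch', every: 60, maxRetries: 2 });

// Messages carry a batchId and are collected over the queue's time period.
await queue.sendBatchMessage({
  qname: 'analytics:batch',
  batchId: 'signups',
  message: JSON.stringify({ event: 'signup', timestamp: Date.now() })
});

// The batch worker receives the whole batch, as in the removed Option 2 example.
await queue.startWorker('analytics:batch', async (batch) => {
  await saveToDB(batch.messages);
  return { success: true };
});
```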
package/app.test.ts CHANGED
@@ -1,5 +1,5 @@
 import { afterAll, describe, expect, test } from "bun:test";
-import RedisQueue from "./app";
+import { RedisQ } from "./app";
 
 const redisConfig = {
   host: process.env.REDIS_HOST || "127.0.0.1",
@@ -16,9 +16,9 @@ const workerConfig = {
  */
 const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));
 
-describe("RedisQueue Tests", () => {
+describe("RedisQ Tests", () => {
   test("1. Can send and receive single message", async () => {
-    const queue = new RedisQueue(redisConfig);
+    const queue = new RedisQ(redisConfig);
 
     await queue.createQueue({ qname: "test-basic" });
 
@@ -51,7 +51,7 @@ describe("RedisQueue Tests", () => {
   });
 
   test("2. Can send and receive delayed message", async () => {
-    const queue = new RedisQueue(redisConfig);
+    const queue = new RedisQ(redisConfig);
 
     await queue.createQueue({ qname: "test-delayed" });
 
@@ -97,7 +97,7 @@ describe("RedisQueue Tests", () => {
   });
 
   test("3. Can retry send and receive single message", async () => {
-    const queue = new RedisQueue(redisConfig);
+    const queue = new RedisQ(redisConfig);
 
     await queue.createQueue({
       maxBackoffSeconds: 1,
@@ -141,7 +141,7 @@ describe("RedisQueue Tests", () => {
   });
 
   test("4. Can retry send and receive delayed message", async () => {
-    const queue = new RedisQueue(redisConfig);
+    const queue = new RedisQ(redisConfig);
 
     await queue.createQueue({
       maxBackoffSeconds: 1,
@@ -198,7 +198,7 @@ describe("RedisQueue Tests", () => {
   });
 
   test("5. Can send and receive batched messages (multiple batches in same period)", async () => {
-    const queue = new RedisQueue(redisConfig);
+    const queue = new RedisQ(redisConfig);
 
     /**
      * Create batch queue with 3-second interval
@@ -328,7 +328,7 @@ describe("RedisQueue Tests", () => {
   });
 
   test("6. Can send and receive batched messages with selective retry", async () => {
-    const queue = new RedisQueue(redisConfig);
+    const queue = new RedisQ(redisConfig);
 
     /**
      * Create batch queue with 2-second interval and retry enabled
@@ -509,7 +509,7 @@ describe("RedisQueue Tests", () => {
   test("7. Can handle high volume and concurrency", async () => {
     const totalMessages = 10_000;
 
-    const queue = new RedisQueue(redisConfig);
+    const queue = new RedisQ(redisConfig);
 
     await queue.createQueue({ qname: "test-concurrency" });
 
package/app.ts CHANGED
@@ -79,7 +79,7 @@ interface StoredBatchMeta {
   attempt: number;
 }
 
-export class RedisQueue {
+export class RedisQ {
   private redis: BunRedisClient;
   private redisUrl: string;
   private ns: string;
@@ -550,7 +550,7 @@ export class RedisQueue {
 
   private async processBatches(
     qname: string,
-    handler: (message: BatchMessage) => Promise<{ success: boolean }>,
+    handler: (batch: BatchMessage) => Promise<{ success: boolean }>,
     silent: boolean,
   ): Promise<void> {
     if (this.isClosing) return;
@@ -600,7 +600,7 @@ export class RedisQueue {
   async startWorker<QueueName extends `${string}:batch` | (string & {})>(
     qname: QueueName,
     handler: (
-      message: QueueName extends `${string}:batch`
+      received: QueueName extends `${string}:batch`
         ? BatchMessage
         : Message,
     ) => Promise<{ success: boolean }>,
@@ -630,7 +630,7 @@ export class RedisQueue {
       await this.processBatches(
         qname,
         handler as (
-          message: BatchMessage,
+          received: BatchMessage,
         ) => Promise<{ success: boolean }>,
         silent,
       );
@@ -653,7 +653,7 @@ export class RedisQueue {
       this.runWorker(
         qname,
         handler as (
-          message: Message,
+          received: Message,
        ) => Promise<{ success: boolean }>,
        controller.signal,
        i,
@@ -667,7 +667,7 @@ export class RedisQueue {
 
   private async runWorker(
     qname: string,
-    handler: (message: Message) => Promise<{ success: boolean }>,
+    handler: (received: Message) => Promise<{ success: boolean }>,
     signal: AbortSignal,
     workerId: number,
     silent: boolean,
@@ -687,13 +687,16 @@ export class RedisQueue {
         ids as string[],
       );
 
-      const processingPromises = messages.map((message) =>
-        handler(message)
+      const processingPromises = messages.map((received) =>
+        handler(received)
          .then(async ({ success }) => {
            if (success) {
-              await this.deleteMessage(qname, message.id);
+              await this.deleteMessage(
+                qname,
+                received.id,
+              );
            } else {
-              await this.retryMessage(qname, message.id);
+              await this.retryMessage(qname, received.id);
            }
          })
          .catch(async (error) => {
@@ -703,7 +706,7 @@ export class RedisQueue {
              (error as Error).message,
            );
          }
-          await this.retryMessage(qname, message.id);
+          await this.retryMessage(qname, received.id);
        }),
      );
      await Promise.all(processingPromises);
@@ -763,4 +766,4 @@ export class RedisQueue {
   }
 }
 
-export default RedisQueue;
+export default { RedisQ };
@@ -1,4 +1,5 @@
-import RedisQueue from "../app";
+import { RedisQ } from "../app";
+
 declare var self: Worker;
 
 interface WorkerMessage {
@@ -22,7 +23,7 @@ self.addEventListener("message", async (event: MessageEvent<WorkerMessage>) => {
     event.data.data;
 
   try {
-    const queue = new RedisQueue(redisConfig);
+    const queue = new RedisQ(redisConfig);
     const latencies: number[] = [];
     const CHUNK_SIZE = 1000;
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env bun
 
 import type { Worker } from "bun";
-import RedisQueue from "../app";
+import { RedisQ } from "../app";
 import mediumMsg from "./medium.txt";
 import smallMsg from "./small.txt";
 import tinyMsg from "./tiny.txt";
@@ -108,7 +108,7 @@ function printResults(testName: string, results: TestResults) {
 async function testRegularQueueParallel(
   config: StressTestConfig,
 ): Promise<TestResults> {
-  const queue = new RedisQueue({
+  const queue = new RedisQ({
    host: process.env.REDIS_HOST || "127.0.0.1",
    namespace: "stress-test",
    port: process.env.REDIS_PORT || "6379",
package/package.json CHANGED
@@ -27,5 +27,5 @@
     "stress": "bun benchmark/stress.ts"
   },
   "type": "module",
-  "version": "0.2.0"
+  "version": "1.0.0"
 }
package/tsconfig.json CHANGED
@@ -1,29 +1,29 @@
 {
-  "compilerOptions": {
-    // Environment setup & latest features
-    "lib": ["ESNext"],
-    "target": "ESNext",
-    "module": "Preserve",
-    "moduleDetection": "force",
-    "jsx": "react-jsx",
-    "allowJs": true,
+  "compilerOptions": {
+    "allowImportingTsExtensions": true,
+    "allowJs": true,
+    "jsx": "react-jsx",
+    // Environment setup & latest features
+    "lib": ["ESNext"],
+    "module": "Preserve",
+    "moduleDetection": "force",
 
-    // Bundler mode
-    "moduleResolution": "bundler",
-    "allowImportingTsExtensions": true,
-    "verbatimModuleSyntax": true,
-    "noEmit": true,
+    // Bundler mode
+    "moduleResolution": "bundler",
+    "noEmit": true,
+    "noFallthroughCasesInSwitch": true,
+    "noImplicitOverride": true,
+    "noPropertyAccessFromIndexSignature": false,
+    "noUncheckedIndexedAccess": true,
 
-    // Best practices
-    "strict": true,
-    "skipLibCheck": true,
-    "noFallthroughCasesInSwitch": true,
-    "noUncheckedIndexedAccess": true,
-    "noImplicitOverride": true,
+    // Some stricter flags (disabled by default)
+    "noUnusedLocals": false,
+    "noUnusedParameters": false,
+    "skipLibCheck": true,
 
-    // Some stricter flags (disabled by default)
-    "noUnusedLocals": false,
-    "noUnusedParameters": false,
-    "noPropertyAccessFromIndexSignature": false
-  }
+    // Best practices
+    "strict": true,
+    "target": "ESNext",
+    "verbatimModuleSyntax": true
+  }
 }