@pingpolls/redisq 0.2.1 → 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +23 -23
- package/dist/app.d.ts +92 -0
- package/{app.ts → dist/app.js} +496 -769
- package/dist/app.test.d.ts +1 -0
- package/dist/app.test.js +427 -0
- package/dist/benchmark/stress-worker.d.ts +1 -0
- package/dist/benchmark/stress-worker.js +39 -0
- package/dist/benchmark/stress.d.ts +2 -0
- package/{benchmark/stress.ts → dist/benchmark/stress.js} +219 -364
- package/package.json +17 -3
- package/.env.example +0 -2
- package/.prototools +0 -1
- package/app.test.ts +0 -574
- package/benchmark/medium.txt +0 -1
- package/benchmark/small.txt +0 -1
- package/benchmark/stress-worker.ts +0 -63
- package/benchmark/tiny.txt +0 -1
- package/biome.json +0 -79
- package/compose.yml +0 -25
- package/redis.conf +0 -1
- package/tsconfig.json +0 -29
package/README.md
CHANGED
@@ -26,10 +26,10 @@ bun install @pingpolls/redisq
 ## Quick Start
 
 ```typescript
-import
+import { RedisQ } from '@pingpolls/redisq';
 
 // Initialize the queue
-const queue = new
+const queue = new RedisQ({
   host: '127.0.0.1',
   port: '6379',
   namespace: 'myapp'
@@ -73,9 +73,9 @@ Batch queues (suffix `:batch`) collect messages over a time period and process t
 Create a queue service in `src/lib/server/queue.ts`:
 
 ```typescript
-import
+import { RedisQ } from '@pingpolls/redisq';
 
-export const queue = new
+export const queue = new RedisQ({
   host: import.meta.env.REDIS_HOST || '127.0.0.1',
   port: import.meta.env.REDIS_PORT || '6379',
   namespace: 'sveltekit-app'
@@ -163,9 +163,9 @@ export async function load() {
 Create queue in `lib/queue.ts`:
 
 ```typescript
-import
+import { RedisQ } from '@pingpolls/redisq';
 
-export const queue = new
+export const queue = new RedisQ({
   host: process.env.REDIS_HOST!,
   port: process.env.REDIS_PORT!,
   namespace: 'nextjs-app'
@@ -217,10 +217,10 @@ export async function POST(request: Request) {
 Create queue plugin in `server/plugins/queue.ts`:
 
 ```typescript
-import
+import { RedisQ } from '@pingpolls/redisq';
 
 export default defineNitroPlugin(async (nitroApp) => {
-  const queue = new
+  const queue = new RedisQ({
     host: process.env.REDIS_HOST || '127.0.0.1',
     port: process.env.REDIS_PORT || '6379',
     namespace: 'nuxt-app'
@@ -260,11 +260,11 @@ export default defineEventHandler(async (event) => {
 
 ```typescript
 import { Hono } from 'hono';
-import
+import { RedisQ } from '@pingpolls/redisq';
 
 const app = new Hono();
 
-const queue = new
+const queue = new RedisQ({
   host: '127.0.0.1',
   port: '6379',
   namespace: 'hono-app'
@@ -329,7 +329,7 @@ export default app;
 ### Constructor
 
 ```typescript
-new
+new RedisQ(options: QueueOptions)
 ```
 
 **Options:**
@@ -586,11 +586,11 @@ REDIS_TLS=false
 
 ```typescript
 import { describe, test, expect } from 'bun:test';
-import
+import { RedisQ } from '@pingpolls/redisq';
 
 describe('Queue Tests', () => {
   test('processes messages', async () => {
-    const queue = new
+    const queue = new RedisQ({ host: '127.0.0.1', port: '6379' });
 
     await queue.createQueue({ qname: 'test' });
 
@@ -621,16 +621,16 @@ Run the stress test to benchmark on your hardware:
 bun stress
 ```
 
-Individual Queue Performance:
-- **Tiny messages (100B)**:
-- **Small messages (1KB)**:
-- **Medium messages (10KB)**:
-
-Overall:
-- **Throughput**: ~
-- **Latency (p50)**:
-- **Latency (p95)**:
-- **Latency (p99)**:
+✅ Individual Queue Performance:
+- **Tiny messages (100B)**: 49,302 msg/s (p50: 49.75ms)
+- **Small messages (1KB)**: 35,061 msg/s (p50: 74.34ms)
+- **Medium messages (10KB)**: 9,437 msg/s (p50: 213.58ms)
+
+💡 Overall (Averaged across all tests):
+- **Throughput**: ~31,267 messages/second
+- **Latency (p50)**: 112.56 ms
+- **Latency (p95)**: 312.11 ms
+- **Latency (p99)**: 676.23 ms
 
 Tested on Dockerized `redis:alpine` through WSL2 with 1 CPU and 1GB instance.
 
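Every README hunk above makes the same substitution: the import and constructor lines (truncated in this diff) now use the `RedisQ` class exported from `@pingpolls/redisq`. A minimal end-to-end sketch of the updated Quick Start follows; it relies on the `createQueue`, `sendMessage`, and `startWorker` signatures declared in `dist/app.d.ts` (shown next), and the queue name, retry count, and payload are illustrative values, not taken from the package.

```typescript
import { RedisQ } from '@pingpolls/redisq';

// Connection options as shown in the updated Quick Start hunk.
const queue = new RedisQ({
  host: '127.0.0.1',
  port: '6379',
  namespace: 'myapp',
});

// Create a standard (non-batch) queue. Only qname is required per
// CreateQueueOptions; 'emails' and maxRetries: 3 are assumed values.
await queue.createQueue({ qname: 'emails', maxRetries: 3 });

// sendMessage takes a string payload and resolves with the new message id.
const id = await queue.sendMessage({
  qname: 'emails',
  message: JSON.stringify({ to: 'user@example.com' }),
});

// startWorker delivers Message objects; the handler returns
// { success: boolean } as required by the startWorker declaration.
await queue.startWorker('emails', async (msg) => {
  console.log(`got ${msg.id} (attempt ${msg.attempt}):`, msg.message);
  return { success: true };
});
```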
package/dist/app.d.ts
ADDED
@@ -0,0 +1,92 @@
+import { RedisClient as BunRedisClient } from "bun";
+export type QueueOptions = {
+    redis: BunRedisClient;
+} | {
+    host: string;
+    port: string;
+    user?: string;
+    password?: string;
+    namespace?: string;
+    tls?: boolean;
+};
+export type CreateQueueOptions<QueueName extends string = string> = QueueName extends `${string}:batch` ? {
+    qname: QueueName;
+    maxsize?: number;
+    maxRetries?: number;
+    maxBackoffSeconds?: number;
+    every?: number;
+} : {
+    qname: QueueName;
+    maxsize?: number;
+    maxRetries?: number;
+    maxBackoffSeconds?: number;
+};
+export interface QueueAttributes {
+    maxsize: number;
+    created: number;
+    msgs: number;
+    isBatch: boolean;
+    maxRetries: number;
+    maxBackoffSeconds: number;
+    every?: number;
+}
+export interface SendMessageOptions {
+    qname: string;
+    message: string;
+    delay?: number;
+}
+export interface SendBatchMessageOptions {
+    qname: string;
+    batchId: string;
+    message: string;
+}
+export interface Message {
+    id: string;
+    message: string;
+    sent: number;
+    attempt: number;
+}
+export interface BatchMessage {
+    batchId: string;
+    messages: Omit<Message, "attempt">[];
+    sent: number;
+    attempt: number;
+}
+export declare class RedisQ {
+    private redis;
+    private redisUrl;
+    private ns;
+    private workers;
+    private batchJobs;
+    private isClosing;
+    constructor(options: QueueOptions);
+    private getKey;
+    private isBatchQueue;
+    createQueue<QueueName extends string>(options: CreateQueueOptions<QueueName>): Promise<boolean>;
+    listQueues(): Promise<string[]>;
+    getQueue(qname: string): Promise<QueueAttributes | null>;
+    deleteQueue(qname: string): Promise<boolean>;
+    sendMessage(options: SendMessageOptions): Promise<string>;
+    sendBatchMessage(options: SendBatchMessageOptions): Promise<string>;
+    private encodeMessage;
+    private decodeMessage;
+    private encodeBatchMeta;
+    private decodeBatchMeta;
+    private fetchMessages;
+    private fetchBatchMessage;
+    deleteMessage(qname: string, id: string): Promise<boolean>;
+    private deleteBatch;
+    private retryMessage;
+    private retryBatch;
+    private processDelayedMessages;
+    private processBatches;
+    startWorker<QueueName extends `${string}:batch` | (string & {})>(qname: QueueName, handler: (received: QueueName extends `${string}:batch` ? BatchMessage : Message) => Promise<{
+        success: boolean;
+    }>, options?: {
+        concurrency?: number;
+        silent?: boolean;
+    }): Promise<void>;
+    private runWorker;
+    stopWorker(qname: string): void;
+    close(): Promise<void>;
+}
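The declaration file encodes the batch/standard split from the README in the type system: a queue whose name ends in `:batch` gets the batch variant of `CreateQueueOptions` (with the extra `every` field), and its `startWorker` handler receives a `BatchMessage` instead of a `Message`. Below is a hedged sketch of how those conditional types are meant to be consumed; the queue name, `batchId`, interval, and payloads are illustrative, and the units of `every` are not stated in the declarations.

```typescript
import { RedisQ } from '@pingpolls/redisq';

const queue = new RedisQ({ host: '127.0.0.1', port: '6379', namespace: 'myapp' });

// A ':batch' suffix selects the batch branch of CreateQueueOptions, which
// also accepts `every` (the collection interval; units assumed here).
await queue.createQueue({ qname: 'metrics:batch', every: 5000 });

// Messages that should be processed together share a batchId.
await queue.sendBatchMessage({
  qname: 'metrics:batch',
  batchId: 'cpu',
  message: JSON.stringify({ load: 0.42 }),
});

// Because the queue name ends in ':batch', the handler parameter is typed as
// BatchMessage: a batchId, a shared attempt counter, and an array of messages.
await queue.startWorker('metrics:batch', async (batch) => {
  for (const m of batch.messages) {
    console.log(batch.batchId, m.id, m.message);
  }
  return { success: true };
});

// Shutdown: stop the worker loop, then close the Redis connection.
queue.stopWorker('metrics:batch');
await queue.close();
```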