@bloomneo/appkit 1.5.1 → 1.5.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AGENTS.md +195 -0
- package/CHANGELOG.md +253 -0
- package/README.md +147 -799
- package/bin/commands/generate.js +7 -7
- package/cookbook/README.md +26 -0
- package/cookbook/api-key-service.ts +106 -0
- package/cookbook/auth-protected-crud.ts +112 -0
- package/cookbook/file-upload-pipeline.ts +113 -0
- package/cookbook/multi-tenant-saas.ts +87 -0
- package/cookbook/real-time-chat.ts +121 -0
- package/dist/auth/auth.d.ts +21 -4
- package/dist/auth/auth.d.ts.map +1 -1
- package/dist/auth/auth.js +56 -44
- package/dist/auth/auth.js.map +1 -1
- package/dist/auth/defaults.d.ts +1 -1
- package/dist/auth/defaults.js +35 -35
- package/dist/cache/cache.d.ts +29 -6
- package/dist/cache/cache.d.ts.map +1 -1
- package/dist/cache/cache.js +72 -44
- package/dist/cache/cache.js.map +1 -1
- package/dist/cache/defaults.js +25 -25
- package/dist/cache/index.d.ts +19 -10
- package/dist/cache/index.d.ts.map +1 -1
- package/dist/cache/index.js +21 -18
- package/dist/cache/index.js.map +1 -1
- package/dist/config/defaults.d.ts +1 -1
- package/dist/config/defaults.js +8 -8
- package/dist/config/index.d.ts +3 -3
- package/dist/config/index.js +4 -4
- package/dist/database/adapters/mongoose.js +2 -2
- package/dist/database/adapters/prisma.js +2 -2
- package/dist/database/defaults.d.ts +1 -1
- package/dist/database/defaults.js +4 -4
- package/dist/database/index.js +2 -2
- package/dist/database/index.js.map +1 -1
- package/dist/email/defaults.js +20 -20
- package/dist/error/defaults.d.ts +1 -1
- package/dist/error/defaults.js +12 -12
- package/dist/error/error.d.ts +12 -0
- package/dist/error/error.d.ts.map +1 -1
- package/dist/error/error.js +19 -0
- package/dist/error/error.js.map +1 -1
- package/dist/error/index.d.ts +14 -3
- package/dist/error/index.d.ts.map +1 -1
- package/dist/error/index.js +14 -3
- package/dist/error/index.js.map +1 -1
- package/dist/event/defaults.js +30 -30
- package/dist/logger/defaults.d.ts +1 -1
- package/dist/logger/defaults.js +40 -40
- package/dist/logger/index.d.ts +1 -0
- package/dist/logger/index.d.ts.map +1 -1
- package/dist/logger/index.js.map +1 -1
- package/dist/logger/logger.d.ts +8 -0
- package/dist/logger/logger.d.ts.map +1 -1
- package/dist/logger/logger.js +13 -3
- package/dist/logger/logger.js.map +1 -1
- package/dist/logger/transports/console.js +1 -1
- package/dist/logger/transports/http.d.ts +1 -1
- package/dist/logger/transports/http.js +1 -1
- package/dist/logger/transports/webhook.d.ts +1 -1
- package/dist/logger/transports/webhook.js +1 -1
- package/dist/queue/defaults.d.ts +2 -2
- package/dist/queue/defaults.js +38 -38
- package/dist/security/defaults.d.ts +1 -1
- package/dist/security/defaults.js +29 -29
- package/dist/security/index.d.ts +1 -1
- package/dist/security/index.js +3 -3
- package/dist/security/security.d.ts +1 -1
- package/dist/security/security.js +4 -4
- package/dist/storage/defaults.js +19 -19
- package/dist/util/defaults.d.ts +1 -1
- package/dist/util/defaults.js +34 -34
- package/dist/util/env.d.ts +35 -0
- package/dist/util/env.d.ts.map +1 -0
- package/dist/util/env.js +50 -0
- package/dist/util/env.js.map +1 -0
- package/dist/util/errors.d.ts +52 -0
- package/dist/util/errors.d.ts.map +1 -0
- package/dist/util/errors.js +82 -0
- package/dist/util/errors.js.map +1 -0
- package/examples/.env.example +80 -0
- package/examples/README.md +16 -0
- package/examples/auth.ts +228 -0
- package/examples/cache.ts +36 -0
- package/examples/config.ts +45 -0
- package/examples/database.ts +69 -0
- package/examples/email.ts +53 -0
- package/examples/error.ts +50 -0
- package/examples/event.ts +42 -0
- package/examples/logger.ts +41 -0
- package/examples/queue.ts +58 -0
- package/examples/security.ts +46 -0
- package/examples/storage.ts +44 -0
- package/examples/util.ts +47 -0
- package/llms.txt +591 -0
- package/package.json +19 -10
- package/src/auth/README.md +850 -0
- package/src/cache/README.md +756 -0
- package/src/config/README.md +604 -0
- package/src/database/README.md +818 -0
- package/src/email/README.md +759 -0
- package/src/error/README.md +660 -0
- package/src/event/README.md +729 -0
- package/src/logger/README.md +435 -0
- package/src/queue/README.md +851 -0
- package/src/security/README.md +612 -0
- package/src/storage/README.md +1008 -0
- package/src/util/README.md +955 -0
- package/bin/templates/backend/docs/APPKIT_CLI.md +0 -507
- package/bin/templates/backend/docs/APPKIT_COMMENTS_GUIDELINES.md +0 -61
- package/bin/templates/backend/docs/APPKIT_LLM_GUIDE.md +0 -2539
|
@@ -0,0 +1,851 @@
|
|
|
1
|
+
# @bloomneo/appkit - Queue Module 🚀
|
|
2
|
+
|
|
3
|
+
[![npm version](https://img.shields.io/npm/v/@bloomneo/appkit.svg)](https://www.npmjs.com/package/@bloomneo/appkit)
|
|
4
|
+
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
|
|
5
|
+
|
|
6
|
+
> Ultra-simple job queuing that just works with automatic transport detection
|
|
7
|
+
|
|
8
|
+
**One function** returns a queue with all methods. Zero configuration needed,
|
|
9
|
+
production-ready by default, with built-in retry logic and distributed
|
|
10
|
+
processing.
|
|
11
|
+
|
|
12
|
+
## 🚀 Why Choose This?
|
|
13
|
+
|
|
14
|
+
- **⚡ One Function** - Just `queueClass.get()`, everything else is automatic
|
|
15
|
+
- **🔄 Auto-Transport Detection** - Memory → Redis → Database based on
|
|
16
|
+
environment
|
|
17
|
+
- **🔧 Zero Configuration** - Smart defaults for everything
|
|
18
|
+
- **🔁 Built-in Retry Logic** - Exponential backoff with jitter
|
|
19
|
+
- **📊 Production Monitoring** - Stats, health checks, job tracking
|
|
20
|
+
- **🛡️ Graceful Shutdown** - Waits for jobs to complete
|
|
21
|
+
- **🤖 AI-Ready** - Optimized for LLM code generation
|
|
22
|
+
|
|
23
|
+
## 📦 Installation
|
|
24
|
+
|
|
25
|
+
```bash
|
|
26
|
+
npm install @bloomneo/appkit
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
## 🏃‍♂️ Quick Start (30 seconds)
|
|
30
|
+
|
|
31
|
+
```bash
|
|
32
|
+
# Optional: Set environment variables for production
|
|
33
|
+
echo "REDIS_URL=redis://localhost:6379" > .env
|
|
34
|
+
# OR
|
|
35
|
+
echo "DATABASE_URL=postgres://user:pass@localhost/db" > .env
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
```typescript
|
|
39
|
+
import { queueClass } from '@bloomneo/appkit/queue';
|
|
40
|
+
|
|
41
|
+
const queue = queueClass.get();
|
|
42
|
+
|
|
43
|
+
// Add jobs
|
|
44
|
+
const jobId = await queue.add('email', {
|
|
45
|
+
to: 'user@example.com',
|
|
46
|
+
subject: 'Welcome!',
|
|
47
|
+
body: 'Thanks for signing up',
|
|
48
|
+
});
|
|
49
|
+
|
|
50
|
+
// Process jobs
|
|
51
|
+
queue.process('email', async (data) => {
|
|
52
|
+
console.log(`Sending email to ${data.to}`);
|
|
53
|
+
await sendEmail(data);
|
|
54
|
+
return { sent: true };
|
|
55
|
+
});
|
|
56
|
+
|
|
57
|
+
// Schedule delayed jobs
|
|
58
|
+
await queue.schedule('reminder', { userId: 123 }, 24 * 60 * 60 * 1000); // 24 hours
|
|
59
|
+
```
|
|
60
|
+
|
|
61
|
+
**That's it!** No configuration, no setup, production-ready.
|
|
62
|
+
|
|
63
|
+
## ✨ What You Get Instantly
|
|
64
|
+
|
|
65
|
+
- **✅ Memory Queue** - Development (no dependencies)
|
|
66
|
+
- **✅ Redis Queue** - Production distributed (auto-detected from `REDIS_URL`)
|
|
67
|
+
- **✅ Database Queue** - Persistent storage (auto-detected from `DATABASE_URL`)
|
|
68
|
+
- **✅ Automatic Retry** - 3 attempts with exponential backoff
|
|
69
|
+
- **✅ Job Scheduling** - Delayed execution with persistence
|
|
70
|
+
- **✅ Priority Queues** - High priority jobs processed first
|
|
71
|
+
- **✅ Stats & Monitoring** - Real-time queue metrics
|
|
72
|
+
- **✅ Graceful Shutdown** - Waits for active jobs
|
|
73
|
+
|
|
74
|
+
## 🔄 Auto-Transport Detection
|
|
75
|
+
|
|
76
|
+
The queue **automatically detects** what you need:
|
|
77
|
+
|
|
78
|
+
| Environment Variable | Transport Used | What You Get |
|
|
79
|
+
| -------------------- | -------------- | ------------------------- |
|
|
80
|
+
| _Nothing_ | Memory | Development queuing |
|
|
81
|
+
| `REDIS_URL` | Redis | Distributed production |
|
|
82
|
+
| `DATABASE_URL` | Database | Persistent simple storage |
|
|
83
|
+
|
|
84
|
+
**Set environment variables, get enterprise features. No code changes.**
|
|
85
|
+
|
|
86
|
+
## 🏢 Production Ready
|
|
87
|
+
|
|
88
|
+
```bash
|
|
89
|
+
# Minimal setup for production
|
|
90
|
+
REDIS_URL=redis://localhost:6379
|
|
91
|
+
BLOOM_QUEUE_CONCURRENCY=10
|
|
92
|
+
BLOOM_QUEUE_WORKER=true
|
|
93
|
+
```
|
|
94
|
+
|
|
95
|
+
```typescript
|
|
96
|
+
// Same code, production features
|
|
97
|
+
const queue = queueClass.get();
|
|
98
|
+
await queue.add('webhook', { url: 'https://api.example.com', data: payload });
|
|
99
|
+
// → Redis distributed queue
|
|
100
|
+
// → 10 concurrent workers
|
|
101
|
+
// → Automatic retry with backoff
|
|
102
|
+
// → Stats and monitoring
|
|
103
|
+
```
|
|
104
|
+
|
|
105
|
+
## 📋 Complete API (It's Tiny)
|
|
106
|
+
|
|
107
|
+
### Core Methods
|
|
108
|
+
|
|
109
|
+
```typescript
|
|
110
|
+
import { queueClass } from '@bloomneo/appkit/queue';
|
|
111
|
+
|
|
112
|
+
const queue = queueClass.get();
|
|
113
|
+
|
|
114
|
+
// Job management
|
|
115
|
+
await queue.add(jobType, data, options?); // Add job
|
|
116
|
+
await queue.schedule(jobType, data, delay); // Schedule delayed job
|
|
117
|
+
queue.process(jobType, handler); // Process jobs
|
|
118
|
+
|
|
119
|
+
// Queue control
|
|
120
|
+
await queue.pause(jobType?); // Pause processing
|
|
121
|
+
await queue.resume(jobType?); // Resume processing
|
|
122
|
+
|
|
123
|
+
// Monitoring
|
|
124
|
+
await queue.getStats(jobType?); // Get statistics
|
|
125
|
+
await queue.getJobs(status, jobType?, limit?); // Get jobs by status
|
|
126
|
+
await queue.retry(jobId); // Retry failed job
|
|
127
|
+
await queue.remove(jobId); // Remove job
|
|
128
|
+
await queue.clean(status, grace?); // Clean old jobs
|
|
129
|
+
```
|
|
130
|
+
|
|
131
|
+
### Utility Methods
|
|
132
|
+
|
|
133
|
+
```typescript
|
|
134
|
+
queueClass.getActiveTransport(); // See which transport is running
|
|
135
|
+
queueClass.hasTransport('redis'); // Check specific transport
|
|
136
|
+
queueClass.getConfig(); // Debug configuration
|
|
137
|
+
queueClass.getHealth(); // Health status
|
|
138
|
+
queueClass.clear(); // Clear all (testing)
|
|
139
|
+
```
|
|
140
|
+
|
|
141
|
+
## 🌍 Environment Variables
|
|
142
|
+
|
|
143
|
+
### Basic Setup
|
|
144
|
+
|
|
145
|
+
```bash
|
|
146
|
+
# Transport selection (auto-detected)
|
|
147
|
+
REDIS_URL=redis://localhost:6379 # Enables Redis transport
|
|
148
|
+
DATABASE_URL=postgres://user:pass@host/db # Enables Database transport
|
|
149
|
+
|
|
150
|
+
# Worker configuration
|
|
151
|
+
BLOOM_QUEUE_WORKER=true # Enable job processing
|
|
152
|
+
BLOOM_QUEUE_CONCURRENCY=10 # Jobs processed simultaneously
|
|
153
|
+
```
|
|
154
|
+
|
|
155
|
+
### Advanced Configuration
|
|
156
|
+
|
|
157
|
+
```bash
|
|
158
|
+
# Job retry settings
|
|
159
|
+
BLOOM_QUEUE_MAX_ATTEMPTS=5 # Max retry attempts (default: 3)
|
|
160
|
+
BLOOM_QUEUE_RETRY_DELAY=10000 # Base retry delay in ms (default: 5000)
|
|
161
|
+
BLOOM_QUEUE_RETRY_BACKOFF=exponential # fixed|exponential (default: exponential)
|
|
162
|
+
|
|
163
|
+
# Job cleanup
|
|
164
|
+
BLOOM_QUEUE_REMOVE_COMPLETE=100 # Keep last 100 completed jobs
|
|
165
|
+
BLOOM_QUEUE_REMOVE_FAILED=500 # Keep last 500 failed jobs
|
|
166
|
+
|
|
167
|
+
# Performance tuning
|
|
168
|
+
BLOOM_QUEUE_DEFAULT_PRIORITY=0 # Default job priority
|
|
169
|
+
BLOOM_QUEUE_SHUTDOWN_TIMEOUT=30000 # Graceful shutdown timeout
|
|
170
|
+
```
|
|
171
|
+
|
|
172
|
+
### Transport-Specific Settings
|
|
173
|
+
|
|
174
|
+
```bash
|
|
175
|
+
# Memory Transport (Development)
|
|
176
|
+
BLOOM_QUEUE_MEMORY_MAX_JOBS=1000 # Max jobs in memory
|
|
177
|
+
BLOOM_QUEUE_MEMORY_CLEANUP=30000 # Cleanup interval
|
|
178
|
+
|
|
179
|
+
# Redis Transport (Production)
|
|
180
|
+
BLOOM_QUEUE_REDIS_PREFIX=myapp # Redis key prefix
|
|
181
|
+
BLOOM_QUEUE_REDIS_RETRIES=3 # Connection retries
|
|
182
|
+
|
|
183
|
+
# Database Transport (Simple Persistent)
|
|
184
|
+
BLOOM_QUEUE_DB_TABLE=queue_jobs # Table name
|
|
185
|
+
BLOOM_QUEUE_DB_POLL=5000 # Polling interval
|
|
186
|
+
BLOOM_QUEUE_DB_BATCH=50 # Batch size
|
|
187
|
+
```
|
|
188
|
+
|
|
189
|
+
## 💡 Real Examples
|
|
190
|
+
|
|
191
|
+
### Express API with Background Jobs
|
|
192
|
+
|
|
193
|
+
```typescript
|
|
194
|
+
import express from 'express';
|
|
195
|
+
import { queueClass } from '@bloomneo/appkit/queue';
|
|
196
|
+
|
|
197
|
+
const app = express();
|
|
198
|
+
const queue = queueClass.get();
|
|
199
|
+
|
|
200
|
+
// Setup job processors
|
|
201
|
+
queue.process('email', async (data) => {
|
|
202
|
+
await sendEmail(data.to, data.subject, data.body);
|
|
203
|
+
return { delivered: true };
|
|
204
|
+
});
|
|
205
|
+
|
|
206
|
+
queue.process('webhook', async (data) => {
|
|
207
|
+
const response = await fetch(data.url, {
|
|
208
|
+
method: 'POST',
|
|
209
|
+
body: JSON.stringify(data.payload),
|
|
210
|
+
});
|
|
211
|
+
return { status: response.status };
|
|
212
|
+
});
|
|
213
|
+
|
|
214
|
+
queue.process('image-resize', async (data) => {
|
|
215
|
+
const resized = await resizeImage(data.imageUrl, data.width, data.height);
|
|
216
|
+
return { resizedUrl: resized };
|
|
217
|
+
});
|
|
218
|
+
|
|
219
|
+
// API endpoints that queue jobs
|
|
220
|
+
app.post('/register', async (req, res) => {
|
|
221
|
+
const { email, name } = req.body;
|
|
222
|
+
|
|
223
|
+
// Create user account
|
|
224
|
+
const user = await db.user.create({ data: { email, name } });
|
|
225
|
+
|
|
226
|
+
// Queue welcome email
|
|
227
|
+
await queue.add('email', {
|
|
228
|
+
to: email,
|
|
229
|
+
subject: 'Welcome to our platform!',
|
|
230
|
+
body: `Hi ${name}, welcome aboard!`,
|
|
231
|
+
template: 'welcome',
|
|
232
|
+
});
|
|
233
|
+
|
|
234
|
+
// Queue webhook notification
|
|
235
|
+
await queue.add('webhook', {
|
|
236
|
+
url: 'https://analytics.example.com/events',
|
|
237
|
+
payload: { event: 'user_registered', userId: user.id },
|
|
238
|
+
});
|
|
239
|
+
|
|
240
|
+
res.json({ success: true, userId: user.id });
|
|
241
|
+
});
|
|
242
|
+
|
|
243
|
+
app.post('/upload-avatar', async (req, res) => {
|
|
244
|
+
const { userId, imageUrl } = req.body;
|
|
245
|
+
|
|
246
|
+
// Queue image processing
|
|
247
|
+
const jobId = await queue.add(
|
|
248
|
+
'image-resize',
|
|
249
|
+
{
|
|
250
|
+
imageUrl,
|
|
251
|
+
userId,
|
|
252
|
+
width: 200,
|
|
253
|
+
height: 200,
|
|
254
|
+
format: 'webp',
|
|
255
|
+
},
|
|
256
|
+
{
|
|
257
|
+
priority: 5, // High priority
|
|
258
|
+
}
|
|
259
|
+
);
|
|
260
|
+
|
|
261
|
+
res.json({ success: true, jobId });
|
|
262
|
+
});
|
|
263
|
+
|
|
264
|
+
app.listen(3000, () => {
|
|
265
|
+
console.log('🚀 Server ready with background jobs');
|
|
266
|
+
});
|
|
267
|
+
```
|
|
268
|
+
|
|
269
|
+
### Fastify with Job Scheduling
|
|
270
|
+
|
|
271
|
+
```typescript
|
|
272
|
+
import Fastify from 'fastify';
|
|
273
|
+
import { queueClass } from '@bloomneo/appkit/queue';
|
|
274
|
+
|
|
275
|
+
const fastify = Fastify();
|
|
276
|
+
const queue = queueClass.get();
|
|
277
|
+
|
|
278
|
+
// Setup scheduled job processors
|
|
279
|
+
queue.process('reminder', async (data) => {
|
|
280
|
+
const user = await db.user.findUnique({ where: { id: data.userId } });
|
|
281
|
+
|
|
282
|
+
await sendEmail(user.email, 'Reminder', data.message);
|
|
283
|
+
return { reminded: true };
|
|
284
|
+
});
|
|
285
|
+
|
|
286
|
+
queue.process('subscription-renewal', async (data) => {
|
|
287
|
+
const subscription = await processRenewal(data.subscriptionId);
|
|
288
|
+
return { renewed: subscription.renewed };
|
|
289
|
+
});
|
|
290
|
+
|
|
291
|
+
// Schedule future jobs
|
|
292
|
+
fastify.post('/schedule-reminder', async (request, reply) => {
|
|
293
|
+
const { userId, message, delayMinutes } = request.body;
|
|
294
|
+
|
|
295
|
+
const delay = delayMinutes * 60 * 1000; // Convert to ms
|
|
296
|
+
|
|
297
|
+
const jobId = await queue.schedule(
|
|
298
|
+
'reminder',
|
|
299
|
+
{
|
|
300
|
+
userId,
|
|
301
|
+
message,
|
|
302
|
+
},
|
|
303
|
+
delay
|
|
304
|
+
);
|
|
305
|
+
|
|
306
|
+
return { success: true, jobId, scheduledFor: new Date(Date.now() + delay) };
|
|
307
|
+
});
|
|
308
|
+
|
|
309
|
+
// Recurring subscription processing
|
|
310
|
+
fastify.post('/setup-subscription', async (request, reply) => {
|
|
311
|
+
const { userId, plan } = request.body;
|
|
312
|
+
|
|
313
|
+
const subscription = await db.subscription.create({
|
|
314
|
+
data: {
|
|
315
|
+
userId,
|
|
316
|
+
plan,
|
|
317
|
+
renewsAt: new Date(Date.now() + 30 * 24 * 60 * 60 * 1000),
|
|
318
|
+
},
|
|
319
|
+
});
|
|
320
|
+
|
|
321
|
+
// Schedule renewal in 30 days
|
|
322
|
+
await queue.schedule(
|
|
323
|
+
'subscription-renewal',
|
|
324
|
+
{
|
|
325
|
+
subscriptionId: subscription.id,
|
|
326
|
+
},
|
|
327
|
+
30 * 24 * 60 * 60 * 1000
|
|
328
|
+
);
|
|
329
|
+
|
|
330
|
+
return { success: true, subscription };
|
|
331
|
+
});
|
|
332
|
+
|
|
333
|
+
fastify.listen({ port: 3000 });
|
|
334
|
+
```
|
|
335
|
+
|
|
336
|
+
### Background Worker Process
|
|
337
|
+
|
|
338
|
+
```typescript
|
|
339
|
+
// worker.ts - Separate worker process
|
|
340
|
+
import { queueClass } from '@bloomneo/appkit/queue';
|
|
341
|
+
|
|
342
|
+
const queue = queueClass.get();
|
|
343
|
+
|
|
344
|
+
// Heavy processing jobs
|
|
345
|
+
queue.process('data-export', async (data) => {
|
|
346
|
+
const { userId, format } = data;
|
|
347
|
+
|
|
348
|
+
console.log(`Starting data export for user ${userId}`);
|
|
349
|
+
|
|
350
|
+
// Simulate heavy work
|
|
351
|
+
const userData = await fetchAllUserData(userId);
|
|
352
|
+
const exportFile = await generateExport(userData, format);
|
|
353
|
+
const downloadUrl = await uploadToS3(exportFile);
|
|
354
|
+
|
|
355
|
+
// Notify user
|
|
356
|
+
await queue.add('email', {
|
|
357
|
+
to: userData.email,
|
|
358
|
+
subject: 'Your data export is ready',
|
|
359
|
+
body: `Download your data: ${downloadUrl}`,
|
|
360
|
+
});
|
|
361
|
+
|
|
362
|
+
return { downloadUrl, size: exportFile.size };
|
|
363
|
+
});
|
|
364
|
+
|
|
365
|
+
queue.process('video-transcode', async (data) => {
|
|
366
|
+
const { videoId, quality } = data;
|
|
367
|
+
|
|
368
|
+
console.log(`Transcoding video ${videoId} to ${quality}`);
|
|
369
|
+
|
|
370
|
+
const video = await db.video.findUnique({ where: { id: videoId } });
|
|
371
|
+
const transcodedUrl = await transcodeVideo(video.url, quality);
|
|
372
|
+
|
|
373
|
+
await db.video.update({
|
|
374
|
+
where: { id: videoId },
|
|
375
|
+
data: { [`${quality}Url`]: transcodedUrl },
|
|
376
|
+
});
|
|
377
|
+
|
|
378
|
+
return { transcodedUrl, quality };
|
|
379
|
+
});
|
|
380
|
+
|
|
381
|
+
// Graceful shutdown handling
|
|
382
|
+
process.on('SIGTERM', async () => {
|
|
383
|
+
console.log('Worker shutting down gracefully...');
|
|
384
|
+
await queue.close();
|
|
385
|
+
process.exit(0);
|
|
386
|
+
});
|
|
387
|
+
|
|
388
|
+
console.log('🔧 Background worker started');
|
|
389
|
+
```
|
|
390
|
+
|
|
391
|
+
### Job Monitoring Dashboard
|
|
392
|
+
|
|
393
|
+
```typescript
|
|
394
|
+
import express from 'express';
|
|
395
|
+
import { queueClass } from '@bloomneo/appkit/queue';
|
|
396
|
+
|
|
397
|
+
const app = express();
|
|
398
|
+
const queue = queueClass.get();
|
|
399
|
+
|
|
400
|
+
// Health check endpoint
|
|
401
|
+
app.get('/health', async (req, res) => {
|
|
402
|
+
const health = queueClass.getHealth();
|
|
403
|
+
const stats = await queue.getStats();
|
|
404
|
+
|
|
405
|
+
res.json({
|
|
406
|
+
status: health.status,
|
|
407
|
+
transport: queueClass.getActiveTransport(),
|
|
408
|
+
stats,
|
|
409
|
+
timestamp: new Date().toISOString(),
|
|
410
|
+
});
|
|
411
|
+
});
|
|
412
|
+
|
|
413
|
+
// Queue statistics
|
|
414
|
+
app.get('/api/queue/stats', async (req, res) => {
|
|
415
|
+
const { jobType } = req.query;
|
|
416
|
+
const stats = await queue.getStats(jobType as string);
|
|
417
|
+
|
|
418
|
+
res.json(stats);
|
|
419
|
+
});
|
|
420
|
+
|
|
421
|
+
// Get jobs by status
|
|
422
|
+
app.get('/api/queue/jobs', async (req, res) => {
|
|
423
|
+
const { status, jobType, limit = 50 } = req.query;
|
|
424
|
+
|
|
425
|
+
const jobs = await queue.getJobs(
|
|
426
|
+
status as any,
|
|
427
|
+
jobType as string,
|
|
428
|
+
parseInt(limit as string)
|
|
429
|
+
);
|
|
430
|
+
|
|
431
|
+
res.json(jobs);
|
|
432
|
+
});
|
|
433
|
+
|
|
434
|
+
// Retry failed job
|
|
435
|
+
app.post('/api/queue/retry/:jobId', async (req, res) => {
|
|
436
|
+
const { jobId } = req.params;
|
|
437
|
+
|
|
438
|
+
try {
|
|
439
|
+
await queue.retry(jobId);
|
|
440
|
+
res.json({ success: true });
|
|
441
|
+
} catch (error) {
|
|
442
|
+
res.status(400).json({ error: error.message });
|
|
443
|
+
}
|
|
444
|
+
});
|
|
445
|
+
|
|
446
|
+
// Remove job
|
|
447
|
+
app.delete('/api/queue/jobs/:jobId', async (req, res) => {
|
|
448
|
+
const { jobId } = req.params;
|
|
449
|
+
|
|
450
|
+
try {
|
|
451
|
+
await queue.remove(jobId);
|
|
452
|
+
res.json({ success: true });
|
|
453
|
+
} catch (error) {
|
|
454
|
+
res.status(400).json({ error: error.message });
|
|
455
|
+
}
|
|
456
|
+
});
|
|
457
|
+
|
|
458
|
+
// Clean old jobs
|
|
459
|
+
app.post('/api/queue/clean', async (req, res) => {
|
|
460
|
+
const { status, grace = 86400000 } = req.body; // Default 24 hours
|
|
461
|
+
|
|
462
|
+
try {
|
|
463
|
+
await queue.clean(status, grace);
|
|
464
|
+
res.json({ success: true });
|
|
465
|
+
} catch (error) {
|
|
466
|
+
res.status(400).json({ error: error.message });
|
|
467
|
+
}
|
|
468
|
+
});
|
|
469
|
+
|
|
470
|
+
app.listen(4000, () => {
|
|
471
|
+
console.log('📊 Queue monitoring dashboard on port 4000');
|
|
472
|
+
});
|
|
473
|
+
```
|
|
474
|
+
|
|
475
|
+
### Advanced Job Options
|
|
476
|
+
|
|
477
|
+
```typescript
|
|
478
|
+
import { queueClass } from '@bloomneo/appkit/queue';
|
|
479
|
+
|
|
480
|
+
const queue = queueClass.get();
|
|
481
|
+
|
|
482
|
+
// High priority job
|
|
483
|
+
await queue.add(
|
|
484
|
+
'critical-alert',
|
|
485
|
+
{
|
|
486
|
+
message: 'System overload detected',
|
|
487
|
+
severity: 'critical',
|
|
488
|
+
},
|
|
489
|
+
{
|
|
490
|
+
priority: 10, // Higher number = higher priority
|
|
491
|
+
attempts: 5, // Custom retry attempts
|
|
492
|
+
backoff: 'exponential',
|
|
493
|
+
}
|
|
494
|
+
);
|
|
495
|
+
|
|
496
|
+
// Low priority batch job
|
|
497
|
+
await queue.add(
|
|
498
|
+
'analytics-batch',
|
|
499
|
+
{
|
|
500
|
+
date: '2024-01-15',
|
|
501
|
+
type: 'daily-report',
|
|
502
|
+
},
|
|
503
|
+
{
|
|
504
|
+
priority: -5, // Lower priority
|
|
505
|
+
attempts: 1, // Don't retry batch jobs
|
|
506
|
+
removeOnComplete: 10, // Keep only 10 completed
|
|
507
|
+
removeOnFail: 50, // Keep 50 failed for debugging
|
|
508
|
+
}
|
|
509
|
+
);
|
|
510
|
+
|
|
511
|
+
// Job with custom retry strategy
|
|
512
|
+
await queue.add(
|
|
513
|
+
'api-sync',
|
|
514
|
+
{
|
|
515
|
+
endpoint: 'https://api.partner.com/sync',
|
|
516
|
+
data: payload,
|
|
517
|
+
},
|
|
518
|
+
{
|
|
519
|
+
attempts: 3,
|
|
520
|
+
backoff: 'fixed', // Fixed delay between retries
|
|
521
|
+
}
|
|
522
|
+
);
|
|
523
|
+
|
|
524
|
+
// Scheduled job with priority
|
|
525
|
+
const reminderDate = new Date('2024-12-25T09:00:00Z');
|
|
526
|
+
const delay = reminderDate.getTime() - Date.now();
|
|
527
|
+
|
|
528
|
+
await queue.schedule(
|
|
529
|
+
'holiday-reminder',
|
|
530
|
+
{
|
|
531
|
+
type: 'holiday',
|
|
532
|
+
message: 'Merry Christmas!',
|
|
533
|
+
},
|
|
534
|
+
delay
|
|
535
|
+
);
|
|
536
|
+
```
|
|
537
|
+
|
|
538
|
+
## 🔧 Database Setup (for Database Transport)
|
|
539
|
+
|
|
540
|
+
If using the database transport, add this to your Prisma schema:
|
|
541
|
+
|
|
542
|
+
```prisma
|
|
543
|
+
model QueueJob {
|
|
544
|
+
id String @id @default(cuid())
|
|
545
|
+
queue String // Job type
|
|
546
|
+
type String // Job type (compatibility)
|
|
547
|
+
payload Json // Job data
|
|
548
|
+
result Json? // Job result
|
|
549
|
+
error Json? // Error details
|
|
550
|
+
|
|
551
|
+
status String @default("pending") // pending, processing, completed, failed
|
|
552
|
+
attempts Int @default(0)
|
|
553
|
+
maxAttempts Int @default(3)
|
|
554
|
+
priority Int @default(0)
|
|
555
|
+
|
|
556
|
+
runAt DateTime @default(now())
|
|
557
|
+
processedAt DateTime?
|
|
558
|
+
completedAt DateTime?
|
|
559
|
+
failedAt DateTime?
|
|
560
|
+
|
|
561
|
+
createdAt DateTime @default(now())
|
|
562
|
+
updatedAt DateTime @updatedAt
|
|
563
|
+
|
|
564
|
+
@@index([queue, status, priority, runAt])
|
|
565
|
+
@@index([status, runAt])
|
|
566
|
+
@@map("queue_jobs")
|
|
567
|
+
}
|
|
568
|
+
```
|
|
569
|
+
|
|
570
|
+
Then run your migration:
|
|
571
|
+
|
|
572
|
+
```bash
|
|
573
|
+
npx prisma migrate dev --name add_queue_jobs
|
|
574
|
+
```
|
|
575
|
+
|
|
576
|
+
## 🧪 Testing
|
|
577
|
+
|
|
578
|
+
```typescript
|
|
579
|
+
import { queueClass } from '@bloomneo/appkit/queue';
|
|
580
|
+
|
|
581
|
+
describe('Queue Tests', () => {
|
|
582
|
+
afterEach(async () => {
|
|
583
|
+
// IMPORTANT: Clear queue state between tests
|
|
584
|
+
await queueClass.clear();
|
|
585
|
+
});
|
|
586
|
+
|
|
587
|
+
test('should process jobs', async () => {
|
|
588
|
+
const queue = queueClass.get();
|
|
589
|
+
const results: any[] = [];
|
|
590
|
+
|
|
591
|
+
// Setup processor
|
|
592
|
+
queue.process('test-job', async (data) => {
|
|
593
|
+
results.push(data);
|
|
594
|
+
return { processed: true };
|
|
595
|
+
});
|
|
596
|
+
|
|
597
|
+
// Add job
|
|
598
|
+
const jobId = await queue.add('test-job', { message: 'hello' });
|
|
599
|
+
|
|
600
|
+
// Wait for processing
|
|
601
|
+
await new Promise((resolve) => setTimeout(resolve, 100));
|
|
602
|
+
|
|
603
|
+
expect(results).toHaveLength(1);
|
|
604
|
+
expect(results[0].message).toBe('hello');
|
|
605
|
+
});
|
|
606
|
+
|
|
607
|
+
test('should retry failed jobs', async () => {
|
|
608
|
+
const queue = queueClass.get();
|
|
609
|
+
let attempts = 0;
|
|
610
|
+
|
|
611
|
+
queue.process('failing-job', async (data) => {
|
|
612
|
+
attempts++;
|
|
613
|
+
if (attempts < 3) {
|
|
614
|
+
throw new Error('Simulated failure');
|
|
615
|
+
}
|
|
616
|
+
return { success: true };
|
|
617
|
+
});
|
|
618
|
+
|
|
619
|
+
await queue.add('failing-job', { test: true }, { attempts: 3 });
|
|
620
|
+
|
|
621
|
+
// Wait for retries
|
|
622
|
+
await new Promise((resolve) => setTimeout(resolve, 1000));
|
|
623
|
+
|
|
624
|
+
expect(attempts).toBe(3);
|
|
625
|
+
});
|
|
626
|
+
});
|
|
627
|
+
```
|
|
628
|
+
|
|
629
|
+
## 🚀 Performance
|
|
630
|
+
|
|
631
|
+
- **Memory Transport**: 10,000+ jobs/second
|
|
632
|
+
- **Redis Transport**: 1,000+ jobs/second (network dependent)
|
|
633
|
+
- **Database Transport**: 100+ jobs/second (database dependent)
|
|
634
|
+
- **Startup Time**: < 100ms for any transport
|
|
635
|
+
- **Memory Usage**: < 10MB baseline
|
|
636
|
+
|
|
637
|
+
## 📈 Scaling
|
|
638
|
+
|
|
639
|
+
### Development → Production
|
|
640
|
+
|
|
641
|
+
```typescript
|
|
642
|
+
// Same code works everywhere
|
|
643
|
+
const queue = queueClass.get();
|
|
644
|
+
await queue.add('process-payment', { orderId: 123, amount: 99.99 });
|
|
645
|
+
|
|
646
|
+
// Development: Memory queue (no setup)
|
|
647
|
+
// Production: Redis queue (distributed workers)
|
|
648
|
+
```
|
|
649
|
+
|
|
650
|
+
### Transport Comparison
|
|
651
|
+
|
|
652
|
+
| Transport | Best For | Persistence | Distribution | Setup |
|
|
653
|
+
| ------------ | -------------------- | ----------- | ------------ | ------------ |
|
|
654
|
+
| **Memory** | Development, Testing | ❌ | ❌ | None |
|
|
655
|
+
| **Redis** | Production, Scale | ✅ | ✅ | Redis server |
|
|
656
|
+
| **Database** | Simple Persistent | ✅ | ⚠️ | Existing DB |
|
|
657
|
+
|
|
658
|
+
### Deployment Patterns
|
|
659
|
+
|
|
660
|
+
```bash
|
|
661
|
+
# Single server with database queue
|
|
662
|
+
DATABASE_URL=postgres://...
|
|
663
|
+
BLOOM_QUEUE_WORKER=true
|
|
664
|
+
|
|
665
|
+
# Distributed with Redis
|
|
666
|
+
REDIS_URL=redis://...
|
|
667
|
+
BLOOM_QUEUE_WORKER=true
|
|
668
|
+
BLOOM_QUEUE_CONCURRENCY=20
|
|
669
|
+
|
|
670
|
+
# Separate worker processes
|
|
671
|
+
REDIS_URL=redis://...
|
|
672
|
+
BLOOM_QUEUE_WORKER=true # Only in worker processes
|
|
673
|
+
```
|
|
674
|
+
|
|
675
|
+
## 🎯 When to Use What
|
|
676
|
+
|
|
677
|
+
### Transport Selection
|
|
678
|
+
|
|
679
|
+
- **Memory**: Development, testing, single-process apps
|
|
680
|
+
- **Redis**: Production, multiple workers, high throughput
|
|
681
|
+
- **Database**: Simple persistence, existing DB infrastructure
|
|
682
|
+
|
|
683
|
+
### Job Types
|
|
684
|
+
|
|
685
|
+
- **email**: User notifications, transactional emails
|
|
686
|
+
- **webhook**: API integrations, third-party notifications
|
|
687
|
+
- **image-resize**: Media processing, thumbnail generation
|
|
688
|
+
- **data-export**: Large data processing, reports
|
|
689
|
+
- **reminder**: Scheduled notifications, follow-ups
|
|
690
|
+
- **cleanup**: Maintenance tasks, data archival
|
|
691
|
+
|
|
692
|
+
## 🤖 LLM Guidelines
|
|
693
|
+
|
|
694
|
+
### **Essential Patterns**
|
|
695
|
+
|
|
696
|
+
```typescript
|
|
697
|
+
// ✅ ALWAYS use these patterns
|
|
698
|
+
import { queueClass } from '@bloomneo/appkit/queue';
|
|
699
|
+
const queue = queueClass.get();
|
|
700
|
+
|
|
701
|
+
// ✅ Add jobs with proper data
|
|
702
|
+
await queue.add('email', {
|
|
703
|
+
to: 'user@example.com',
|
|
704
|
+
subject: 'Welcome',
|
|
705
|
+
body: 'Thanks for signing up',
|
|
706
|
+
});
|
|
707
|
+
|
|
708
|
+
// ✅ Process jobs with async handlers
|
|
709
|
+
queue.process('email', async (data) => {
|
|
710
|
+
await sendEmail(data.to, data.subject, data.body);
|
|
711
|
+
return { sent: true };
|
|
712
|
+
});
|
|
713
|
+
|
|
714
|
+
// ✅ Handle job failures gracefully
|
|
715
|
+
queue.process('risky-job', async (data) => {
|
|
716
|
+
try {
|
|
717
|
+
return await riskyOperation(data);
|
|
718
|
+
} catch (error) {
|
|
719
|
+
console.error('Job failed:', error);
|
|
720
|
+
throw error; // Let queue handle retry
|
|
721
|
+
}
|
|
722
|
+
});
|
|
723
|
+
|
|
724
|
+
// ✅ Use scheduling for delayed jobs
|
|
725
|
+
await queue.schedule('reminder', data, 24 * 60 * 60 * 1000);
|
|
726
|
+
```
|
|
727
|
+
|
|
728
|
+
### **Anti-Patterns to Avoid**
|
|
729
|
+
|
|
730
|
+
```typescript
|
|
731
|
+
// ❌ DON'T call queueClass.get() repeatedly
|
|
732
|
+
const queue1 = queueClass.get();
|
|
733
|
+
const queue2 = queueClass.get(); // Unnecessary - same instance
|
|
734
|
+
|
|
735
|
+
// ❌ DON'T forget to handle job failures
|
|
736
|
+
queue.process('job', async (data) => {
|
|
737
|
+
riskyOperation(data); // Missing await and error handling
|
|
738
|
+
});
|
|
739
|
+
|
|
740
|
+
// ❌ DON'T add large objects as job data
|
|
741
|
+
await queue.add('job', {
|
|
742
|
+
hugeArray: new Array(1000000).fill(0), // Too large for serialization
|
|
743
|
+
});
|
|
744
|
+
|
|
745
|
+
// ❌ DON'T block in job handlers
|
|
746
|
+
queue.process('job', async (data) => {
|
|
747
|
+
while (true) {
|
|
748
|
+
/* infinite loop */
|
|
749
|
+
} // Blocks worker
|
|
750
|
+
});
|
|
751
|
+
|
|
752
|
+
// ❌ DON'T forget cleanup in tests
|
|
753
|
+
test('my test', () => {
|
|
754
|
+
// ... test code
|
|
755
|
+
// Missing: await queueClass.clear();
|
|
756
|
+
});
|
|
757
|
+
```
|
|
758
|
+
|
|
759
|
+
### **Common Patterns**
|
|
760
|
+
|
|
761
|
+
```typescript
|
|
762
|
+
// User registration flow
|
|
763
|
+
await queue.add('email', {
|
|
764
|
+
to: user.email,
|
|
765
|
+
template: 'welcome',
|
|
766
|
+
data: { name: user.name },
|
|
767
|
+
});
|
|
768
|
+
|
|
769
|
+
// File processing
|
|
770
|
+
await queue.add(
|
|
771
|
+
'image-resize',
|
|
772
|
+
{
|
|
773
|
+
imageUrl: upload.url,
|
|
774
|
+
userId: user.id,
|
|
775
|
+
sizes: [100, 200, 400],
|
|
776
|
+
},
|
|
777
|
+
{ priority: 5 }
|
|
778
|
+
);
|
|
779
|
+
|
|
780
|
+
// Webhook notifications
|
|
781
|
+
await queue.add('webhook', {
|
|
782
|
+
url: 'https://api.partner.com/notify',
|
|
783
|
+
payload: { event: 'order_created', orderId },
|
|
784
|
+
});
|
|
785
|
+
|
|
786
|
+
// Scheduled reminders
|
|
787
|
+
const reminderDelay = 7 * 24 * 60 * 60 * 1000; // 7 days
|
|
788
|
+
await queue.schedule(
|
|
789
|
+
'reminder',
|
|
790
|
+
{
|
|
791
|
+
userId,
|
|
792
|
+
type: 'trial_ending',
|
|
793
|
+
message: 'Your trial ends soon!',
|
|
794
|
+
},
|
|
795
|
+
reminderDelay
|
|
796
|
+
);
|
|
797
|
+
|
|
798
|
+
// Monitoring and stats
|
|
799
|
+
const stats = await queue.getStats();
|
|
800
|
+
const health = queueClass.getHealth();
|
|
801
|
+
const failedJobs = await queue.getJobs('failed');
|
|
802
|
+
```
|
|
803
|
+
|
|
804
|
+
## 🆚 Why Not Bull/Agenda?
|
|
805
|
+
|
|
806
|
+
**Other libraries:**
|
|
807
|
+
|
|
808
|
+
```javascript
|
|
809
|
+
// Bull: Complex setup with multiple dependencies
|
|
810
|
+
const Queue = require('bull');
|
|
811
|
+
const emailQueue = new Queue('email processing', {
|
|
812
|
+
redis: { port: 6379, host: '127.0.0.1' },
|
|
813
|
+
defaultJobOptions: {
|
|
814
|
+
removeOnComplete: 10,
|
|
815
|
+
removeOnFail: 50,
|
|
816
|
+
delay: 5000,
|
|
817
|
+
},
|
|
818
|
+
settings: {
|
|
819
|
+
retryProcessDelay: 5000,
|
|
820
|
+
},
|
|
821
|
+
});
|
|
822
|
+
|
|
823
|
+
// Agenda: MongoDB dependency and complex scheduling
|
|
824
|
+
const Agenda = require('agenda');
|
|
825
|
+
const agenda = new Agenda({
|
|
826
|
+
db: { address: 'mongodb://127.0.0.1/agenda' },
|
|
827
|
+
processEvery: '20 seconds',
|
|
828
|
+
maxConcurrency: 20,
|
|
829
|
+
});
|
|
830
|
+
```
|
|
831
|
+
|
|
832
|
+
**This library:**
|
|
833
|
+
|
|
834
|
+
```typescript
|
|
835
|
+
// 2 lines, production ready with 3 transports
|
|
836
|
+
import { queueClass } from '@bloomneo/appkit/queue';
|
|
837
|
+
const queue = queueClass.get();
|
|
838
|
+
```
|
|
839
|
+
|
|
840
|
+
**Same features, 90% less code, zero configuration.**
|
|
841
|
+
|
|
842
|
+
## 📄 License
|
|
843
|
+
|
|
844
|
+
MIT © [Bloomneo](https://github.com/bloomneo)
|
|
845
|
+
|
|
846
|
+
---
|
|
847
|
+
|
|
848
|
+
<p align="center">
|
|
849
|
+
<strong>Built with ❤️ by the <a href="https://github.com/bloomneo">Bloomneo Team</a></strong><br>
|
|
850
|
+
Because job queuing should be simple, not a PhD thesis.
|
|
851
|
+
</p>
|