@semiont/jobs 0.2.28-build.40
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +552 -0
- package/dist/index.d.ts +286 -0
- package/dist/index.js +325 -0
- package/dist/index.js.map +1 -0
- package/package.json +49 -0
package/README.md
ADDED
|
@@ -0,0 +1,552 @@
|
|
|
1
|
+
# @semiont/jobs
|
|
2
|
+
|
|
3
|
+
[](https://github.com/The-AI-Alliance/semiont/actions/workflows/package-tests.yml?query=branch%3Amain+is%3Asuccess+job%3A%22Test+jobs%22)
|
|
4
|
+
[](https://www.npmjs.com/package/@semiont/jobs)
|
|
5
|
+
[](https://github.com/The-AI-Alliance/semiont/blob/main/LICENSE)
|
|
6
|
+
|
|
7
|
+
Filesystem-based job queue and worker infrastructure for [Semiont](https://github.com/The-AI-Alliance/semiont) - provides async job processing, background workers, and long-running task management.
|
|
8
|
+
|
|
9
|
+
## What is a Job Queue?
|
|
10
|
+
|
|
11
|
+
A job queue is a pattern for processing work asynchronously outside of the HTTP request/response cycle. Jobs are persisted to storage, processed by workers, and can be monitored for progress and completion.
|
|
12
|
+
|
|
13
|
+
**Benefits:**
|
|
14
|
+
- **Decoupled processing** - HTTP responses return immediately while work continues
|
|
15
|
+
- **Reliability** - Jobs are persisted to disk and survive process restarts
|
|
16
|
+
- **Progress tracking** - Long-running tasks can report status updates
|
|
17
|
+
- **Retry logic** - Failed jobs can be retried with exponential backoff
|
|
18
|
+
- **Scalability** - Multiple workers can process jobs concurrently
|
|
19
|
+
|
|
20
|
+
## Installation
|
|
21
|
+
|
|
22
|
+
```bash
|
|
23
|
+
npm install @semiont/jobs
|
|
24
|
+
```
|
|
25
|
+
|
|
26
|
+
**Prerequisites:**
|
|
27
|
+
- Node.js >= 20.18.1
|
|
28
|
+
- `@semiont/core` and `@semiont/api-client` (peer dependencies)
|
|
29
|
+
|
|
30
|
+
## Quick Start
|
|
31
|
+
|
|
32
|
+
```typescript
|
|
33
|
+
import {
|
|
34
|
+
JobQueue,
|
|
35
|
+
initializeJobQueue,
|
|
36
|
+
getJobQueue,
|
|
37
|
+
JobWorker,
|
|
38
|
+
type GenerationJob,
  type Job,
|
|
39
|
+
} from '@semiont/jobs';
|
|
40
|
+
import { jobId } from '@semiont/api-client';
|
|
41
|
+
import { userId, resourceId, annotationId } from '@semiont/core';
|
|
42
|
+
|
|
43
|
+
// 1. Initialize job queue
|
|
44
|
+
await initializeJobQueue({ dataDir: './data' });
|
|
45
|
+
|
|
46
|
+
// 2. Create a job
|
|
47
|
+
const jobQueue = getJobQueue();
|
|
48
|
+
const job: GenerationJob = {
|
|
49
|
+
id: jobId('job-abc123'),
|
|
50
|
+
type: 'generation',
|
|
51
|
+
status: 'pending',
|
|
52
|
+
userId: userId('user@example.com'),
|
|
53
|
+
referenceId: annotationId('ref-123'),
|
|
54
|
+
sourceResourceId: resourceId('doc-456'),
|
|
55
|
+
title: 'Generated Article',
|
|
56
|
+
created: new Date().toISOString(),
|
|
57
|
+
retryCount: 0,
|
|
58
|
+
maxRetries: 3,
|
|
59
|
+
};
|
|
60
|
+
|
|
61
|
+
await jobQueue.createJob(job);
|
|
62
|
+
|
|
63
|
+
// 3. Create a worker to process jobs
|
|
64
|
+
class MyGenerationWorker extends JobWorker {
|
|
65
|
+
protected getWorkerName(): string {
|
|
66
|
+
return 'MyGenerationWorker';
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
protected canProcessJob(job: Job): boolean {
|
|
70
|
+
return job.type === 'generation';
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
protected async executeJob(job: Job): Promise<void> {
|
|
74
|
+
const genJob = job as GenerationJob;
|
|
75
|
+
console.log(`Generating resource: ${genJob.title}`);
|
|
76
|
+
// Your processing logic here
|
|
77
|
+
}
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
// 4. Start worker
|
|
81
|
+
const worker = new MyGenerationWorker();
|
|
82
|
+
await worker.start();
|
|
83
|
+
```
|
|
84
|
+
|
|
85
|
+
## Architecture
|
|
86
|
+
|
|
87
|
+
The jobs package follows a simple status-directory pattern:
|
|
88
|
+
|
|
89
|
+
```
|
|
90
|
+
data/
|
|
91
|
+
jobs/
|
|
92
|
+
pending/ ← Jobs waiting to be processed
|
|
93
|
+
job-123.json
|
|
94
|
+
job-456.json
|
|
95
|
+
running/ ← Jobs currently being processed
|
|
96
|
+
job-789.json
|
|
97
|
+
complete/ ← Successfully completed jobs
|
|
98
|
+
job-111.json
|
|
99
|
+
failed/ ← Failed jobs (with error info)
|
|
100
|
+
job-222.json
|
|
101
|
+
cancelled/ ← Cancelled jobs
|
|
102
|
+
job-333.json
|
|
103
|
+
```
|
|
104
|
+
|
|
105
|
+
**Key Components:**
|
|
106
|
+
|
|
107
|
+
- **JobQueue** - Manages job lifecycle and persistence
|
|
108
|
+
- **JobWorker** - Abstract base class for workers that process jobs
|
|
109
|
+
- **Job Types** - Strongly-typed job definitions for different task types
|
|
110
|
+
|
|
111
|
+
## Core Concepts
|
|
112
|
+
|
|
113
|
+
### Jobs
|
|
114
|
+
|
|
115
|
+
Jobs are JSON documents that represent async work:
|
|
116
|
+
|
|
117
|
+
```typescript
|
|
118
|
+
import type { GenerationJob } from '@semiont/jobs';
|
|
119
|
+
|
|
120
|
+
const job: GenerationJob = {
|
|
121
|
+
id: jobId('job-123'),
|
|
122
|
+
type: 'generation',
|
|
123
|
+
status: 'pending',
|
|
124
|
+
userId: userId('user@example.com'),
|
|
125
|
+
|
|
126
|
+
// Job-specific fields
|
|
127
|
+
referenceId: annotationId('ref-456'),
|
|
128
|
+
sourceResourceId: resourceId('doc-789'),
|
|
129
|
+
title: 'AI Generated Article',
|
|
130
|
+
prompt: 'Write about quantum computing',
|
|
131
|
+
language: 'en-US',
|
|
132
|
+
|
|
133
|
+
// Timestamps
|
|
134
|
+
created: '2024-01-01T00:00:00Z',
|
|
135
|
+
startedAt: undefined, // Set when worker picks up job
|
|
136
|
+
completedAt: undefined, // Set when job finishes
|
|
137
|
+
|
|
138
|
+
// Retry handling
|
|
139
|
+
retryCount: 0,
|
|
140
|
+
maxRetries: 3,
|
|
141
|
+
error: undefined, // Set if job fails
|
|
142
|
+
|
|
143
|
+
// Progress tracking (optional)
|
|
144
|
+
progress: {
|
|
145
|
+
stage: 'generating',
|
|
146
|
+
percentage: 45,
|
|
147
|
+
message: 'Generating content...',
|
|
148
|
+
},
|
|
149
|
+
|
|
150
|
+
// Result (optional)
|
|
151
|
+
result: {
|
|
152
|
+
resourceId: resourceId('doc-new'),
|
|
153
|
+
resourceName: 'Generated Article',
|
|
154
|
+
},
|
|
155
|
+
};
|
|
156
|
+
```
|
|
157
|
+
|
|
158
|
+
### Job Types
|
|
159
|
+
|
|
160
|
+
The package supports multiple job types for different tasks:
|
|
161
|
+
|
|
162
|
+
```typescript
|
|
163
|
+
import type {
|
|
164
|
+
DetectionJob, // Entity detection in resources
|
|
165
|
+
GenerationJob, // AI content generation
|
|
166
|
+
HighlightDetectionJob, // Identify key passages
|
|
167
|
+
AssessmentDetectionJob, // Generate evaluative comments
|
|
168
|
+
CommentDetectionJob, // Generate explanatory comments
|
|
169
|
+
TagDetectionJob, // Structural role detection
|
|
170
|
+
} from '@semiont/jobs';
|
|
171
|
+
```
|
|
172
|
+
|
|
173
|
+
### Job Status
|
|
174
|
+
|
|
175
|
+
Jobs progress through status states stored as directories:
|
|
176
|
+
|
|
177
|
+
```typescript
|
|
178
|
+
type JobStatus =
|
|
179
|
+
| 'pending' // Waiting to be processed
|
|
180
|
+
| 'running' // Currently being processed
|
|
181
|
+
| 'complete' // Successfully finished
|
|
182
|
+
| 'failed' // Failed with error
|
|
183
|
+
| 'cancelled' // Cancelled by user
|
|
184
|
+
```
|
|
185
|
+
|
|
186
|
+
### Workers
|
|
187
|
+
|
|
188
|
+
Workers poll the queue and process jobs:
|
|
189
|
+
|
|
190
|
+
```typescript
|
|
191
|
+
import { JobWorker, type Job } from '@semiont/jobs';
|
|
192
|
+
|
|
193
|
+
class CustomWorker extends JobWorker {
|
|
194
|
+
// Worker identification
|
|
195
|
+
protected getWorkerName(): string {
|
|
196
|
+
return 'CustomWorker';
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
// Filter which jobs this worker processes
|
|
200
|
+
protected canProcessJob(job: Job): boolean {
|
|
201
|
+
return job.type === 'custom-type';
|
|
202
|
+
}
|
|
203
|
+
|
|
204
|
+
// Implement job processing logic
|
|
205
|
+
protected async executeJob(job: Job): Promise<void> {
|
|
206
|
+
// 1. Access job data
|
|
207
|
+
const customJob = job as CustomJob;
|
|
208
|
+
|
|
209
|
+
// 2. Perform async work
|
|
210
|
+
const result = await doWork(customJob);
|
|
211
|
+
|
|
212
|
+
// 3. Update job with results
|
|
213
|
+
customJob.result = result;
|
|
214
|
+
}
|
|
215
|
+
}
|
|
216
|
+
```
|
|
217
|
+
|
|
218
|
+
## Documentation
|
|
219
|
+
|
|
220
|
+
📚 **[Job Queue Guide](./docs/JobQueue.md)** - JobQueue API and job management
|
|
221
|
+
|
|
222
|
+
👷 **[Workers Guide](./docs/Workers.md)** - Building custom workers
|
|
223
|
+
|
|
224
|
+
📝 **[Job Types Guide](./docs/JobTypes.md)** - All job type definitions and usage
|
|
225
|
+
|
|
226
|
+
⚙️ **[Configuration Guide](./docs/Configuration.md)** - Setup and options
|
|
227
|
+
|
|
228
|
+
## Key Features
|
|
229
|
+
|
|
230
|
+
- **Type-safe** - Full TypeScript support with discriminated union types
|
|
231
|
+
- **Filesystem-based** - No external database required (JSON files for jobs)
|
|
232
|
+
- **Status directories** - Jobs organized by status for easy polling
|
|
233
|
+
- **Atomic operations** - Safe concurrent access to job files
|
|
234
|
+
- **Progress tracking** - Jobs can report progress updates during processing
|
|
235
|
+
- **Retry logic** - Built-in retry handling with configurable max attempts
|
|
236
|
+
- **Framework-agnostic** - Pure TypeScript, no web framework dependencies
|
|
237
|
+
|
|
238
|
+
## Use Cases
|
|
239
|
+
|
|
240
|
+
✅ **AI generation** - Long-running LLM inference tasks
|
|
241
|
+
|
|
242
|
+
✅ **Background processing** - Resource analysis, entity detection
|
|
243
|
+
|
|
244
|
+
✅ **Worker microservices** - Separate processes for compute-intensive work
|
|
245
|
+
|
|
246
|
+
✅ **CLI tools** - Command-line tools that queue batch operations
|
|
247
|
+
|
|
248
|
+
✅ **Testing** - Isolated job queues for unit/integration tests
|
|
249
|
+
|
|
250
|
+
❌ **Not for frontend** - Backend infrastructure only (workers need filesystem access)
|
|
251
|
+
|
|
252
|
+
## API Overview
|
|
253
|
+
|
|
254
|
+
### JobQueue
|
|
255
|
+
|
|
256
|
+
```typescript
|
|
257
|
+
const queue = getJobQueue();
|
|
258
|
+
|
|
259
|
+
// Create job
|
|
260
|
+
await queue.createJob(job);
|
|
261
|
+
|
|
262
|
+
// Get job by ID
|
|
263
|
+
const job = await queue.getJob(jobId);
|
|
264
|
+
|
|
265
|
+
// Poll for next pending job
|
|
266
|
+
const next = await queue.pollNextPendingJob();
|
|
267
|
+
|
|
268
|
+
// Update job status
|
|
269
|
+
job.status = 'complete';
|
|
270
|
+
await queue.updateJob(job, 'running');
|
|
271
|
+
|
|
272
|
+
// Query jobs by status
|
|
273
|
+
const pending = await queue.listJobs({ status: 'pending' });
|
|
274
|
+
const failed = await queue.listJobs({ status: 'failed' });
|
|
275
|
+
|
|
276
|
+
// Cleanup old jobs
|
|
277
|
+
await queue.cleanupOldJobs(24); // remove completed/failed jobs older than 24 hours
|
|
278
|
+
```
|
|
279
|
+
|
|
280
|
+
### JobWorker
|
|
281
|
+
|
|
282
|
+
```typescript
|
|
283
|
+
// Create worker
|
|
284
|
+
class MyWorker extends JobWorker {
|
|
285
|
+
constructor() {
|
|
286
|
+
super(
|
|
287
|
+
1000, // Poll interval (ms)
|
|
288
|
+
5000 // Error backoff (ms)
|
|
289
|
+
);
|
|
290
|
+
}
|
|
291
|
+
|
|
292
|
+
protected getWorkerName(): string {
|
|
293
|
+
return 'MyWorker';
|
|
294
|
+
}
|
|
295
|
+
|
|
296
|
+
protected canProcessJob(job: Job): boolean {
|
|
297
|
+
return job.type === 'my-type';
|
|
298
|
+
}
|
|
299
|
+
|
|
300
|
+
protected async executeJob(job: Job): Promise<void> {
|
|
301
|
+
// Process job
|
|
302
|
+
}
|
|
303
|
+
}
|
|
304
|
+
|
|
305
|
+
// Start worker
|
|
306
|
+
const worker = new MyWorker();
|
|
307
|
+
await worker.start();
|
|
308
|
+
|
|
309
|
+
// Stop worker (graceful shutdown)
|
|
310
|
+
await worker.stop();
|
|
311
|
+
```
|
|
312
|
+
|
|
313
|
+
### Singleton Pattern
|
|
314
|
+
|
|
315
|
+
```typescript
|
|
316
|
+
import { initializeJobQueue, getJobQueue } from '@semiont/jobs';
|
|
317
|
+
|
|
318
|
+
// Initialize once at startup
|
|
319
|
+
await initializeJobQueue({ dataDir: './data' });
|
|
320
|
+
|
|
321
|
+
// Get queue instance anywhere
|
|
322
|
+
const queue = getJobQueue();
|
|
323
|
+
```
|
|
324
|
+
|
|
325
|
+
## Storage Format
|
|
326
|
+
|
|
327
|
+
Jobs are stored as individual JSON files:
|
|
328
|
+
|
|
329
|
+
```
|
|
330
|
+
data/
|
|
331
|
+
jobs/
|
|
332
|
+
pending/
|
|
333
|
+
job-abc123.json
|
|
334
|
+
running/
|
|
335
|
+
job-def456.json
|
|
336
|
+
complete/
|
|
337
|
+
job-ghi789.json
|
|
338
|
+
```
|
|
339
|
+
|
|
340
|
+
Each job file contains the complete job object:
|
|
341
|
+
|
|
342
|
+
```json
|
|
343
|
+
{
|
|
344
|
+
"id": "job-abc123",
|
|
345
|
+
"type": "generation",
|
|
346
|
+
"status": "complete",
|
|
347
|
+
"userId": "user@example.com",
|
|
348
|
+
"referenceId": "ref-456",
|
|
349
|
+
"sourceResourceId": "doc-789",
|
|
350
|
+
"title": "Generated Article",
|
|
351
|
+
"created": "2024-01-01T00:00:00Z",
|
|
352
|
+
"startedAt": "2024-01-01T00:01:00Z",
|
|
353
|
+
"completedAt": "2024-01-01T00:05:00Z",
|
|
354
|
+
"retryCount": 0,
|
|
355
|
+
"maxRetries": 3,
|
|
356
|
+
"result": {
|
|
357
|
+
"resourceId": "doc-new",
|
|
358
|
+
"resourceName": "Generated Article"
|
|
359
|
+
}
|
|
360
|
+
}
|
|
361
|
+
```
|
|
362
|
+
|
|
363
|
+
## Performance
|
|
364
|
+
|
|
365
|
+
- **Polling-based** - Workers poll pending directory at configurable intervals
|
|
366
|
+
- **Filesystem limits** - Performance degrades with >1000 pending jobs per directory
|
|
367
|
+
- **Status moves** - Jobs move between status directories via delete + write (near-atomic; a crash between the two steps can drop a job — TODO confirm acceptable for your workload)
|
|
368
|
+
- **No locks needed** - Status-based organization prevents race conditions
|
|
369
|
+
|
|
370
|
+
**Scaling considerations:**
|
|
371
|
+
- Multiple workers can run concurrently (same or different machines)
|
|
372
|
+
- Workers use `pollNextPendingJob()` for FIFO processing
|
|
373
|
+
- Completed jobs should be cleaned up periodically
|
|
374
|
+
- For high throughput (>1000 jobs/min), consider Redis/database-backed queue
|
|
375
|
+
|
|
376
|
+
## Error Handling
|
|
377
|
+
|
|
378
|
+
### Worker Error Recovery
|
|
379
|
+
|
|
380
|
+
```typescript
|
|
381
|
+
class ResilientWorker extends JobWorker {
|
|
382
|
+
protected async executeJob(job: Job): Promise<void> {
|
|
383
|
+
try {
|
|
384
|
+
await doWork(job);
|
|
385
|
+
} catch (error) {
|
|
386
|
+
// JobWorker base class handles:
|
|
387
|
+
// 1. Moving job to 'failed' status
|
|
388
|
+
// 2. Recording error message
|
|
389
|
+
// 3. Retry logic (if retryCount < maxRetries)
|
|
390
|
+
throw error; // Let base class handle it
|
|
391
|
+
}
|
|
392
|
+
}
|
|
393
|
+
}
|
|
394
|
+
```
|
|
395
|
+
|
|
396
|
+
### Manual Retry
|
|
397
|
+
|
|
398
|
+
```typescript
|
|
399
|
+
const queue = getJobQueue();
|
|
400
|
+
const failedJobs = await queue.queryJobs({ status: 'failed' });
|
|
401
|
+
|
|
402
|
+
for (const job of failedJobs) {
|
|
403
|
+
if (job.retryCount < job.maxRetries) {
|
|
404
|
+
job.status = 'pending';
|
|
405
|
+
job.retryCount++;
|
|
406
|
+
delete job.error;
|
|
407
|
+
await queue.updateJob(job, 'failed');
|
|
408
|
+
}
|
|
409
|
+
}
|
|
410
|
+
```
|
|
411
|
+
|
|
412
|
+
## Testing
|
|
413
|
+
|
|
414
|
+
```typescript
|
|
415
|
+
import { initializeJobQueue, getJobQueue } from '@semiont/jobs';
|
|
416
|
+
import { describe, it, beforeEach } from 'vitest';
|
|
417
|
+
|
|
418
|
+
describe('Job queue', () => {
|
|
419
|
+
beforeEach(async () => {
|
|
420
|
+
await initializeJobQueue({ dataDir: './test-data' });
|
|
421
|
+
});
|
|
422
|
+
|
|
423
|
+
it('should create and retrieve jobs', async () => {
|
|
424
|
+
const queue = getJobQueue();
|
|
425
|
+
|
|
426
|
+
const job: GenerationJob = {
|
|
427
|
+
id: jobId('test-1'),
|
|
428
|
+
type: 'generation',
|
|
429
|
+
status: 'pending',
|
|
430
|
+
// ... other fields
|
|
431
|
+
};
|
|
432
|
+
|
|
433
|
+
await queue.createJob(job);
|
|
434
|
+
const retrieved = await queue.getJob(jobId('test-1'));
|
|
435
|
+
|
|
436
|
+
expect(retrieved).toEqual(job);
|
|
437
|
+
});
|
|
438
|
+
});
|
|
439
|
+
```
|
|
440
|
+
|
|
441
|
+
## Examples
|
|
442
|
+
|
|
443
|
+
### Building a Background Worker
|
|
444
|
+
|
|
445
|
+
```typescript
|
|
446
|
+
import { JobWorker, getJobQueue, type Job, type GenerationJob } from '@semiont/jobs';
|
|
447
|
+
import { InferenceService } from './inference';
|
|
448
|
+
|
|
449
|
+
class GenerationWorker extends JobWorker {
|
|
450
|
+
private inference: InferenceService;
|
|
451
|
+
|
|
452
|
+
constructor(inference: InferenceService) {
|
|
453
|
+
super(1000, 5000);
|
|
454
|
+
this.inference = inference;
|
|
455
|
+
}
|
|
456
|
+
|
|
457
|
+
protected getWorkerName(): string {
|
|
458
|
+
return 'GenerationWorker';
|
|
459
|
+
}
|
|
460
|
+
|
|
461
|
+
protected canProcessJob(job: Job): boolean {
|
|
462
|
+
return job.type === 'generation';
|
|
463
|
+
}
|
|
464
|
+
|
|
465
|
+
protected async executeJob(job: Job): Promise<void> {
|
|
466
|
+
const genJob = job as GenerationJob;
|
|
467
|
+
|
|
468
|
+
// Report progress
|
|
469
|
+
genJob.progress = {
|
|
470
|
+
stage: 'generating',
|
|
471
|
+
percentage: 0,
|
|
472
|
+
message: 'Starting generation...',
|
|
473
|
+
};
|
|
474
|
+
await getJobQueue().updateJob(genJob);
|
|
475
|
+
|
|
476
|
+
// Generate content
|
|
477
|
+
const content = await this.inference.generate({
|
|
478
|
+
prompt: genJob.prompt,
|
|
479
|
+
context: genJob.context,
|
|
480
|
+
temperature: genJob.temperature,
|
|
481
|
+
maxTokens: genJob.maxTokens,
|
|
482
|
+
});
|
|
483
|
+
|
|
484
|
+
// Update progress
|
|
485
|
+
genJob.progress = {
|
|
486
|
+
stage: 'creating',
|
|
487
|
+
percentage: 75,
|
|
488
|
+
message: 'Creating resource...',
|
|
489
|
+
};
|
|
490
|
+
await getJobQueue().updateJob(genJob);
|
|
491
|
+
|
|
492
|
+
// Create resource (simplified)
|
|
493
|
+
const resourceId = await createResource(content, genJob.title);
|
|
494
|
+
|
|
495
|
+
// Set result
|
|
496
|
+
genJob.result = {
|
|
497
|
+
resourceId,
|
|
498
|
+
resourceName: genJob.title,
|
|
499
|
+
};
|
|
500
|
+
}
|
|
501
|
+
}
|
|
502
|
+
```
|
|
503
|
+
|
|
504
|
+
### Progress Monitoring
|
|
505
|
+
|
|
506
|
+
```typescript
|
|
507
|
+
import { getJobQueue } from '@semiont/jobs';
|
|
508
|
+
|
|
509
|
+
async function monitorJob(jobId: JobId): Promise<void> {
|
|
510
|
+
const queue = getJobQueue();
|
|
511
|
+
|
|
512
|
+
while (true) {
|
|
513
|
+
const job = await queue.getJob(jobId);
|
|
514
|
+
|
|
515
|
+
if (!job) {
|
|
516
|
+
console.log('Job not found');
|
|
517
|
+
break;
|
|
518
|
+
}
|
|
519
|
+
|
|
520
|
+
console.log(`Status: ${job.status}`);
|
|
521
|
+
|
|
522
|
+
if (job.progress) {
|
|
523
|
+
console.log(`Progress: ${job.progress.percentage}%`);
|
|
524
|
+
console.log(`Stage: ${job.progress.stage}`);
|
|
525
|
+
console.log(`Message: ${job.progress.message}`);
|
|
526
|
+
}
|
|
527
|
+
|
|
528
|
+
if (job.status === 'complete' || job.status === 'failed' || job.status === 'cancelled') {
|
|
529
|
+
break;
|
|
530
|
+
}
|
|
531
|
+
|
|
532
|
+
await new Promise(resolve => setTimeout(resolve, 1000));
|
|
533
|
+
}
|
|
534
|
+
}
|
|
535
|
+
```
|
|
536
|
+
|
|
537
|
+
## License
|
|
538
|
+
|
|
539
|
+
Apache-2.0
|
|
540
|
+
|
|
541
|
+
## Related Packages
|
|
542
|
+
|
|
543
|
+
- [`@semiont/api-client`](../api-client/) - API types and utilities
|
|
544
|
+
- [`@semiont/core`](../core/) - Domain types and utilities
|
|
545
|
+
- [`@semiont/event-sourcing`](../event-sourcing/) - Event persistence
|
|
546
|
+
- [`semiont-backend`](../../apps/backend/) - Backend API server
|
|
547
|
+
|
|
548
|
+
## Learn More
|
|
549
|
+
|
|
550
|
+
- [Background Jobs Pattern](https://www.enterpriseintegrationpatterns.com/patterns/messaging/MessageQueueing.html) - Queue-based processing
|
|
551
|
+
- [Job Types Guide](./docs/JobTypes.md) - Detailed job type documentation
|
|
552
|
+
- [Workers Guide](./docs/Workers.md) - Building custom workers
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,286 @@
|
|
|
1
|
+
import { JobId, EntityType, GenerationContext } from '@semiont/api-client';
|
|
2
|
+
import { UserId, ResourceId, AnnotationId } from '@semiont/core';
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* Job Queue Type Definitions
|
|
6
|
+
*
|
|
7
|
+
* Jobs represent async work that can be queued, processed, and monitored.
|
|
8
|
+
* They are completely independent of HTTP request/response cycles.
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
type JobType = 'detection' | 'generation' | 'highlight-detection' | 'assessment-detection' | 'comment-detection' | 'tag-detection';
|
|
12
|
+
type JobStatus = 'pending' | 'running' | 'complete' | 'failed' | 'cancelled';
|
|
13
|
+
/**
|
|
14
|
+
* Base job interface - all jobs extend this
|
|
15
|
+
*/
|
|
16
|
+
interface BaseJob {
|
|
17
|
+
id: JobId;
|
|
18
|
+
type: JobType;
|
|
19
|
+
status: JobStatus;
|
|
20
|
+
userId: UserId;
|
|
21
|
+
created: string;
|
|
22
|
+
startedAt?: string;
|
|
23
|
+
completedAt?: string;
|
|
24
|
+
error?: string;
|
|
25
|
+
retryCount: number;
|
|
26
|
+
maxRetries: number;
|
|
27
|
+
}
|
|
28
|
+
/**
|
|
29
|
+
* Detection job - finds entities in a resource using AI inference
|
|
30
|
+
*/
|
|
31
|
+
interface DetectionJob extends BaseJob {
|
|
32
|
+
type: 'detection';
|
|
33
|
+
resourceId: ResourceId;
|
|
34
|
+
entityTypes: EntityType[];
|
|
35
|
+
includeDescriptiveReferences?: boolean;
|
|
36
|
+
progress?: {
|
|
37
|
+
totalEntityTypes: number;
|
|
38
|
+
processedEntityTypes: number;
|
|
39
|
+
currentEntityType?: string;
|
|
40
|
+
entitiesFound: number;
|
|
41
|
+
entitiesEmitted: number;
|
|
42
|
+
};
|
|
43
|
+
result?: {
|
|
44
|
+
totalFound: number;
|
|
45
|
+
totalEmitted: number;
|
|
46
|
+
errors: number;
|
|
47
|
+
};
|
|
48
|
+
}
|
|
49
|
+
/**
|
|
50
|
+
* Generation job - generates a new resource using AI inference
|
|
51
|
+
*/
|
|
52
|
+
interface GenerationJob extends BaseJob {
|
|
53
|
+
type: 'generation';
|
|
54
|
+
referenceId: AnnotationId;
|
|
55
|
+
sourceResourceId: ResourceId;
|
|
56
|
+
prompt?: string;
|
|
57
|
+
title?: string;
|
|
58
|
+
entityTypes?: EntityType[];
|
|
59
|
+
language?: string;
|
|
60
|
+
context?: GenerationContext;
|
|
61
|
+
temperature?: number;
|
|
62
|
+
maxTokens?: number;
|
|
63
|
+
progress?: {
|
|
64
|
+
stage: 'fetching' | 'generating' | 'creating' | 'linking';
|
|
65
|
+
percentage: number;
|
|
66
|
+
message?: string;
|
|
67
|
+
};
|
|
68
|
+
result?: {
|
|
69
|
+
resourceId: ResourceId;
|
|
70
|
+
resourceName: string;
|
|
71
|
+
};
|
|
72
|
+
}
|
|
73
|
+
/**
|
|
74
|
+
* Highlight Detection job - finds passages to highlight using AI
|
|
75
|
+
*/
|
|
76
|
+
interface HighlightDetectionJob extends BaseJob {
|
|
77
|
+
type: 'highlight-detection';
|
|
78
|
+
resourceId: ResourceId;
|
|
79
|
+
instructions?: string;
|
|
80
|
+
density?: number;
|
|
81
|
+
progress?: {
|
|
82
|
+
stage: 'analyzing' | 'creating';
|
|
83
|
+
percentage: number;
|
|
84
|
+
message?: string;
|
|
85
|
+
};
|
|
86
|
+
result?: {
|
|
87
|
+
highlightsFound: number;
|
|
88
|
+
highlightsCreated: number;
|
|
89
|
+
};
|
|
90
|
+
}
|
|
91
|
+
/**
|
|
92
|
+
* Assessment Detection job - evaluates passages using AI
|
|
93
|
+
*/
|
|
94
|
+
interface AssessmentDetectionJob extends BaseJob {
|
|
95
|
+
type: 'assessment-detection';
|
|
96
|
+
resourceId: ResourceId;
|
|
97
|
+
instructions?: string;
|
|
98
|
+
tone?: 'analytical' | 'critical' | 'balanced' | 'constructive';
|
|
99
|
+
density?: number;
|
|
100
|
+
progress?: {
|
|
101
|
+
stage: 'analyzing' | 'creating';
|
|
102
|
+
percentage: number;
|
|
103
|
+
message?: string;
|
|
104
|
+
};
|
|
105
|
+
result?: {
|
|
106
|
+
assessmentsFound: number;
|
|
107
|
+
assessmentsCreated: number;
|
|
108
|
+
};
|
|
109
|
+
}
|
|
110
|
+
/**
|
|
111
|
+
* Comment Detection job - generates explanatory comments on passages using AI
|
|
112
|
+
*/
|
|
113
|
+
interface CommentDetectionJob extends BaseJob {
|
|
114
|
+
type: 'comment-detection';
|
|
115
|
+
resourceId: ResourceId;
|
|
116
|
+
instructions?: string;
|
|
117
|
+
tone?: 'scholarly' | 'explanatory' | 'conversational' | 'technical';
|
|
118
|
+
density?: number;
|
|
119
|
+
progress?: {
|
|
120
|
+
stage: 'analyzing' | 'creating';
|
|
121
|
+
percentage: number;
|
|
122
|
+
message?: string;
|
|
123
|
+
};
|
|
124
|
+
result?: {
|
|
125
|
+
commentsFound: number;
|
|
126
|
+
commentsCreated: number;
|
|
127
|
+
};
|
|
128
|
+
}
|
|
129
|
+
/**
|
|
130
|
+
* Tag Detection job - identifies passages serving structural roles using AI
|
|
131
|
+
*/
|
|
132
|
+
interface TagDetectionJob extends BaseJob {
|
|
133
|
+
type: 'tag-detection';
|
|
134
|
+
resourceId: ResourceId;
|
|
135
|
+
schemaId: string;
|
|
136
|
+
categories: string[];
|
|
137
|
+
progress?: {
|
|
138
|
+
stage: 'analyzing' | 'creating';
|
|
139
|
+
percentage: number;
|
|
140
|
+
currentCategory?: string;
|
|
141
|
+
processedCategories: number;
|
|
142
|
+
totalCategories: number;
|
|
143
|
+
message?: string;
|
|
144
|
+
};
|
|
145
|
+
result?: {
|
|
146
|
+
tagsFound: number;
|
|
147
|
+
tagsCreated: number;
|
|
148
|
+
byCategory: Record<string, number>;
|
|
149
|
+
};
|
|
150
|
+
}
|
|
151
|
+
/**
|
|
152
|
+
* Discriminated union of all job types
|
|
153
|
+
*/
|
|
154
|
+
type Job = DetectionJob | GenerationJob | HighlightDetectionJob | AssessmentDetectionJob | CommentDetectionJob | TagDetectionJob;
|
|
155
|
+
/**
|
|
156
|
+
* Job query filters
|
|
157
|
+
*/
|
|
158
|
+
interface JobQueryFilters {
|
|
159
|
+
status?: JobStatus;
|
|
160
|
+
type?: JobType;
|
|
161
|
+
userId?: UserId;
|
|
162
|
+
limit?: number;
|
|
163
|
+
offset?: number;
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
/**
|
|
167
|
+
* Job Queue Manager
|
|
168
|
+
*
|
|
169
|
+
* Filesystem-based job queue with atomic operations.
|
|
170
|
+
* Jobs are stored in directories by status for easy polling.
|
|
171
|
+
*/
|
|
172
|
+
|
|
173
|
+
interface JobQueueConfig {
|
|
174
|
+
dataDir: string;
|
|
175
|
+
}
|
|
176
|
+
declare class JobQueue {
|
|
177
|
+
private jobsDir;
|
|
178
|
+
constructor(config: JobQueueConfig);
|
|
179
|
+
/**
|
|
180
|
+
* Initialize job queue directories
|
|
181
|
+
*/
|
|
182
|
+
initialize(): Promise<void>;
|
|
183
|
+
/**
|
|
184
|
+
* Create a new job
|
|
185
|
+
*/
|
|
186
|
+
createJob(job: Job): Promise<void>;
|
|
187
|
+
/**
|
|
188
|
+
* Get a job by ID (searches all status directories)
|
|
189
|
+
*/
|
|
190
|
+
getJob(jobId: JobId): Promise<Job | null>;
|
|
191
|
+
/**
|
|
192
|
+
* Update a job (atomic: delete old, write new)
|
|
193
|
+
*/
|
|
194
|
+
updateJob(job: Job, oldStatus?: JobStatus): Promise<void>;
|
|
195
|
+
/**
|
|
196
|
+
* Poll for next pending job (FIFO)
|
|
197
|
+
*/
|
|
198
|
+
pollNextPendingJob(): Promise<Job | null>;
|
|
199
|
+
/**
|
|
200
|
+
* List jobs with filters
|
|
201
|
+
*/
|
|
202
|
+
listJobs(filters?: JobQueryFilters): Promise<Job[]>;
|
|
203
|
+
/**
|
|
204
|
+
* Cancel a job
|
|
205
|
+
*/
|
|
206
|
+
cancelJob(jobId: JobId): Promise<boolean>;
|
|
207
|
+
/**
|
|
208
|
+
* Clean up old completed/failed jobs (older than retention period)
|
|
209
|
+
*/
|
|
210
|
+
cleanupOldJobs(retentionHours?: number): Promise<number>;
|
|
211
|
+
/**
|
|
212
|
+
* Get job file path
|
|
213
|
+
*/
|
|
214
|
+
private getJobPath;
|
|
215
|
+
/**
|
|
216
|
+
* Get statistics about the queue
|
|
217
|
+
*/
|
|
218
|
+
getStats(): Promise<{
|
|
219
|
+
pending: number;
|
|
220
|
+
running: number;
|
|
221
|
+
complete: number;
|
|
222
|
+
failed: number;
|
|
223
|
+
cancelled: number;
|
|
224
|
+
}>;
|
|
225
|
+
}
|
|
226
|
+
declare function getJobQueue(): JobQueue;
|
|
227
|
+
declare function initializeJobQueue(config: JobQueueConfig): Promise<JobQueue>;
|
|
228
|
+
|
|
229
|
+
/**
|
|
230
|
+
* Job Worker Base Class
|
|
231
|
+
*
|
|
232
|
+
* Abstract worker that polls the job queue and processes jobs.
|
|
233
|
+
* Subclasses implement specific job processing logic.
|
|
234
|
+
*/
|
|
235
|
+
|
|
236
|
+
declare abstract class JobWorker {
|
|
237
|
+
private running;
|
|
238
|
+
private currentJob;
|
|
239
|
+
private pollIntervalMs;
|
|
240
|
+
private errorBackoffMs;
|
|
241
|
+
constructor(pollIntervalMs?: number, errorBackoffMs?: number);
|
|
242
|
+
/**
|
|
243
|
+
* Start the worker (polls queue in loop)
|
|
244
|
+
*/
|
|
245
|
+
start(): Promise<void>;
|
|
246
|
+
/**
|
|
247
|
+
* Stop the worker (graceful shutdown)
|
|
248
|
+
*/
|
|
249
|
+
stop(): Promise<void>;
|
|
250
|
+
/**
|
|
251
|
+
* Poll for next job to process
|
|
252
|
+
*/
|
|
253
|
+
private pollNextJob;
|
|
254
|
+
/**
|
|
255
|
+
* Process a job (handles state transitions and error handling)
|
|
256
|
+
*/
|
|
257
|
+
private processJob;
|
|
258
|
+
/**
|
|
259
|
+
* Handle job failure (retry or move to failed)
|
|
260
|
+
*/
|
|
261
|
+
protected handleJobFailure(job: Job, error: any): Promise<void>;
|
|
262
|
+
/**
|
|
263
|
+
* Update job progress (best-effort, doesn't throw)
|
|
264
|
+
*/
|
|
265
|
+
protected updateJobProgress(job: Job): Promise<void>;
|
|
266
|
+
/**
|
|
267
|
+
* Sleep utility
|
|
268
|
+
*/
|
|
269
|
+
protected sleep(ms: number): Promise<void>;
|
|
270
|
+
/**
|
|
271
|
+
* Get worker name (for logging)
|
|
272
|
+
*/
|
|
273
|
+
protected abstract getWorkerName(): string;
|
|
274
|
+
/**
|
|
275
|
+
* Check if this worker can process the given job
|
|
276
|
+
*/
|
|
277
|
+
protected abstract canProcessJob(job: Job): boolean;
|
|
278
|
+
/**
|
|
279
|
+
* Execute the job (job-specific logic)
|
|
280
|
+
* This is where the actual work happens
|
|
281
|
+
* Throw an error to trigger retry logic
|
|
282
|
+
*/
|
|
283
|
+
protected abstract executeJob(job: Job): Promise<void>;
|
|
284
|
+
}
|
|
285
|
+
|
|
286
|
+
export { type AssessmentDetectionJob, type BaseJob, type CommentDetectionJob, type DetectionJob, type GenerationJob, type HighlightDetectionJob, type Job, type JobQueryFilters, JobQueue, type JobQueueConfig, type JobStatus, type JobType, JobWorker, type TagDetectionJob, getJobQueue, initializeJobQueue };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,325 @@
|
|
|
1
|
+
import { promises } from 'fs';
|
|
2
|
+
import * as path from 'path';
|
|
3
|
+
|
|
4
|
+
// src/job-queue.ts
|
|
5
|
+
var JobQueue = class {
  // Root directory containing one subdirectory per job status.
  jobsDir;

  constructor(config) {
    this.jobsDir = path.join(config.dataDir, "jobs");
  }

  /**
   * Initialize job queue directories (one subdirectory per status).
   * Safe to call repeatedly: mkdir is recursive and idempotent.
   */
  async initialize() {
    const statuses = ["pending", "running", "complete", "failed", "cancelled"];
    for (const status of statuses) {
      await promises.mkdir(path.join(this.jobsDir, status), { recursive: true });
    }
    console.log("[JobQueue] Initialized job directories");
  }

  /**
   * Create a new job by persisting it as pretty-printed JSON into the
   * directory matching its current status.
   */
  async createJob(job) {
    const jobPath = this.getJobPath(job.id, job.status);
    await promises.writeFile(jobPath, JSON.stringify(job, null, 2), "utf-8");
    console.log(`[JobQueue] Created job ${job.id} with status ${job.status}`);
  }

  /**
   * Get a job by ID (searches all status directories).
   * Returns null when the job is not found in any directory.
   */
  async getJob(jobId) {
    const statuses = ["pending", "running", "complete", "failed", "cancelled"];
    for (const status of statuses) {
      const jobPath = this.getJobPath(jobId, status);
      try {
        const content = await promises.readFile(jobPath, "utf-8");
        return JSON.parse(content);
      } catch (error) {
        // Not present in this status directory (or unreadable) — try the next.
        continue;
      }
    }
    return null;
  }

  /**
   * Update a job. When the status changed, the old file is removed first so
   * the job lives in exactly one status directory ("move" = unlink + write).
   * Not atomic across the two operations; callers tolerate a brief window
   * where the job exists in neither directory.
   */
  async updateJob(job, oldStatus) {
    const moved = oldStatus && oldStatus !== job.status;
    if (moved) {
      try {
        await promises.unlink(this.getJobPath(job.id, oldStatus));
      } catch (error) {
        // Old file already gone — nothing to clean up.
      }
    }
    const newPath = this.getJobPath(job.id, job.status);
    await promises.writeFile(newPath, JSON.stringify(job, null, 2), "utf-8");
    if (moved) {
      console.log(`[JobQueue] Moved job ${job.id} from ${oldStatus} to ${job.status}`);
    } else {
      console.log(`[JobQueue] Updated job ${job.id} (status: ${job.status})`);
    }
  }

  /**
   * Poll for the next pending job (FIFO via filename sort).
   * Returns null when the queue is empty or on read errors.
   */
  async pollNextPendingJob() {
    const pendingDir = path.join(this.jobsDir, "pending");
    try {
      // Only consider job files; stray entries (editor swap files, .DS_Store)
      // previously got JSON.parsed and poisoned polling via the catch below.
      const files = (await promises.readdir(pendingDir)).filter((f) => f.endsWith(".json"));
      if (files.length === 0) {
        return null;
      }
      files.sort();
      const jobPath = path.join(pendingDir, files[0]);
      const content = await promises.readFile(jobPath, "utf-8");
      return JSON.parse(content);
    } catch (error) {
      console.error("[JobQueue] Error polling pending jobs:", error);
      return null;
    }
  }

  /**
   * List jobs, optionally filtered by status/type/userId, newest first,
   * paginated with offset (default 0) and limit (default 100).
   */
  async listJobs(filters = {}) {
    const jobs = [];
    const statuses = filters.status ? [filters.status] : ["pending", "running", "complete", "failed", "cancelled"];
    for (const status of statuses) {
      const statusDir = path.join(this.jobsDir, status);
      let files;
      try {
        files = await promises.readdir(statusDir);
      } catch (error) {
        // Directory might not exist yet.
        continue;
      }
      for (const file of files) {
        if (!file.endsWith(".json")) continue;
        try {
          const content = await promises.readFile(path.join(statusDir, file), "utf-8");
          const job = JSON.parse(content);
          if (filters.type && job.type !== filters.type) continue;
          if (filters.userId && job.userId !== filters.userId) continue;
          jobs.push(job);
        } catch (error) {
          // File removed concurrently or corrupt — skip it and keep scanning.
          // (Previously one bad file aborted the whole status directory.)
          continue;
        }
      }
    }
    jobs.sort((a, b) => new Date(b.created).getTime() - new Date(a.created).getTime());
    const offset = filters.offset || 0;
    const limit = filters.limit || 100;
    return jobs.slice(offset, offset + limit);
  }

  /**
   * Cancel a job. Only pending or running jobs can be cancelled.
   * Returns true when the job was moved to 'cancelled', false otherwise.
   */
  async cancelJob(jobId) {
    const job = await this.getJob(jobId);
    if (!job) {
      return false;
    }
    if (job.status !== "pending" && job.status !== "running") {
      return false;
    }
    const oldStatus = job.status;
    job.status = "cancelled";
    job.completedAt = new Date().toISOString();
    await this.updateJob(job, oldStatus);
    return true;
  }

  /**
   * Delete completed/failed/cancelled jobs whose completedAt is older than the
   * retention window (default 24h). Returns the number of files deleted.
   */
  async cleanupOldJobs(retentionHours = 24) {
    const cutoffTime = Date.now() - retentionHours * 60 * 60 * 1e3;
    let deletedCount = 0;
    const cleanupStatuses = ["complete", "failed", "cancelled"];
    for (const status of cleanupStatuses) {
      const statusDir = path.join(this.jobsDir, status);
      let files;
      try {
        files = await promises.readdir(statusDir);
      } catch (error) {
        console.error(`[JobQueue] Error cleaning up ${status} jobs:`, error);
        continue;
      }
      for (const file of files) {
        if (!file.endsWith(".json")) continue;
        const jobPath = path.join(statusDir, file);
        try {
          const content = await promises.readFile(jobPath, "utf-8");
          const job = JSON.parse(content);
          if (job.completedAt && new Date(job.completedAt).getTime() < cutoffTime) {
            await promises.unlink(jobPath);
            deletedCount++;
          }
        } catch (error) {
          // Log but continue — one unreadable file must not stop cleanup.
          console.error(`[JobQueue] Error cleaning up ${status} jobs:`, error);
        }
      }
    }
    if (deletedCount > 0) {
      console.log(`[JobQueue] Cleaned up ${deletedCount} old jobs`);
    }
    return deletedCount;
  }

  /**
   * Build the on-disk path for a job file: <jobsDir>/<status>/<jobId>.json
   */
  getJobPath(jobId, status) {
    return path.join(this.jobsDir, status, `${jobId}.json`);
  }

  /**
   * Count job files per status. Missing directories count as 0.
   */
  async getStats() {
    const stats = {
      pending: 0,
      running: 0,
      complete: 0,
      failed: 0,
      cancelled: 0
    };
    const statuses = ["pending", "running", "complete", "failed", "cancelled"];
    for (const status of statuses) {
      const statusDir = path.join(this.jobsDir, status);
      try {
        const files = await promises.readdir(statusDir);
        stats[status] = files.filter((f) => f.endsWith(".json")).length;
      } catch (error) {
        // Directory might not exist yet.
        stats[status] = 0;
      }
    }
    return stats;
  }
};
|
|
190
|
+
// Module-level singleton instance; null until initializeJobQueue() runs.
var jobQueue = null;

/**
 * Return the singleton JobQueue.
 * Throws when initializeJobQueue() has not been called yet.
 */
function getJobQueue() {
  if (jobQueue === null) {
    throw new Error("JobQueue not initialized. Call initializeJobQueue() first.");
  }
  return jobQueue;
}

/**
 * Create the singleton JobQueue, initialize its directories, and return it.
 * The instance is installed before initialize() resolves, matching the
 * original assignment order.
 */
async function initializeJobQueue(config) {
  jobQueue = new JobQueue(config);
  await jobQueue.initialize();
  return jobQueue;
}
|
|
202
|
+
|
|
203
|
+
// src/job-worker.ts
|
|
204
|
+
var JobWorker = class {
  // True while the polling loop should keep going.
  running = false;
  // Job currently being processed (null when idle); used for graceful shutdown.
  currentJob = null;
  pollIntervalMs;
  errorBackoffMs;

  constructor(pollIntervalMs = 1e3, errorBackoffMs = 5e3) {
    this.pollIntervalMs = pollIntervalMs;
    this.errorBackoffMs = errorBackoffMs;
  }

  /**
   * Run the worker loop: poll for a job, process it, sleep between polls.
   * Resolves only after stop() flips the running flag.
   */
  async start() {
    this.running = true;
    console.log(`[${this.getWorkerName()}] Started`);
    while (this.running) {
      try {
        const next = await this.pollNextJob();
        if (!next) {
          // Nothing available right now — wait before polling again.
          await this.sleep(this.pollIntervalMs);
          continue;
        }
        await this.processJob(next);
      } catch (error) {
        console.error(`[${this.getWorkerName()}] Error in main loop:`, error);
        // Back off so a persistent failure does not spin the loop.
        await this.sleep(this.errorBackoffMs);
      }
    }
    console.log(`[${this.getWorkerName()}] Stopped`);
  }

  /**
   * Request a graceful shutdown: stop polling, then wait up to 60 seconds
   * for any in-flight job to finish before giving up.
   */
  async stop() {
    console.log(`[${this.getWorkerName()}] Stopping...`);
    this.running = false;
    const deadline = Date.now() + 6e4;
    while (this.currentJob && Date.now() < deadline) {
      await this.sleep(100);
    }
    if (this.currentJob) {
      console.warn(`[${this.getWorkerName()}] Forced shutdown while processing job ${this.currentJob.id}`);
    }
  }

  /**
   * Fetch the next pending job this worker can handle, or null.
   */
  async pollNextJob() {
    const queue = getJobQueue();
    const candidate = await queue.pollNextPendingJob();
    return candidate && this.canProcessJob(candidate) ? candidate : null;
  }

  /**
   * Drive one job through its lifecycle: pending -> running -> complete,
   * delegating any thrown error to handleJobFailure().
   */
  async processJob(job) {
    this.currentJob = job;
    const queue = getJobQueue();
    try {
      const previousStatus = job.status;
      job.status = "running";
      job.startedAt = new Date().toISOString();
      await queue.updateJob(job, previousStatus);
      console.log(`[${this.getWorkerName()}] \u{1F504} Processing job ${job.id} (type: ${job.type})`);
      await this.executeJob(job);
      job.status = "complete";
      job.completedAt = new Date().toISOString();
      await queue.updateJob(job, "running");
      console.log(`[${this.getWorkerName()}] \u2705 Job ${job.id} completed successfully`);
    } catch (error) {
      await this.handleJobFailure(job, error);
    } finally {
      this.currentJob = null;
    }
  }

  /**
   * Decide between retrying a failed job (moved back to pending) and marking
   * it permanently failed once maxRetries is exhausted.
   */
  async handleJobFailure(job, error) {
    const queue = getJobQueue();
    job.retryCount++;
    if (job.retryCount < job.maxRetries) {
      console.log(`[${this.getWorkerName()}] Job ${job.id} failed, will retry (${job.retryCount}/${job.maxRetries})`);
      console.log(`[${this.getWorkerName()}] Error:`, error);
      // Re-queue: clear the start time so the retry looks freshly pending.
      job.status = "pending";
      job.startedAt = undefined;
      await queue.updateJob(job, "running");
    } else {
      console.error(`[${this.getWorkerName()}] \u274C Job ${job.id} failed permanently after ${job.retryCount} retries`);
      console.error(`[${this.getWorkerName()}] Error:`, error);
      job.status = "failed";
      job.error = error instanceof Error ? error.message : String(error);
      job.completedAt = new Date().toISOString();
      await queue.updateJob(job, "running");
    }
  }

  /**
   * Persist job progress without throwing — progress updates are best-effort.
   */
  async updateJobProgress(job) {
    try {
      await getJobQueue().updateJob(job);
    } catch (error) {
      console.warn(`[${this.getWorkerName()}] Failed to update job progress:`, error);
    }
  }

  /**
   * Promise-based sleep helper.
   */
  sleep(ms) {
    return new Promise((resolve) => {
      setTimeout(resolve, ms);
    });
  }
};
|
|
322
|
+
|
|
323
|
+
export { JobQueue, JobWorker, getJobQueue, initializeJobQueue };
|
|
324
|
+
//# sourceMappingURL=index.js.map
|
|
325
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/job-queue.ts","../src/job-worker.ts"],"names":["fs","jobQueue"],"mappings":";;;;AAgBO,IAAM,WAAN,MAAe;AAAA,EACZ,OAAA;AAAA,EAER,YAAY,MAAA,EAAwB;AAClC,IAAA,IAAA,CAAK,OAAA,GAAe,IAAA,CAAA,IAAA,CAAK,MAAA,CAAO,OAAA,EAAS,MAAM,CAAA;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAA,GAA4B;AAChC,IAAA,MAAM,WAAwB,CAAC,SAAA,EAAW,SAAA,EAAW,UAAA,EAAY,UAAU,WAAW,CAAA;AAEtF,IAAA,KAAA,MAAW,UAAU,QAAA,EAAU;AAC7B,MAAA,MAAM,GAAA,GAAW,IAAA,CAAA,IAAA,CAAK,IAAA,CAAK,OAAA,EAAS,MAAM,CAAA;AAC1C,MAAA,MAAMA,SAAG,KAAA,CAAM,GAAA,EAAK,EAAE,SAAA,EAAW,MAAM,CAAA;AAAA,IACzC;AAEA,IAAA,OAAA,CAAQ,IAAI,wCAAwC,CAAA;AAAA,EACtD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAU,GAAA,EAAyB;AACvC,IAAA,MAAM,UAAU,IAAA,CAAK,UAAA,CAAW,GAAA,CAAI,EAAA,EAAI,IAAI,MAAM,CAAA;AAClD,IAAA,MAAMA,QAAA,CAAG,UAAU,OAAA,EAAS,IAAA,CAAK,UAAU,GAAA,EAAK,IAAA,EAAM,CAAC,CAAA,EAAG,OAAO,CAAA;AACjE,IAAA,OAAA,CAAQ,IAAI,CAAA,uBAAA,EAA0B,GAAA,CAAI,EAAE,CAAA,aAAA,EAAgB,GAAA,CAAI,MAAM,CAAA,CAAE,CAAA;AAAA,EAC1E;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAO,KAAA,EAAmC;AAC9C,IAAA,MAAM,WAAwB,CAAC,SAAA,EAAW,SAAA,EAAW,UAAA,EAAY,UAAU,WAAW,CAAA;AAEtF,IAAA,KAAA,MAAW,UAAU,QAAA,EAAU;AAC7B,MAAA,MAAM,OAAA,GAAU,IAAA,CAAK,UAAA,CAAW,KAAA,EAAO,MAAM,CAAA;AAC7C,MAAA,IAAI;AACF,QAAA,MAAM,OAAA,GAAU,MAAMA,QAAA,CAAG,QAAA,CAAS,SAAS,OAAO,CAAA;AAClD,QAAA,OAAO,IAAA,CAAK,MAAM,OAAO,CAAA;AAAA,MAC3B,SAAS,KAAA,EAAO;AAEd,QAAA;AAAA,MACF;AAAA,IACF;AAEA,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,SAAA,CAAU,GAAA,EAAU,SAAA,EAAsC;AAE9D,IAAA,IAAI,SAAA,IAAa,SAAA,KAAc,GAAA,CAAI,MAAA,EAAQ;AACzC,MAAA,MAAM,OAAA,GAAU,IAAA,CAAK,UAAA,CAAW,GAAA,CAAI,IAAI,SAAS,CAAA;AACjD,MAAA,IAAI;AACF,QAAA,MAAMA,QAAA,CAAG,OAAO,OAAO,CAAA;AAAA,MACzB,SAAS,KAAA,EAAO;AAAA,MAEhB;AAAA,IACF;AAGA,IAAA,MAAM,UAAU,IAAA,CAAK,UAAA,CAAW,GAAA,CAAI,EAAA,EAAI,IAAI,MAAM,CAAA;AAClD,IAAA,MAAMA,QAAA,CAAG,UAAU,OAAA,EAAS,IAAA,CAAK,UAAU,GAAA,EAAK,IAAA,EAAM,CAAC,CAAA,EAAG,OAAO,CAAA;AAEjE,IAAA,IAAI,SAAA,IAAa,SAAA,KAAc,GAAA,CAAI,MAAA,EAAQ;AACzC,MAAA,OAAA,CAAQ,GAAA,CAAI,wBAAwB,GAAA,CAAI,EAAE,SAAS,SAAS,CAAA,IAAA,EAAO,GAAA,CAAI,MA
AM,CAAA,CAAE,CAAA;AAAA,IACjF,CAAA,MAAO;AACL,MAAA,OAAA,CAAQ,IAAI,CAAA,uBAAA,EAA0B,GAAA,CAAI,EAAE,CAAA,UAAA,EAAa,GAAA,CAAI,MAAM,CAAA,CAAA,CAAG,CAAA;AAAA,IACxE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,kBAAA,GAA0C;AAC9C,IAAA,MAAM,UAAA,GAAkB,IAAA,CAAA,IAAA,CAAK,IAAA,CAAK,OAAA,EAAS,SAAS,CAAA;AAEpD,IAAA,IAAI;AACF,MAAA,MAAM,KAAA,GAAQ,MAAMA,QAAA,CAAG,OAAA,CAAQ,UAAU,CAAA;AAEzC,MAAA,IAAI,KAAA,CAAM,WAAW,CAAA,EAAG;AACtB,QAAA,OAAO,IAAA;AAAA,MACT;AAGA,MAAA,KAAA,CAAM,IAAA,EAAK;AAEX,MAAA,MAAM,OAAA,GAAU,MAAM,CAAC,CAAA;AACvB,MAAA,MAAM,OAAA,GAAe,IAAA,CAAA,IAAA,CAAK,UAAA,EAAY,OAAO,CAAA;AAE7C,MAAA,MAAM,OAAA,GAAU,MAAMA,QAAA,CAAG,QAAA,CAAS,SAAS,OAAO,CAAA;AAClD,MAAA,OAAO,IAAA,CAAK,MAAM,OAAO,CAAA;AAAA,IAC3B,SAAS,KAAA,EAAO;AACd,MAAA,OAAA,CAAQ,KAAA,CAAM,0CAA0C,KAAK,CAAA;AAC7D,MAAA,OAAO,IAAA;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAA,CAAS,OAAA,GAA2B,EAAC,EAAmB;AAC5D,IAAA,MAAM,OAAc,EAAC;AAGrB,IAAA,MAAM,QAAA,GAAwB,OAAA,CAAQ,MAAA,GAClC,CAAC,OAAA,CAAQ,MAAM,CAAA,GACf,CAAC,SAAA,EAAW,SAAA,EAAW,UAAA,EAAY,QAAA,EAAU,WAAW,CAAA;AAE5D,IAAA,KAAA,MAAW,UAAU,QAAA,EAAU;AAC7B,MAAA,MAAM,SAAA,GAAiB,IAAA,CAAA,IAAA,CAAK,IAAA,CAAK,OAAA,EAAS,MAAM,CAAA;AAEhD,MAAA,IAAI;AACF,QAAA,MAAM,KAAA,GAAQ,MAAMA,QAAA,CAAG,OAAA,CAAQ,SAAS,CAAA;AAExC,QAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,UAAA,MAAM,OAAA,GAAe,IAAA,CAAA,IAAA,CAAK,SAAA,EAAW,IAAI,CAAA;AACzC,UAAA,MAAM,OAAA,GAAU,MAAMA,QAAA,CAAG,QAAA,CAAS,SAAS,OAAO,CAAA;AAClD,UAAA,MAAM,GAAA,GAAM,IAAA,CAAK,KAAA,CAAM,OAAO,CAAA;AAG9B,UAAA,IAAI,OAAA,CAAQ,IAAA,IAAQ,GAAA,CAAI,IAAA,KAAS,QAAQ,IAAA,EAAM;AAC/C,UAAA,IAAI,OAAA,CAAQ,MAAA,IAAU,GAAA,CAAI,MAAA,KAAW,QAAQ,MAAA,EAAQ;AAErD,UAAA,IAAA,CAAK,KAAK,GAAG,CAAA;AAAA,QACf;AAAA,MACF,SAAS,KAAA,EAAO;AAEd,QAAA;AAAA,MACF;AAAA,IACF;AAGA,IAAA,IAAA,CAAK,KAAK,CAAC,CAAA,EAAG,CAAA,KAAM,IAAI,KAAK,CAAA,CAAE,OAAO,CAAA,CAAE,OAAA,KAAY,IAAI,IAAA,CAAK,EAAE,OAAO,CAAA,CAAE,SAAS,CAAA;AAGjF,IAAA,MAAM,MAAA,GAAS,QAAQ,MAAA,IAAU,CAAA;AACjC,IAAA,MAAM,KAAA,GAAQ,QAAQ,KAAA,IAAS,GAAA;AAE/B,IAAA,OAAO,IAAA,CAAK,KAAA,CAAM,MAAA,EAAQ,MAAA,GAAS,KAAK,CAAA;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA
,EAKA,MAAM,UAAU,KAAA,EAAgC;AAC9C,IAAA,MAAM,GAAA,GAAM,MAAM,IAAA,CAAK,MAAA,CAAO,KAAK,CAAA;AAEnC,IAAA,IAAI,CAAC,GAAA,EAAK;AACR,MAAA,OAAO,KAAA;AAAA,IACT;AAGA,IAAA,IAAI,GAAA,CAAI,MAAA,KAAW,SAAA,IAAa,GAAA,CAAI,WAAW,SAAA,EAAW;AACxD,MAAA,OAAO,KAAA;AAAA,IACT;AAEA,IAAA,MAAM,YAAY,GAAA,CAAI,MAAA;AACtB,IAAA,GAAA,CAAI,MAAA,GAAS,WAAA;AACb,IAAA,GAAA,CAAI,WAAA,GAAA,iBAAc,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AAEzC,IAAA,MAAM,IAAA,CAAK,SAAA,CAAU,GAAA,EAAK,SAAS,CAAA;AACnC,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAA,CAAe,cAAA,GAAyB,EAAA,EAAqB;AACjE,IAAA,MAAM,aAAa,IAAA,CAAK,GAAA,EAAI,GAAK,cAAA,GAAiB,KAAK,EAAA,GAAK,GAAA;AAC5D,IAAA,IAAI,YAAA,GAAe,CAAA;AAEnB,IAAA,MAAM,eAAA,GAA+B,CAAC,UAAA,EAAY,QAAA,EAAU,WAAW,CAAA;AAEvE,IAAA,KAAA,MAAW,UAAU,eAAA,EAAiB;AACpC,MAAA,MAAM,SAAA,GAAiB,IAAA,CAAA,IAAA,CAAK,IAAA,CAAK,OAAA,EAAS,MAAM,CAAA;AAEhD,MAAA,IAAI;AACF,QAAA,MAAM,KAAA,GAAQ,MAAMA,QAAA,CAAG,OAAA,CAAQ,SAAS,CAAA;AAExC,QAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,UAAA,MAAM,OAAA,GAAe,IAAA,CAAA,IAAA,CAAK,SAAA,EAAW,IAAI,CAAA;AACzC,UAAA,MAAM,OAAA,GAAU,MAAMA,QAAA,CAAG,QAAA,CAAS,SAAS,OAAO,CAAA;AAClD,UAAA,MAAM,GAAA,GAAM,IAAA,CAAK,KAAA,CAAM,OAAO,CAAA;AAE9B,UAAA,IAAI,IAAI,WAAA,EAAa;AACnB,YAAA,MAAM,gBAAgB,IAAI,IAAA,CAAK,GAAA,CAAI,WAAW,EAAE,OAAA,EAAQ;AAExD,YAAA,IAAI,gBAAgB,UAAA,EAAY;AAC9B,cAAA,MAAMA,QAAA,CAAG,OAAO,OAAO,CAAA;AACvB,cAAA,YAAA,EAAA;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF,SAAS,KAAA,EAAO;AACd,QAAA,OAAA,CAAQ,KAAA,CAAM,CAAA,6BAAA,EAAgC,MAAM,CAAA,MAAA,CAAA,EAAU,KAAK,CAAA;AAAA,MACrE;AAAA,IACF;AAEA,IAAA,IAAI,eAAe,CAAA,EAAG;AACpB,MAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,sBAAA,EAAyB,YAAY,CAAA,SAAA,CAAW,CAAA;AAAA,IAC9D;AAEA,IAAA,OAAO,YAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAA,CAAW,OAAc,MAAA,EAA2B;AAC1D,IAAA,OAAY,UAAK,IAAA,CAAK,OAAA,EAAS,MAAA,EAAQ,CAAA,EAAG,KAAK,CAAA,KAAA,CAAO,CAAA;AAAA,EACxD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAA,GAMH;AACD,IAAA,MAAM,KAAA,GAAQ;AAAA,MACZ,OAAA,EAAS,CAAA;AAAA,MACT,OAAA,EAAS,CAAA;AAAA,MACT,QAAA,EAAU,CAAA;AAAA,MACV,MAAA,EAAQ,CAAA;AAAA,MACR,SAAA,EAAW;AAAA,KACb;AAEA,IAAA,MAAM,WAAwB,CAAC,SAAA,EAAW,SA
AA,EAAW,UAAA,EAAY,UAAU,WAAW,CAAA;AAEtF,IAAA,KAAA,MAAW,UAAU,QAAA,EAAU;AAC7B,MAAA,MAAM,SAAA,GAAiB,IAAA,CAAA,IAAA,CAAK,IAAA,CAAK,OAAA,EAAS,MAAM,CAAA;AAEhD,MAAA,IAAI;AACF,QAAA,MAAM,KAAA,GAAQ,MAAMA,QAAA,CAAG,OAAA,CAAQ,SAAS,CAAA;AACxC,QAAA,KAAA,CAAM,MAAM,IAAI,KAAA,CAAM,MAAA;AAAA,MACxB,SAAS,KAAA,EAAO;AAEd,QAAA,KAAA,CAAM,MAAM,CAAA,GAAI,CAAA;AAAA,MAClB;AAAA,IACF;AAEA,IAAA,OAAO,KAAA;AAAA,EACT;AACF;AAGA,IAAI,QAAA,GAA4B,IAAA;AAEzB,SAAS,WAAA,GAAwB;AACtC,EAAA,IAAI,CAAC,QAAA,EAAU;AACb,IAAA,MAAM,IAAI,MAAM,4DAA4D,CAAA;AAAA,EAC9E;AACA,EAAA,OAAO,QAAA;AACT;AAEA,eAAsB,mBAAmB,MAAA,EAA2C;AAClF,EAAA,QAAA,GAAW,IAAI,SAAS,MAAM,CAAA;AAC9B,EAAA,MAAM,SAAS,UAAA,EAAW;AAC1B,EAAA,OAAO,QAAA;AACT;;;ACjRO,IAAe,YAAf,MAAyB;AAAA,EACtB,OAAA,GAAU,KAAA;AAAA,EACV,UAAA,GAAyB,IAAA;AAAA,EACzB,cAAA;AAAA,EACA,cAAA;AAAA,EAER,WAAA,CACE,cAAA,GAAyB,GAAA,EACzB,cAAA,GAAyB,GAAA,EACzB;AACA,IAAA,IAAA,CAAK,cAAA,GAAiB,cAAA;AACtB,IAAA,IAAA,CAAK,cAAA,GAAiB,cAAA;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,KAAA,GAAuB;AAC3B,IAAA,IAAA,CAAK,OAAA,GAAU,IAAA;AACf,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,CAAA,EAAI,IAAA,CAAK,aAAA,EAAe,CAAA,SAAA,CAAW,CAAA;AAE/C,IAAA,OAAO,KAAK,OAAA,EAAS;AACnB,MAAA,IAAI;AACF,QAAA,MAAM,GAAA,GAAM,MAAM,IAAA,CAAK,WAAA,EAAY;AAEnC,QAAA,IAAI,GAAA,EAAK;AACP,UAAA,MAAM,IAAA,CAAK,WAAW,GAAG,CAAA;AAAA,QAC3B,CAAA,MAAO;AAEL,UAAA,MAAM,IAAA,CAAK,KAAA,CAAM,IAAA,CAAK,cAAc,CAAA;AAAA,QACtC;AAAA,MACF,SAAS,KAAA,EAAO;AACd,QAAA,OAAA,CAAQ,MAAM,CAAA,CAAA,EAAI,IAAA,CAAK,aAAA,EAAe,yBAAyB,KAAK,CAAA;AAEpE,QAAA,MAAM,IAAA,CAAK,KAAA,CAAM,IAAA,CAAK,cAAc,CAAA;AAAA,MACtC;AAAA,IACF;AAEA,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,CAAA,EAAI,IAAA,CAAK,aAAA,EAAe,CAAA,SAAA,CAAW,CAAA;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,IAAA,GAAsB;AAC1B,IAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,CAAA,EAAI,IAAA,CAAK,aAAA,EAAe,CAAA,aAAA,CAAe,CAAA;AACnD,IAAA,IAAA,CAAK,OAAA,GAAU,KAAA;AAGf,IAAA,MAAM,OAAA,GAAU,GAAA;AAChB,IAAA,MAAM,SAAA,GAAY,KAAK,GAAA,EAAI;AAE3B,IAAA,OAAO,KAAK,UAAA,IAAe,IAAA,CAAK,GAAA,EAAI,GAAI,YAAa,OAAA,EAAS;AAC5D,MAAA,MAAM,IAAA,CAAK,MAAM,GAAG,CAAA;AAAA,IACtB;AAEA,IAAA,IAAI,KAAK,UAAA,EAAY;A
ACnB,MAAA,OAAA,CAAQ,IAAA,CAAK,IAAI,IAAA,CAAK,aAAA,EAAe,CAAA,uCAAA,EAA0C,IAAA,CAAK,UAAA,CAAW,EAAE,CAAA,CAAE,CAAA;AAAA,IACrG;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,WAAA,GAAmC;AAC/C,IAAA,MAAMC,YAAW,WAAA,EAAY;AAC7B,IAAA,MAAM,GAAA,GAAM,MAAMA,SAAAA,CAAS,kBAAA,EAAmB;AAE9C,IAAA,IAAI,GAAA,IAAO,IAAA,CAAK,aAAA,CAAc,GAAG,CAAA,EAAG;AAClC,MAAA,OAAO,GAAA;AAAA,IACT;AAEA,IAAA,OAAO,IAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,WAAW,GAAA,EAAyB;AAChD,IAAA,IAAA,CAAK,UAAA,GAAa,GAAA;AAClB,IAAA,MAAMA,YAAW,WAAA,EAAY;AAE7B,IAAA,IAAI;AAEF,MAAA,MAAM,YAAY,GAAA,CAAI,MAAA;AACtB,MAAA,GAAA,CAAI,MAAA,GAAS,SAAA;AACb,MAAA,GAAA,CAAI,SAAA,GAAA,iBAAY,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AACvC,MAAA,MAAMA,SAAAA,CAAS,SAAA,CAAU,GAAA,EAAK,SAAS,CAAA;AAEvC,MAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,CAAA,EAAI,IAAA,CAAK,aAAA,EAAe,CAAA,2BAAA,EAAuB,GAAA,CAAI,EAAE,CAAA,QAAA,EAAW,GAAA,CAAI,IAAI,CAAA,CAAA,CAAG,CAAA;AAGvF,MAAA,MAAM,IAAA,CAAK,WAAW,GAAG,CAAA;AAGzB,MAAA,GAAA,CAAI,MAAA,GAAS,UAAA;AACb,MAAA,GAAA,CAAI,WAAA,GAAA,iBAAc,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AACzC,MAAA,MAAMA,SAAAA,CAAS,SAAA,CAAU,GAAA,EAAK,SAAS,CAAA;AAEvC,MAAA,OAAA,CAAQ,GAAA,CAAI,IAAI,IAAA,CAAK,aAAA,EAAe,CAAA,aAAA,EAAW,GAAA,CAAI,EAAE,CAAA,uBAAA,CAAyB,CAAA;AAAA,IAEhF,SAAS,KAAA,EAAO;AACd,MAAA,MAAM,IAAA,CAAK,gBAAA,CAAiB,GAAA,EAAK,KAAK,CAAA;AAAA,IACxC,CAAA,SAAE;AACA,MAAA,IAAA,CAAK,UAAA,GAAa,IAAA;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAgB,gBAAA,CAAiB,GAAA,EAAU,KAAA,EAA2B;AACpE,IAAA,MAAMA,YAAW,WAAA,EAAY;AAC7B,IAAA,GAAA,CAAI,UAAA,EAAA;AAEJ,IAAA,IAAI,GAAA,CAAI,UAAA,GAAa,GAAA,CAAI,UAAA,EAAY;AACnC,MAAA,OAAA,CAAQ,GAAA,CAAI,CAAA,CAAA,EAAI,IAAA,CAAK,aAAA,EAAe,CAAA,MAAA,EAAS,GAAA,CAAI,EAAE,CAAA,qBAAA,EAAwB,GAAA,CAAI,UAAU,CAAA,CAAA,EAAI,GAAA,CAAI,UAAU,CAAA,CAAA,CAAG,CAAA;AAC9G,MAAA,OAAA,CAAQ,IAAI,CAAA,CAAA,EAAI,IAAA,CAAK,aAAA,EAAe,YAAY,KAAK,CAAA;AAGrD,MAAA,GAAA,CAAI,MAAA,GAAS,SAAA;AACb,MAAA,GAAA,CAAI,SAAA,GAAY,MAAA;AAChB,MAAA,MAAMA,SAAAA,CAAS,SAAA,CAAU,GAAA,EAAK,SAAS,CAAA;AAAA,IAEzC,CAAA,MAAO;AACL,MAAA,OAAA,CAAQ,KAAA,CAAM,CAAA,CAAA,EAAI,IAAA,CAAK,aAAA,EAAe,CAAA,aAAA,EAAW,GAAA,CAAI
,EAAE,CAAA,0BAAA,EAA6B,GAAA,CAAI,UAAU,CAAA,QAAA,CAAU,CAAA;AAC5G,MAAA,OAAA,CAAQ,MAAM,CAAA,CAAA,EAAI,IAAA,CAAK,aAAA,EAAe,YAAY,KAAK,CAAA;AAGvD,MAAA,GAAA,CAAI,MAAA,GAAS,QAAA;AACb,MAAA,GAAA,CAAI,QAAQ,KAAA,YAAiB,KAAA,GAAQ,KAAA,CAAM,OAAA,GAAU,OAAO,KAAK,CAAA;AACjE,MAAA,GAAA,CAAI,WAAA,GAAA,iBAAc,IAAI,IAAA,EAAK,EAAE,WAAA,EAAY;AACzC,MAAA,MAAMA,SAAAA,CAAS,SAAA,CAAU,GAAA,EAAK,SAAS,CAAA;AAAA,IACzC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAgB,kBAAkB,GAAA,EAAyB;AACzD,IAAA,IAAI;AACF,MAAA,MAAMA,YAAW,WAAA,EAAY;AAC7B,MAAA,MAAMA,SAAAA,CAAS,UAAU,GAAG,CAAA;AAAA,IAC9B,SAAS,KAAA,EAAO;AACd,MAAA,OAAA,CAAQ,KAAK,CAAA,CAAA,EAAI,IAAA,CAAK,aAAA,EAAe,oCAAoC,KAAK,CAAA;AAAA,IAEhF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKU,MAAM,EAAA,EAA2B;AACzC,IAAA,OAAO,IAAI,OAAA,CAAQ,CAAA,OAAA,KAAW,UAAA,CAAW,OAAA,EAAS,EAAE,CAAC,CAAA;AAAA,EACvD;AAoBF","file":"index.js","sourcesContent":["/**\n * Job Queue Manager\n *\n * Filesystem-based job queue with atomic operations.\n * Jobs are stored in directories by status for easy polling.\n */\n\nimport { promises as fs } from 'fs';\nimport * as path from 'path';\nimport type { Job, JobStatus, JobQueryFilters } from './types';\nimport type { JobId } from '@semiont/api-client';\n\nexport interface JobQueueConfig {\n dataDir: string;\n}\n\nexport class JobQueue {\n private jobsDir: string;\n\n constructor(config: JobQueueConfig) {\n this.jobsDir = path.join(config.dataDir, 'jobs');\n }\n\n /**\n * Initialize job queue directories\n */\n async initialize(): Promise<void> {\n const statuses: JobStatus[] = ['pending', 'running', 'complete', 'failed', 'cancelled'];\n\n for (const status of statuses) {\n const dir = path.join(this.jobsDir, status);\n await fs.mkdir(dir, { recursive: true });\n }\n\n console.log('[JobQueue] Initialized job directories');\n }\n\n /**\n * Create a new job\n */\n async createJob(job: Job): Promise<void> {\n const jobPath = this.getJobPath(job.id, job.status);\n await fs.writeFile(jobPath, JSON.stringify(job, null, 2), 'utf-8');\n console.log(`[JobQueue] Created 
job ${job.id} with status ${job.status}`);\n }\n\n /**\n * Get a job by ID (searches all status directories)\n */\n async getJob(jobId: JobId): Promise<Job | null> {\n const statuses: JobStatus[] = ['pending', 'running', 'complete', 'failed', 'cancelled'];\n\n for (const status of statuses) {\n const jobPath = this.getJobPath(jobId, status);\n try {\n const content = await fs.readFile(jobPath, 'utf-8');\n return JSON.parse(content) as Job;\n } catch (error) {\n // File doesn't exist in this status directory, try next\n continue;\n }\n }\n\n return null;\n }\n\n /**\n * Update a job (atomic: delete old, write new)\n */\n async updateJob(job: Job, oldStatus?: JobStatus): Promise<void> {\n // If oldStatus provided, delete from old location\n if (oldStatus && oldStatus !== job.status) {\n const oldPath = this.getJobPath(job.id, oldStatus);\n try {\n await fs.unlink(oldPath);\n } catch (error) {\n // Ignore if file doesn't exist\n }\n }\n\n // Write to new location\n const newPath = this.getJobPath(job.id, job.status);\n await fs.writeFile(newPath, JSON.stringify(job, null, 2), 'utf-8');\n\n if (oldStatus && oldStatus !== job.status) {\n console.log(`[JobQueue] Moved job ${job.id} from ${oldStatus} to ${job.status}`);\n } else {\n console.log(`[JobQueue] Updated job ${job.id} (status: ${job.status})`);\n }\n }\n\n /**\n * Poll for next pending job (FIFO)\n */\n async pollNextPendingJob(): Promise<Job | null> {\n const pendingDir = path.join(this.jobsDir, 'pending');\n\n try {\n const files = await fs.readdir(pendingDir);\n\n if (files.length === 0) {\n return null;\n }\n\n // Sort by filename (job IDs have timestamps via nanoid)\n files.sort();\n\n const jobFile = files[0]!;\n const jobPath = path.join(pendingDir, jobFile);\n\n const content = await fs.readFile(jobPath, 'utf-8');\n return JSON.parse(content) as Job;\n } catch (error) {\n console.error('[JobQueue] Error polling pending jobs:', error);\n return null;\n }\n }\n\n /**\n * List jobs with filters\n */\n async 
listJobs(filters: JobQueryFilters = {}): Promise<Job[]> {\n const jobs: Job[] = [];\n\n // Determine which status directories to scan\n const statuses: JobStatus[] = filters.status\n ? [filters.status]\n : ['pending', 'running', 'complete', 'failed', 'cancelled'];\n\n for (const status of statuses) {\n const statusDir = path.join(this.jobsDir, status);\n\n try {\n const files = await fs.readdir(statusDir);\n\n for (const file of files) {\n const jobPath = path.join(statusDir, file);\n const content = await fs.readFile(jobPath, 'utf-8');\n const job = JSON.parse(content) as Job;\n\n // Apply filters\n if (filters.type && job.type !== filters.type) continue;\n if (filters.userId && job.userId !== filters.userId) continue;\n\n jobs.push(job);\n }\n } catch (error) {\n // Directory might not exist yet\n continue;\n }\n }\n\n // Sort by created descending (newest first)\n jobs.sort((a, b) => new Date(b.created).getTime() - new Date(a.created).getTime());\n\n // Apply pagination\n const offset = filters.offset || 0;\n const limit = filters.limit || 100;\n\n return jobs.slice(offset, offset + limit);\n }\n\n /**\n * Cancel a job\n */\n async cancelJob(jobId: JobId): Promise<boolean> {\n const job = await this.getJob(jobId);\n\n if (!job) {\n return false;\n }\n\n // Can only cancel pending or running jobs\n if (job.status !== 'pending' && job.status !== 'running') {\n return false;\n }\n\n const oldStatus = job.status;\n job.status = 'cancelled';\n job.completedAt = new Date().toISOString();\n\n await this.updateJob(job, oldStatus);\n return true;\n }\n\n /**\n * Clean up old completed/failed jobs (older than retention period)\n */\n async cleanupOldJobs(retentionHours: number = 24): Promise<number> {\n const cutoffTime = Date.now() - (retentionHours * 60 * 60 * 1000);\n let deletedCount = 0;\n\n const cleanupStatuses: JobStatus[] = ['complete', 'failed', 'cancelled'];\n\n for (const status of cleanupStatuses) {\n const statusDir = path.join(this.jobsDir, status);\n\n try 
{\n const files = await fs.readdir(statusDir);\n\n for (const file of files) {\n const jobPath = path.join(statusDir, file);\n const content = await fs.readFile(jobPath, 'utf-8');\n const job = JSON.parse(content) as Job;\n\n if (job.completedAt) {\n const completedTime = new Date(job.completedAt).getTime();\n\n if (completedTime < cutoffTime) {\n await fs.unlink(jobPath);\n deletedCount++;\n }\n }\n }\n } catch (error) {\n console.error(`[JobQueue] Error cleaning up ${status} jobs:`, error);\n }\n }\n\n if (deletedCount > 0) {\n console.log(`[JobQueue] Cleaned up ${deletedCount} old jobs`);\n }\n\n return deletedCount;\n }\n\n /**\n * Get job file path\n */\n private getJobPath(jobId: JobId, status: JobStatus): string {\n return path.join(this.jobsDir, status, `${jobId}.json`);\n }\n\n /**\n * Get statistics about the queue\n */\n async getStats(): Promise<{\n pending: number;\n running: number;\n complete: number;\n failed: number;\n cancelled: number;\n }> {\n const stats = {\n pending: 0,\n running: 0,\n complete: 0,\n failed: 0,\n cancelled: 0\n };\n\n const statuses: JobStatus[] = ['pending', 'running', 'complete', 'failed', 'cancelled'];\n\n for (const status of statuses) {\n const statusDir = path.join(this.jobsDir, status);\n\n try {\n const files = await fs.readdir(statusDir);\n stats[status] = files.length;\n } catch (error) {\n // Directory might not exist yet\n stats[status] = 0;\n }\n }\n\n return stats;\n }\n}\n\n// Singleton instance\nlet jobQueue: JobQueue | null = null;\n\nexport function getJobQueue(): JobQueue {\n if (!jobQueue) {\n throw new Error('JobQueue not initialized. 
Call initializeJobQueue() first.');\n }\n return jobQueue;\n}\n\nexport async function initializeJobQueue(config: JobQueueConfig): Promise<JobQueue> {\n jobQueue = new JobQueue(config);\n await jobQueue.initialize();\n return jobQueue;\n}\n","/**\n * Job Worker Base Class\n *\n * Abstract worker that polls the job queue and processes jobs.\n * Subclasses implement specific job processing logic.\n */\n\nimport type { Job } from './types';\nimport { getJobQueue } from './job-queue';\n\nexport abstract class JobWorker {\n private running = false;\n private currentJob: Job | null = null;\n private pollIntervalMs: number;\n private errorBackoffMs: number;\n\n constructor(\n pollIntervalMs: number = 1000,\n errorBackoffMs: number = 5000\n ) {\n this.pollIntervalMs = pollIntervalMs;\n this.errorBackoffMs = errorBackoffMs;\n }\n\n /**\n * Start the worker (polls queue in loop)\n */\n async start(): Promise<void> {\n this.running = true;\n console.log(`[${this.getWorkerName()}] Started`);\n\n while (this.running) {\n try {\n const job = await this.pollNextJob();\n\n if (job) {\n await this.processJob(job);\n } else {\n // No jobs available, wait before polling again\n await this.sleep(this.pollIntervalMs);\n }\n } catch (error) {\n console.error(`[${this.getWorkerName()}] Error in main loop:`, error);\n // Back off on error to avoid tight error loops\n await this.sleep(this.errorBackoffMs);\n }\n }\n\n console.log(`[${this.getWorkerName()}] Stopped`);\n }\n\n /**\n * Stop the worker (graceful shutdown)\n */\n async stop(): Promise<void> {\n console.log(`[${this.getWorkerName()}] Stopping...`);\n this.running = false;\n\n // Wait for current job to finish (with timeout)\n const timeout = 60000; // 60 seconds\n const startTime = Date.now();\n\n while (this.currentJob && (Date.now() - startTime) < timeout) {\n await this.sleep(100);\n }\n\n if (this.currentJob) {\n console.warn(`[${this.getWorkerName()}] Forced shutdown while processing job ${this.currentJob.id}`);\n }\n }\n\n 
/**\n * Poll for next job to process\n */\n private async pollNextJob(): Promise<Job | null> {\n const jobQueue = getJobQueue();\n const job = await jobQueue.pollNextPendingJob();\n\n if (job && this.canProcessJob(job)) {\n return job;\n }\n\n return null;\n }\n\n /**\n * Process a job (handles state transitions and error handling)\n */\n private async processJob(job: Job): Promise<void> {\n this.currentJob = job;\n const jobQueue = getJobQueue();\n\n try {\n // Move to running state\n const oldStatus = job.status;\n job.status = 'running';\n job.startedAt = new Date().toISOString();\n await jobQueue.updateJob(job, oldStatus);\n\n console.log(`[${this.getWorkerName()}] 🔄 Processing job ${job.id} (type: ${job.type})`);\n\n // Execute job-specific logic\n await this.executeJob(job);\n\n // Move to complete state\n job.status = 'complete';\n job.completedAt = new Date().toISOString();\n await jobQueue.updateJob(job, 'running');\n\n console.log(`[${this.getWorkerName()}] ✅ Job ${job.id} completed successfully`);\n\n } catch (error) {\n await this.handleJobFailure(job, error);\n } finally {\n this.currentJob = null;\n }\n }\n\n /**\n * Handle job failure (retry or move to failed)\n */\n protected async handleJobFailure(job: Job, error: any): Promise<void> {\n const jobQueue = getJobQueue();\n job.retryCount++;\n\n if (job.retryCount < job.maxRetries) {\n console.log(`[${this.getWorkerName()}] Job ${job.id} failed, will retry (${job.retryCount}/${job.maxRetries})`);\n console.log(`[${this.getWorkerName()}] Error:`, error);\n\n // Move back to pending for retry\n job.status = 'pending';\n job.startedAt = undefined; // Clear start time for retry\n await jobQueue.updateJob(job, 'running');\n\n } else {\n console.error(`[${this.getWorkerName()}] ❌ Job ${job.id} failed permanently after ${job.retryCount} retries`);\n console.error(`[${this.getWorkerName()}] Error:`, error);\n\n // Move to failed state\n job.status = 'failed';\n job.error = error instanceof Error ? 
error.message : String(error);\n job.completedAt = new Date().toISOString();\n await jobQueue.updateJob(job, 'running');\n }\n }\n\n /**\n * Update job progress (best-effort, doesn't throw)\n */\n protected async updateJobProgress(job: Job): Promise<void> {\n try {\n const jobQueue = getJobQueue();\n await jobQueue.updateJob(job);\n } catch (error) {\n console.warn(`[${this.getWorkerName()}] Failed to update job progress:`, error);\n // Don't throw - progress updates are best-effort\n }\n }\n\n /**\n * Sleep utility\n */\n protected sleep(ms: number): Promise<void> {\n return new Promise(resolve => setTimeout(resolve, ms));\n }\n\n // Abstract methods to be implemented by subclasses\n\n /**\n * Get worker name (for logging)\n */\n protected abstract getWorkerName(): string;\n\n /**\n * Check if this worker can process the given job\n */\n protected abstract canProcessJob(job: Job): boolean;\n\n /**\n * Execute the job (job-specific logic)\n * This is where the actual work happens\n * Throw an error to trigger retry logic\n */\n protected abstract executeJob(job: Job): Promise<void>;\n}\n"]}
|
package/package.json
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@semiont/jobs",
|
|
3
|
+
"version": "0.2.28-build.40",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"description": "Filesystem-based job queue and worker infrastructure",
|
|
6
|
+
"main": "./dist/index.js",
|
|
7
|
+
"types": "./dist/index.d.ts",
|
|
8
|
+
"exports": {
|
|
9
|
+
".": {
|
|
10
|
+
"types": "./dist/index.d.ts",
|
|
11
|
+
"import": "./dist/index.js"
|
|
12
|
+
}
|
|
13
|
+
},
|
|
14
|
+
"files": [
|
|
15
|
+
"dist",
|
|
16
|
+
"README.md"
|
|
17
|
+
],
|
|
18
|
+
"scripts": {
|
|
19
|
+
"build": "npm run typecheck && tsup",
|
|
20
|
+
"typecheck": "tsc --noEmit",
|
|
21
|
+
"test": "vitest"
|
|
22
|
+
},
|
|
23
|
+
"dependencies": {
|
|
24
|
+
"@semiont/api-client": "*",
|
|
25
|
+
"@semiont/core": "*"
|
|
26
|
+
},
|
|
27
|
+
"devDependencies": {
|
|
28
|
+
"tsup": "^8.0.1",
|
|
29
|
+
"typescript": "^5.6.3",
|
|
30
|
+
"vitest": "^2.1.8"
|
|
31
|
+
},
|
|
32
|
+
"publishConfig": {
|
|
33
|
+
"access": "public"
|
|
34
|
+
},
|
|
35
|
+
"keywords": [
|
|
36
|
+
"job-queue",
|
|
37
|
+
"worker",
|
|
38
|
+
"task-queue",
|
|
39
|
+
"filesystem",
|
|
40
|
+
"background-jobs"
|
|
41
|
+
],
|
|
42
|
+
"author": "The AI Alliance",
|
|
43
|
+
"license": "Apache-2.0",
|
|
44
|
+
"repository": {
|
|
45
|
+
"type": "git",
|
|
46
|
+
"url": "https://github.com/The-AI-Alliance/semiont.git",
|
|
47
|
+
"directory": "packages/jobs"
|
|
48
|
+
}
|
|
49
|
+
}
|