@queuebase/worker 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/adapter.d.ts +36 -0
- package/dist/adapter.d.ts.map +1 -0
- package/dist/adapter.js +2 -0
- package/dist/adapter.js.map +1 -0
- package/dist/adapters/postgres.d.ts +4 -0
- package/dist/adapters/postgres.d.ts.map +1 -0
- package/dist/adapters/postgres.js +79 -0
- package/dist/adapters/postgres.js.map +1 -0
- package/dist/adapters/sqlite.d.ts +4 -0
- package/dist/adapters/sqlite.d.ts.map +1 -0
- package/dist/adapters/sqlite.js +68 -0
- package/dist/adapters/sqlite.js.map +1 -0
- package/dist/cron/index.d.ts +3 -0
- package/dist/cron/index.d.ts.map +1 -0
- package/dist/cron/index.js +3 -0
- package/dist/cron/index.js.map +1 -0
- package/dist/cron/lock.d.ts +3 -0
- package/dist/cron/lock.d.ts.map +1 -0
- package/dist/cron/lock.js +15 -0
- package/dist/cron/lock.js.map +1 -0
- package/dist/cron/tasks/cleanup-audit-logs.d.ts +3 -0
- package/dist/cron/tasks/cleanup-audit-logs.d.ts.map +1 -0
- package/dist/cron/tasks/cleanup-audit-logs.js +23 -0
- package/dist/cron/tasks/cleanup-audit-logs.js.map +1 -0
- package/dist/cron/tasks/cleanup-jobs.d.ts +3 -0
- package/dist/cron/tasks/cleanup-jobs.d.ts.map +1 -0
- package/dist/cron/tasks/cleanup-jobs.js +23 -0
- package/dist/cron/tasks/cleanup-jobs.js.map +1 -0
- package/dist/cron/tasks/index.d.ts +3 -0
- package/dist/cron/tasks/index.d.ts.map +1 -0
- package/dist/cron/tasks/index.js +3 -0
- package/dist/cron/tasks/index.js.map +1 -0
- package/dist/executor.d.ts +18 -0
- package/dist/executor.d.ts.map +1 -0
- package/dist/executor.js +142 -0
- package/dist/executor.js.map +1 -0
- package/dist/index.d.ts +7 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +6 -0
- package/dist/index.js.map +1 -0
- package/dist/queue.d.ts +15 -0
- package/dist/queue.d.ts.map +1 -0
- package/dist/queue.js +27 -0
- package/dist/queue.js.map +1 -0
- package/package.json +48 -0
package/dist/adapter.d.ts
ADDED
@@ -0,0 +1,36 @@
export interface WorkerJob {
    id: number;
    publicId: string;
    name: string;
    payload: string;
    status: string;
    attempt: number;
    maxAttempts: number;
    runAt: Date;
    createdAt: Date;
    startedAt: Date | null;
    completedAt: Date | null;
    result: string | null;
    error: string | null;
    backoffStrategy: string;
    backoffDelay: number;
    callbackUrl: string;
    projectId?: number;
}
export type JobLogEvent = "enqueued" | "started" | "completed" | "failed" | "retrying" | "cancelled";
export interface JobLogEntry {
    jobId: number;
    projectId?: number;
    event: JobLogEvent;
    source?: "system" | "user";
    metadata?: string | null;
}
export interface WorkerDb {
    claimJobs(limit: number): Promise<WorkerJob[]>;
    markRunning(jobId: number): Promise<void>;
    markCompleted(jobId: number, result: string | null): Promise<void>;
    markFailed(jobId: number, error: string): Promise<void>;
    markRetrying(jobId: number, nextAttempt: number, runAt: Date, error: string): Promise<void>;
    insertJobLog(entry: JobLogEntry): Promise<void>;
}
//# sourceMappingURL=adapter.d.ts.map
package/dist/adapter.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"adapter.d.ts","sourceRoot":"","sources":["../src/adapter.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,SAAS;IACxB,EAAE,EAAE,MAAM,CAAC;IACX,QAAQ,EAAE,MAAM,CAAC;IACjB,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;IAChB,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,MAAM,CAAC;IAChB,WAAW,EAAE,MAAM,CAAC;IACpB,KAAK,EAAE,IAAI,CAAC;IACZ,SAAS,EAAE,IAAI,CAAC;IAChB,SAAS,EAAE,IAAI,GAAG,IAAI,CAAC;IACvB,WAAW,EAAE,IAAI,GAAG,IAAI,CAAC;IACzB,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;IACtB,KAAK,EAAE,MAAM,GAAG,IAAI,CAAC;IACrB,eAAe,EAAE,MAAM,CAAC;IACxB,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,EAAE,MAAM,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,MAAM,WAAW,GAAG,UAAU,GAAG,SAAS,GAAG,WAAW,GAAG,QAAQ,GAAG,UAAU,GAAG,WAAW,CAAC;AAErG,MAAM,WAAW,WAAW;IAC1B,KAAK,EAAE,MAAM,CAAC;IACd,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,KAAK,EAAE,WAAW,CAAC;IACnB,MAAM,CAAC,EAAE,QAAQ,GAAG,MAAM,CAAC;IAC3B,QAAQ,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CAC1B;AAED,MAAM,WAAW,QAAQ;IACvB,SAAS,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,SAAS,EAAE,CAAC,CAAC;IAC/C,WAAW,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IAC1C,aAAa,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IACnE,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IACxD,YAAY,CAAC,KAAK,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IAC5F,YAAY,CAAC,KAAK,EAAE,WAAW,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;CACjD"}
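The WorkerDb interface above is the seam between the executor and storage; the SQLite and Postgres adapters below implement it against real tables. Purely to illustrate the contract, here is a minimal in-memory sketch of the kind that could back unit tests — createInMemoryWorkerDb is a hypothetical helper, not something the package ships:
import type { JobLogEntry, WorkerDb, WorkerJob } from '@queuebase/worker';

// Hypothetical in-memory adapter: jobs live in one array, log entries in another.
export function createInMemoryWorkerDb(seed: WorkerJob[] = []): WorkerDb {
    const jobs = [...seed];
    const logs: JobLogEntry[] = [];
    const find = (id: number) => jobs.find((j) => j.id === id);
    return {
        // Return due pending jobs; the executor calls markRunning right after claiming.
        async claimJobs(limit) {
            const now = Date.now();
            return jobs
                .filter((j) => j.status === 'pending' && j.runAt.getTime() <= now)
                .slice(0, limit);
        },
        async markRunning(jobId) {
            const job = find(jobId);
            if (job) { job.status = 'running'; job.startedAt = new Date(); }
        },
        async markCompleted(jobId, result) {
            const job = find(jobId);
            if (job) { job.status = 'completed'; job.completedAt = new Date(); job.result = result; }
        },
        async markFailed(jobId, error) {
            const job = find(jobId);
            if (job) { job.status = 'failed'; job.completedAt = new Date(); job.error = error; }
        },
        async markRetrying(jobId, nextAttempt, runAt, error) {
            const job = find(jobId);
            if (job) { job.status = 'pending'; job.attempt = nextAttempt; job.runAt = runAt; job.error = error; }
        },
        async insertJobLog(entry) {
            logs.push(entry);
        },
    };
}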
package/dist/adapter.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"adapter.js","sourceRoot":"","sources":["../src/adapter.ts"],"names":[],"mappings":""}
package/dist/adapters/postgres.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"postgres.d.ts","sourceRoot":"","sources":["../../src/adapters/postgres.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AACzD,OAAO,KAAK,EAAE,QAAQ,EAA0B,MAAM,eAAe,CAAC;AAEtE,wBAAgB,sBAAsB,CAAC,EAAE,EAAE,UAAU,GAAG,QAAQ,CAkF/D"}
package/dist/adapters/postgres.js
ADDED
@@ -0,0 +1,79 @@
import { jobs, jobLogs, eq, sql } from '@queuebase/db/postgres';
export function createPostgresWorkerDb(db) {
    return {
        async claimJobs(limit) {
            // Atomic claim: SELECT ... FOR UPDATE SKIP LOCKED + UPDATE in a single CTE
            const rows = await db.execute(sql`
                WITH claimable AS (
                    SELECT id FROM ${jobs}
                    WHERE ${jobs.status} = 'pending'
                        AND ${jobs.runAt} <= now()
                    ORDER BY ${jobs.runAt}
                    LIMIT ${limit}
                    FOR UPDATE SKIP LOCKED
                ),
                claimed AS (
                    UPDATE ${jobs}
                    SET status = 'running', started_at = now()
                    FROM claimable
                    WHERE ${jobs.id} = claimable.id
                    RETURNING ${jobs.id}, ${jobs.publicId}, ${jobs.name}, ${jobs.payload},
                        ${jobs.status}, ${jobs.attempt}, ${jobs.maxAttempts}, ${jobs.runAt},
                        ${jobs.createdAt}, ${jobs.startedAt}, ${jobs.completedAt},
                        ${jobs.result}, ${jobs.error}, ${jobs.backoffStrategy},
                        ${jobs.backoffDelay}, ${jobs.callbackUrl}, ${jobs.projectId}
                )
                SELECT * FROM claimed
            `);
            return rows.map((row) => ({
                id: row.id,
                publicId: row.public_id,
                name: row.name,
                payload: row.payload,
                status: row.status,
                attempt: row.attempt,
                maxAttempts: row.max_attempts,
                runAt: new Date(row.run_at),
                createdAt: new Date(row.created_at),
                startedAt: row.started_at ? new Date(row.started_at) : null,
                completedAt: row.completed_at ? new Date(row.completed_at) : null,
                result: row.result ?? null,
                error: row.error ?? null,
                backoffStrategy: row.backoff_strategy,
                backoffDelay: row.backoff_delay,
                callbackUrl: row.callback_url,
                projectId: row.project_id,
            }));
        },
        // Already marked running in claimJobs CTE
        async markRunning(_jobId) { },
        async markCompleted(jobId, result) {
            await db
                .update(jobs)
                .set({ status: 'completed', completedAt: new Date(), result })
                .where(eq(jobs.id, jobId));
        },
        async markFailed(jobId, error) {
            await db
                .update(jobs)
                .set({ status: 'failed', completedAt: new Date(), error })
                .where(eq(jobs.id, jobId));
        },
        async markRetrying(jobId, nextAttempt, runAt, error) {
            await db
                .update(jobs)
                .set({ status: 'pending', attempt: nextAttempt, runAt, error })
                .where(eq(jobs.id, jobId));
        },
        async insertJobLog(entry) {
            await db.insert(jobLogs).values({
                jobId: entry.jobId,
                projectId: entry.projectId ?? 0,
                event: entry.event,
                source: entry.source ?? 'system',
                metadata: entry.metadata ?? null,
            });
        },
    };
}
//# sourceMappingURL=postgres.js.map
package/dist/adapters/postgres.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"postgres.js","sourceRoot":"","sources":["../../src/adapters/postgres.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,EAAE,EAAE,GAAG,EAAE,MAAM,wBAAwB,CAAC;AAIhE,MAAM,UAAU,sBAAsB,CAAC,EAAc;IACnD,OAAO;QACL,KAAK,CAAC,SAAS,CAAC,KAAa;YAC3B,2EAA2E;YAC3E,MAAM,IAAI,GAAG,MAAM,EAAE,CAAC,OAAO,CAAC,GAAG,CAAA;;2BAEZ,IAAI;kBACb,IAAI,CAAC,MAAM;kBACX,IAAI,CAAC,KAAK;qBACP,IAAI,CAAC,KAAK;kBACb,KAAK;;;;mBAIJ,IAAI;;;kBAGL,IAAI,CAAC,EAAE;sBACH,IAAI,CAAC,EAAE,KAAK,IAAI,CAAC,QAAQ,KAAK,IAAI,CAAC,IAAI,KAAK,IAAI,CAAC,OAAO;cAChE,IAAI,CAAC,MAAM,KAAK,IAAI,CAAC,OAAO,KAAK,IAAI,CAAC,WAAW,KAAK,IAAI,CAAC,KAAK;cAChE,IAAI,CAAC,SAAS,KAAK,IAAI,CAAC,SAAS,KAAK,IAAI,CAAC,WAAW;cACtD,IAAI,CAAC,MAAM,KAAK,IAAI,CAAC,KAAK,KAAK,IAAI,CAAC,eAAe;cACnD,IAAI,CAAC,YAAY,KAAK,IAAI,CAAC,WAAW,KAAK,IAAI,CAAC,SAAS;;;OAGhE,CAAC,CAAC;YAEH,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,GAA4B,EAAE,EAAE,CAAC,CAAC;gBACjD,EAAE,EAAE,GAAG,CAAC,EAAY;gBACpB,QAAQ,EAAE,GAAG,CAAC,SAAmB;gBACjC,IAAI,EAAE,GAAG,CAAC,IAAc;gBACxB,OAAO,EAAE,GAAG,CAAC,OAAiB;gBAC9B,MAAM,EAAE,GAAG,CAAC,MAAgB;gBAC5B,OAAO,EAAE,GAAG,CAAC,OAAiB;gBAC9B,WAAW,EAAE,GAAG,CAAC,YAAsB;gBACvC,KAAK,EAAE,IAAI,IAAI,CAAC,GAAG,CAAC,MAAgB,CAAC;gBACrC,SAAS,EAAE,IAAI,IAAI,CAAC,GAAG,CAAC,UAAoB,CAAC;gBAC7C,SAAS,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,GAAG,CAAC,UAAoB,CAAC,CAAC,CAAC,CAAC,IAAI;gBACrE,WAAW,EAAE,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,GAAG,CAAC,YAAsB,CAAC,CAAC,CAAC,CAAC,IAAI;gBAC3E,MAAM,EAAG,GAAG,CAAC,MAAwB,IAAI,IAAI;gBAC7C,KAAK,EAAG,GAAG,CAAC,KAAuB,IAAI,IAAI;gBAC3C,eAAe,EAAE,GAAG,CAAC,gBAA0B;gBAC/C,YAAY,EAAE,GAAG,CAAC,aAAuB;gBACzC,WAAW,EAAE,GAAG,CAAC,YAAsB;gBACvC,SAAS,EAAE,GAAG,CAAC,UAAoB;aACpC,CAAC,CAAC,CAAC;QACN,CAAC;QAED,0CAA0C;QAC1C,KAAK,CAAC,WAAW,CAAC,MAAc,IAAkB,CAAC;QAEnD,KAAK,CAAC,aAAa,CAAC,KAAa,EAAE,MAAqB;YACtD,MAAM,EAAE;iBACL,MAAM,CAAC,IAAI,CAAC;iBACZ,GAAG,CAAC,EAAE,MAAM,EAAE,WAAW,EAAE,WAAW,EAAE,IAAI,IAAI,EAAE,EAAE,MAAM,EAAE,CAAC;iBAC7D,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC,CAAC;QAC/B,CAAC;QAED,KAAK,CAAC,UAAU,CAAC,KAAa,EAAE,KAAa;YAC3C,MAAM,EAAE;iBACL,MAAM,CAAC,IAAI,CAAC;iBACZ,GAAG,CAAC,EAAE,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,IAAI,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC;iBACzD,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC,CAAC;QAC/B,CAAC;QAED,KAAK,CAAC,YAAY,CAAC,KAAa,EAAE,WAAmB,EAAE,KAAW,EAAE,KAAa;YAC/E,MAAM,EAAE;iBACL,MAAM,CAAC,IAAI,CAAC;iBACZ,GAAG,CAAC,EAAE,MAAM,EAAE,SAAS,EAAE,OAAO,EAAE,WAAW,EAAE,KAAK,EAAE,KAAK,EAAE,CAAC;iBAC9D,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC,CAAC;QAC/B,CAAC;QAED,KAAK,CAAC,YAAY,CAAC,KAAkB;YACnC,MAAM,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC;gBAC9B,KAAK,EAAE,KAAK,CAAC,KAAK;gBAClB,SAAS,EAAE,KAAK,CAAC,SAAS,IAAI,CAAC;gBAC/B,KAAK,EAAE,KAAK,CAAC,KAAK;gBAClB,MAAM,EAAE,KAAK,CAAC,MAAM,IAAI,QAAQ;gBAChC,QAAQ,EAAE,KAAK,CAAC,QAAQ,IAAI,IAAI;aACjC,CAAC,CAAC;QACL,CAAC;KACF,CAAC;AACJ,CAAC"}
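The CTE above is what lets several worker processes share one jobs table: FOR UPDATE SKIP LOCKED makes concurrent pollers skip rows another transaction has already selected, so a job is claimed at most once. A wiring sketch, assuming the drizzle postgres-js driver and that createPostgresWorkerDb accepts the drizzle instance that @queuebase/db/postgres is built on (the connection string and env var name are illustrative):
import postgres from 'postgres';
import { drizzle } from 'drizzle-orm/postgres-js';
import { createPostgresWorkerDb } from '@queuebase/worker';

// Assumes the queuebase schema has already been applied to this database.
const client = postgres(process.env.DATABASE_URL ?? 'postgres://localhost:5432/queuebase');
const db = drizzle(client);

// Each call to workerDb.claimJobs(n) atomically flips up to n due jobs to 'running'.
const workerDb = createPostgresWorkerDb(db);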
package/dist/adapters/sqlite.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"sqlite.d.ts","sourceRoot":"","sources":["../../src/adapters/sqlite.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,QAAQ,EAAiB,MAAM,sBAAsB,CAAC;AAEpE,OAAO,KAAK,EAAE,QAAQ,EAA0B,MAAM,eAAe,CAAC;AAEtE,wBAAgB,oBAAoB,CAAC,EAAE,EAAE,QAAQ,GAAG,QAAQ,CAsE3D"}
package/dist/adapters/sqlite.js
ADDED
@@ -0,0 +1,68 @@
import { jobs, jobLogs } from '@queuebase/db/sqlite';
import { and, eq, lte } from 'drizzle-orm';
export function createSqliteWorkerDb(db) {
    return {
        async claimJobs(limit) {
            const now = new Date();
            const rows = db
                .select()
                .from(jobs)
                .where(and(eq(jobs.status, 'pending'), lte(jobs.runAt, now)))
                .limit(limit)
                .all();
            return rows.map((row) => ({
                id: row.id,
                publicId: row.publicId,
                name: row.name,
                payload: typeof row.payload === 'string' ? row.payload : JSON.stringify(row.payload),
                status: row.status ?? 'pending',
                attempt: row.attempt,
                maxAttempts: row.maxAttempts,
                runAt: row.runAt,
                createdAt: row.createdAt,
                startedAt: row.startedAt,
                completedAt: row.completedAt,
                result: row.result == null ? null : (typeof row.result === 'string' ? row.result : JSON.stringify(row.result)),
                error: row.error,
                backoffStrategy: row.backoffStrategy ?? 'exponential',
                backoffDelay: row.backoffDelay,
                callbackUrl: row.callbackUrl,
            }));
        },
        async markRunning(jobId) {
            db.update(jobs)
                .set({ status: 'running', startedAt: new Date() })
                .where(eq(jobs.id, jobId))
                .run();
        },
        async markCompleted(jobId, result) {
            db.update(jobs)
                .set({ status: 'completed', completedAt: new Date(), result })
                .where(eq(jobs.id, jobId))
                .run();
        },
        async markFailed(jobId, error) {
            db.update(jobs)
                .set({ status: 'failed', completedAt: new Date(), error })
                .where(eq(jobs.id, jobId))
                .run();
        },
        async markRetrying(jobId, nextAttempt, runAt, error) {
            db.update(jobs)
                .set({ status: 'pending', attempt: nextAttempt, runAt, error })
                .where(eq(jobs.id, jobId))
                .run();
        },
        async insertJobLog(entry) {
            db.insert(jobLogs)
                .values({
                    jobId: entry.jobId,
                    event: entry.event,
                    source: entry.source ?? 'system',
                    metadata: entry.metadata ?? null,
                })
                .run();
        },
    };
}
//# sourceMappingURL=sqlite.js.map
package/dist/adapters/sqlite.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"sqlite.js","sourceRoot":"","sources":["../../src/adapters/sqlite.ts"],"names":[],"mappings":"AAAA,OAAO,EAAiB,IAAI,EAAE,OAAO,EAAE,MAAM,sBAAsB,CAAC;AACpE,OAAO,EAAE,GAAG,EAAE,EAAE,EAAE,GAAG,EAAE,MAAM,aAAa,CAAC;AAG3C,MAAM,UAAU,oBAAoB,CAAC,EAAY;IAC/C,OAAO;QACL,KAAK,CAAC,SAAS,CAAC,KAAa;YAC3B,MAAM,GAAG,GAAG,IAAI,IAAI,EAAE,CAAC;YACvB,MAAM,IAAI,GAAG,EAAE;iBACZ,MAAM,EAAE;iBACR,IAAI,CAAC,IAAI,CAAC;iBACV,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,EAAE,GAAG,CAAC,IAAI,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;iBAC5D,KAAK,CAAC,KAAK,CAAC;iBACZ,GAAG,EAAE,CAAC;YAET,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,CAAC;gBACxB,EAAE,EAAE,GAAG,CAAC,EAAE;gBACV,QAAQ,EAAE,GAAG,CAAC,QAAQ;gBACtB,IAAI,EAAE,GAAG,CAAC,IAAI;gBACd,OAAO,EAAE,OAAO,GAAG,CAAC,OAAO,KAAK,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,OAAO,CAAC;gBACpF,MAAM,EAAE,GAAG,CAAC,MAAM,IAAI,SAAS;gBAC/B,OAAO,EAAE,GAAG,CAAC,OAAO;gBACpB,WAAW,EAAE,GAAG,CAAC,WAAW;gBAC5B,KAAK,EAAE,GAAG,CAAC,KAAK;gBAChB,SAAS,EAAE,GAAG,CAAC,SAAS;gBACxB,SAAS,EAAE,GAAG,CAAC,SAAS;gBACxB,WAAW,EAAE,GAAG,CAAC,WAAW;gBAC5B,MAAM,EAAE,GAAG,CAAC,MAAM,IAAI,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,OAAO,GAAG,CAAC,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;gBAC9G,KAAK,EAAE,GAAG,CAAC,KAAK;gBAChB,eAAe,EAAE,GAAG,CAAC,eAAe,IAAI,aAAa;gBACrD,YAAY,EAAE,GAAG,CAAC,YAAY;gBAC9B,WAAW,EAAE,GAAG,CAAC,WAAW;aAC7B,CAAC,CAAC,CAAC;QACN,CAAC;QAED,KAAK,CAAC,WAAW,CAAC,KAAa;YAC7B,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC;iBACZ,GAAG,CAAC,EAAE,MAAM,EAAE,SAAS,EAAE,SAAS,EAAE,IAAI,IAAI,EAAE,EAAE,CAAC;iBACjD,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;iBACzB,GAAG,EAAE,CAAC;QACX,CAAC;QAED,KAAK,CAAC,aAAa,CAAC,KAAa,EAAE,MAAqB;YACtD,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC;iBACZ,GAAG,CAAC,EAAE,MAAM,EAAE,WAAW,EAAE,WAAW,EAAE,IAAI,IAAI,EAAE,EAAE,MAAM,EAAE,CAAC;iBAC7D,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;iBACzB,GAAG,EAAE,CAAC;QACX,CAAC;QAED,KAAK,CAAC,UAAU,CAAC,KAAa,EAAE,KAAa;YAC3C,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC;iBACZ,GAAG,CAAC,EAAE,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,IAAI,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC;iBACzD,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;iBACzB,GAAG,EAAE,CAAC;QACX,CAAC;QAED,KAAK,CAAC,YAAY,CAAC,KAAa,EAAE,WAAmB,EAAE,KAAW,EAAE,KAAa;YAC/E,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC;iBACZ,GAAG,CAAC,EAAE,MAAM,EAAE,SAAS,EAAE,OAAO,EAAE,WAAW,EAAE,KAAK,EAAE,KAAK,EAAE,CAAC;iBAC9D,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;iBACzB,GAAG,EAAE,CAAC;QACX,CAAC;QAED,KAAK,CAAC,YAAY,CAAC,KAAkB;YACnC,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC;iBACf,MAAM,CAAC;gBACN,KAAK,EAAE,KAAK,CAAC,KAAK;gBAClB,KAAK,EAAE,KAAK,CAAC,KAAK;gBAClB,MAAM,EAAE,KAAK,CAAC,MAAM,IAAI,QAAQ;gBAChC,QAAQ,EAAE,KAAK,CAAC,QAAQ,IAAI,IAAI;aACjC,CAAC;iBACD,GAAG,EAAE,CAAC;QACX,CAAC;KACF,CAAC;AACJ,CAAC"}
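In contrast to the Postgres adapter, this claim path is a plain SELECT with no row locking, so it is intended for a single local worker; the executor immediately follows up with markRunning for each claimed job. A wiring sketch, assuming the synchronous better-sqlite3 driver implied by the .all() / .run() calls (the database file name is illustrative):
import Database from 'better-sqlite3';
import { drizzle } from 'drizzle-orm/better-sqlite3';
import { createSqliteWorkerDb } from '@queuebase/worker';

// Assumes the queuebase tables already exist in this database file.
const sqlite = new Database('queuebase.db');
const db = drizzle(sqlite);

const workerDb = createSqliteWorkerDb(db);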
package/dist/cron/index.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/cron/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,eAAe,EAAE,MAAM,WAAW,CAAC;AAC5C,OAAO,EAAE,kBAAkB,EAAE,uBAAuB,EAAE,MAAM,kBAAkB,CAAC"}
package/dist/cron/index.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/cron/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,eAAe,EAAE,MAAM,WAAW,CAAC;AAC5C,OAAO,EAAE,kBAAkB,EAAE,uBAAuB,EAAE,MAAM,kBAAkB,CAAC"}
package/dist/cron/lock.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"lock.d.ts","sourceRoot":"","sources":["../../src/cron/lock.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AAEzD,wBAAsB,eAAe,CACnC,EAAE,EAAE,UAAU,EACd,QAAQ,EAAE,MAAM,EAChB,UAAU,EAAE,MAAM,GACjB,OAAO,CAAC,OAAO,CAAC,CAclB"}
package/dist/cron/lock.js
ADDED
@@ -0,0 +1,15 @@
import { cronLocks, sql } from '@queuebase/db/postgres';
export async function acquireCronLock(db, taskName, durationMs) {
    const lockedUntil = new Date(Date.now() + durationMs);
    const result = await db
        .insert(cronLocks)
        .values({ taskName, lockedUntil })
        .onConflictDoUpdate({
            target: cronLocks.taskName,
            set: { lockedUntil },
            setWhere: sql`${cronLocks.lockedUntil} < now()`,
        })
        .returning();
    return result.length > 0;
}
//# sourceMappingURL=lock.js.map
package/dist/cron/lock.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"lock.js","sourceRoot":"","sources":["../../src/cron/lock.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,GAAG,EAAE,MAAM,wBAAwB,CAAC;AAGxD,MAAM,CAAC,KAAK,UAAU,eAAe,CACnC,EAAc,EACd,QAAgB,EAChB,UAAkB;IAElB,MAAM,WAAW,GAAG,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,UAAU,CAAC,CAAC;IAEtD,MAAM,MAAM,GAAG,MAAM,EAAE;SACpB,MAAM,CAAC,SAAS,CAAC;SACjB,MAAM,CAAC,EAAE,QAAQ,EAAE,WAAW,EAAE,CAAC;SACjC,kBAAkB,CAAC;QAClB,MAAM,EAAE,SAAS,CAAC,QAAQ;QAC1B,GAAG,EAAE,EAAE,WAAW,EAAE;QACpB,QAAQ,EAAE,GAAG,CAAA,GAAG,SAAS,CAAC,WAAW,UAAU;KAChD,CAAC;SACD,SAAS,EAAE,CAAC;IAEf,OAAO,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC;AAC3B,CAAC"}
package/dist/cron/tasks/cleanup-audit-logs.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"cleanup-audit-logs.d.ts","sourceRoot":"","sources":["../../../src/cron/tasks/cleanup-audit-logs.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AAKzD,wBAAsB,uBAAuB,CAAC,EAAE,EAAE,UAAU,GAAG,OAAO,CAAC,MAAM,CAAC,CAsB7E"}
package/dist/cron/tasks/cleanup-audit-logs.js
ADDED
@@ -0,0 +1,23 @@
import { auditLogs, sql, inArray } from '@queuebase/db/postgres';
const BATCH_SIZE = 1000;
const RETENTION_DAYS = 90;
export async function cleanupExpiredAuditLogs(db) {
    const cutoff = new Date(Date.now() - RETENTION_DAYS * 24 * 60 * 60 * 1000);
    let totalDeleted = 0;
    while (true) {
        const batch = await db
            .select({ id: auditLogs.id })
            .from(auditLogs)
            .where(sql`${auditLogs.createdAt} < ${cutoff}`)
            .limit(BATCH_SIZE);
        if (batch.length === 0)
            break;
        const ids = batch.map((row) => row.id);
        await db.delete(auditLogs).where(inArray(auditLogs.id, ids));
        totalDeleted += ids.length;
        if (batch.length < BATCH_SIZE)
            break;
    }
    return totalDeleted;
}
//# sourceMappingURL=cleanup-audit-logs.js.map
package/dist/cron/tasks/cleanup-audit-logs.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"cleanup-audit-logs.js","sourceRoot":"","sources":["../../../src/cron/tasks/cleanup-audit-logs.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,GAAG,EAAE,OAAO,EAAE,MAAM,wBAAwB,CAAC;AAGjE,MAAM,UAAU,GAAG,IAAI,CAAC;AACxB,MAAM,cAAc,GAAG,EAAE,CAAC;AAE1B,MAAM,CAAC,KAAK,UAAU,uBAAuB,CAAC,EAAc;IAC1D,MAAM,MAAM,GAAG,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,cAAc,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,IAAI,CAAC,CAAC;IAC3E,IAAI,YAAY,GAAG,CAAC,CAAC;IAErB,OAAO,IAAI,EAAE,CAAC;QACZ,MAAM,KAAK,GAAG,MAAM,EAAE;aACnB,MAAM,CAAC,EAAE,EAAE,EAAE,SAAS,CAAC,EAAE,EAAE,CAAC;aAC5B,IAAI,CAAC,SAAS,CAAC;aACf,KAAK,CAAC,GAAG,CAAA,GAAG,SAAS,CAAC,SAAS,MAAM,MAAM,EAAE,CAAC;aAC9C,KAAK,CAAC,UAAU,CAAC,CAAC;QAErB,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;YAAE,MAAM;QAE9B,MAAM,GAAG,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;QACvC,MAAM,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC,CAAC;QAE7D,YAAY,IAAI,GAAG,CAAC,MAAM,CAAC;QAE3B,IAAI,KAAK,CAAC,MAAM,GAAG,UAAU;YAAE,MAAM;IACvC,CAAC;IAED,OAAO,YAAY,CAAC;AACtB,CAAC"}
package/dist/cron/tasks/cleanup-jobs.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"cleanup-jobs.d.ts","sourceRoot":"","sources":["../../../src/cron/tasks/cleanup-jobs.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AAKzD,wBAAsB,kBAAkB,CAAC,EAAE,EAAE,UAAU,GAAG,OAAO,CAAC,MAAM,CAAC,CAwBxE"}
package/dist/cron/tasks/cleanup-jobs.js
ADDED
@@ -0,0 +1,23 @@
import { jobs, sql, inArray } from '@queuebase/db/postgres';
const BATCH_SIZE = 1000;
const RETENTION_DAYS = 30;
export async function cleanupExpiredJobs(db) {
    const cutoff = new Date(Date.now() - RETENTION_DAYS * 24 * 60 * 60 * 1000);
    let totalDeleted = 0;
    while (true) {
        const batch = await db
            .select({ id: jobs.id })
            .from(jobs)
            .where(sql`${jobs.createdAt} < ${cutoff} AND ${jobs.status} IN ('completed', 'failed', 'cancelled')`)
            .limit(BATCH_SIZE);
        if (batch.length === 0)
            break;
        const ids = batch.map((row) => row.id);
        await db.delete(jobs).where(inArray(jobs.id, ids));
        totalDeleted += ids.length;
        if (batch.length < BATCH_SIZE)
            break;
    }
    return totalDeleted;
}
//# sourceMappingURL=cleanup-jobs.js.map
package/dist/cron/tasks/cleanup-jobs.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"cleanup-jobs.js","sourceRoot":"","sources":["../../../src/cron/tasks/cleanup-jobs.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,GAAG,EAAE,OAAO,EAAE,MAAM,wBAAwB,CAAC;AAG5D,MAAM,UAAU,GAAG,IAAI,CAAC;AACxB,MAAM,cAAc,GAAG,EAAE,CAAC;AAE1B,MAAM,CAAC,KAAK,UAAU,kBAAkB,CAAC,EAAc;IACrD,MAAM,MAAM,GAAG,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,cAAc,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,IAAI,CAAC,CAAC;IAC3E,IAAI,YAAY,GAAG,CAAC,CAAC;IAErB,OAAO,IAAI,EAAE,CAAC;QACZ,MAAM,KAAK,GAAG,MAAM,EAAE;aACnB,MAAM,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,EAAE,EAAE,CAAC;aACvB,IAAI,CAAC,IAAI,CAAC;aACV,KAAK,CACJ,GAAG,CAAA,GAAG,IAAI,CAAC,SAAS,MAAM,MAAM,QAAQ,IAAI,CAAC,MAAM,0CAA0C,CAC9F;aACA,KAAK,CAAC,UAAU,CAAC,CAAC;QAErB,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;YAAE,MAAM;QAE9B,MAAM,GAAG,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;QACvC,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC,CAAC;QAEnD,YAAY,IAAI,GAAG,CAAC,MAAM,CAAC;QAE3B,IAAI,KAAK,CAAC,MAAM,GAAG,UAAU;YAAE,MAAM;IACvC,CAAC;IAED,OAAO,YAAY,CAAC;AACtB,CAAC"}
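Both cleanup tasks use the same batched-delete loop: select up to BATCH_SIZE ids past the retention cutoff (30 days for jobs, 90 for audit logs), delete them with inArray, and repeat, which keeps each delete statement small. A sketch of running them periodically behind acquireCronLock so overlapping schedulers never run the same task twice — the lock names, durations, interval, and helper name are illustrative, not package defaults:
import { acquireCronLock, cleanupExpiredAuditLogs, cleanupExpiredJobs } from '@queuebase/worker';

type Db = Parameters<typeof cleanupExpiredJobs>[0]; // the drizzle Postgres instance type

// Hypothetical helper: only the process that wins each lock upsert runs that task.
export async function runRetentionCleanup(db: Db): Promise<void> {
    if (await acquireCronLock(db, 'cleanup-jobs', 10 * 60 * 1000)) {
        const removed = await cleanupExpiredJobs(db);
        console.log(`cleanup-jobs removed ${removed} finished jobs`);
    }
    if (await acquireCronLock(db, 'cleanup-audit-logs', 10 * 60 * 1000)) {
        const removed = await cleanupExpiredAuditLogs(db);
        console.log(`cleanup-audit-logs removed ${removed} expired audit rows`);
    }
}

// e.g. from the worker entry point: setInterval(() => void runRetentionCleanup(db), 60 * 60 * 1000);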
package/dist/cron/tasks/index.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/cron/tasks/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AACvD,OAAO,EAAE,uBAAuB,EAAE,MAAM,yBAAyB,CAAC"}
package/dist/cron/tasks/index.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/cron/tasks/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AACvD,OAAO,EAAE,uBAAuB,EAAE,MAAM,yBAAyB,CAAC"}
package/dist/executor.d.ts
ADDED
@@ -0,0 +1,18 @@
import type { WorkerDb } from './adapter.js';
export interface ExecutionResult {
    success: boolean;
    output?: unknown;
    error?: string;
    duration: number;
}
export interface ExecutorOptions {
    pollInterval?: number;
    concurrency?: number;
    webhookSecret?: string;
}
export interface JobExecutorHandle {
    start(): void;
    stop(): void;
}
export declare function createJobExecutor(workerDb: WorkerDb, options?: ExecutorOptions): JobExecutorHandle;
//# sourceMappingURL=executor.d.ts.map
package/dist/executor.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"executor.d.ts","sourceRoot":"","sources":["../src/executor.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,QAAQ,EAAa,MAAM,cAAc,CAAC;AAExD,MAAM,WAAW,eAAe;IAC9B,OAAO,EAAE,OAAO,CAAC;IACjB,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,MAAM,CAAC;CAClB;AAED,MAAM,WAAW,eAAe;IAC9B,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,aAAa,CAAC,EAAE,MAAM,CAAC;CACxB;AAED,MAAM,WAAW,iBAAiB;IAChC,KAAK,IAAI,IAAI,CAAC;IACd,IAAI,IAAI,IAAI,CAAC;CACd;AAED,wBAAgB,iBAAiB,CAC/B,QAAQ,EAAE,QAAQ,EAClB,OAAO,GAAE,eAAoB,GAC5B,iBAAiB,CA6JnB"}
package/dist/executor.js
ADDED
@@ -0,0 +1,142 @@
import { calculateBackoff, signPayload, WEBHOOK_HEADERS } from '@queuebase/core';
export function createJobExecutor(workerDb, options = {}) {
    const pollInterval = options.pollInterval ?? 1000;
    const concurrency = options.concurrency ?? 5;
    const webhookSecret = options.webhookSecret;
    let running = false;
    let activeJobs = 0;
    let pollTimer = null;
    async function poll() {
        if (!running)
            return;
        if (activeJobs >= concurrency)
            return;
        const slotsAvailable = concurrency - activeJobs;
        const pendingJobs = await workerDb.claimJobs(slotsAvailable);
        for (const job of pendingJobs) {
            executeJob(job);
        }
    }
    async function executeJob(job) {
        activeJobs++;
        await workerDb.markRunning(job.id);
        await workerDb.insertJobLog({
            jobId: job.id,
            projectId: job.projectId,
            event: 'started',
        });
        const startTime = Date.now();
        try {
            const result = await callbackExecute(job);
            const duration = Date.now() - startTime;
            if (result.success) {
                const resultStr = result.output != null ? JSON.stringify(result.output) : null;
                await workerDb.markCompleted(job.id, resultStr);
                await workerDb.insertJobLog({
                    jobId: job.id,
                    projectId: job.projectId,
                    event: 'completed',
                    metadata: JSON.stringify({ duration }),
                });
            }
            else {
                await handleFailure(job, result.error ?? 'Unknown error', duration);
            }
        }
        catch (error) {
            const duration = Date.now() - startTime;
            const errorMessage = error instanceof Error ? error.message : 'Unknown error';
            await handleFailure(job, errorMessage, duration);
        }
        finally {
            activeJobs--;
        }
    }
    async function handleFailure(job, error, duration) {
        const nextAttempt = job.attempt + 1;
        if (nextAttempt <= job.maxAttempts) {
            const backoffMs = calculateBackoff(job.attempt, job.backoffStrategy, job.backoffDelay);
            const runAt = new Date(Date.now() + backoffMs);
            await workerDb.markRetrying(job.id, nextAttempt, runAt, error);
            await workerDb.insertJobLog({
                jobId: job.id,
                projectId: job.projectId,
                event: 'retrying',
                metadata: JSON.stringify({ error, attempt: nextAttempt, duration, nextRunAt: runAt.toISOString() }),
            });
        }
        else {
            await workerDb.markFailed(job.id, error);
            await workerDb.insertJobLog({
                jobId: job.id,
                projectId: job.projectId,
                event: 'failed',
                metadata: JSON.stringify({ error, duration }),
            });
        }
    }
    async function callbackExecute(job) {
        const startTime = Date.now();
        const body = JSON.stringify({
            jobId: job.publicId,
            name: job.name,
            payload: JSON.parse(job.payload),
            attempt: job.attempt,
            maxAttempts: job.maxAttempts,
        });
        const headers = {
            'Content-Type': 'application/json',
            'X-Queuebase-Job-Id': job.publicId,
            'X-Queuebase-Job-Name': job.name,
            'X-Queuebase-Attempt': job.attempt.toString(),
        };
        if (webhookSecret) {
            const { signature } = signPayload(body, webhookSecret);
            headers[WEBHOOK_HEADERS.SIGNATURE] = signature;
        }
        const response = await fetch(job.callbackUrl, {
            method: 'POST',
            headers,
            body,
        });
        const duration = Date.now() - startTime;
        if (!response.ok) {
            const errorText = await response.text().catch(() => 'Failed to read response');
            return {
                success: false,
                error: `Callback failed with status ${response.status}: ${errorText}`,
                duration,
            };
        }
        const result = (await response.json().catch(() => ({})));
        if (result.success === false) {
            return {
                success: false,
                error: result.error ?? 'Job handler returned failure',
                duration,
            };
        }
        return {
            success: true,
            output: result,
            duration,
        };
    }
    return {
        start() {
            if (running)
                return;
            running = true;
            poll();
            pollTimer = setInterval(() => poll(), pollInterval);
        },
        stop() {
            running = false;
            if (pollTimer) {
                clearInterval(pollTimer);
                pollTimer = null;
            }
        },
    };
}
//# sourceMappingURL=executor.js.map
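callbackExecute pins down the delivery contract: the executor POSTs { jobId, name, payload, attempt, maxAttempts } as JSON to callbackUrl with X-Queuebase-* headers (plus a signature header when webhookSecret is set), treats any non-2xx status as a failure, and also fails the attempt when the response body contains success: false. A handler-side sketch using the Web-standard Request/Response types — the handler name is illustrative and signature verification is omitted:
// Shape of the body callbackExecute sends (see above).
interface QueuebaseCallbackBody {
    jobId: string;
    name: string;
    payload: unknown;
    attempt: number;
    maxAttempts: number;
}

// Illustrative handler: mount it on whatever route your callbackUrl points at.
export async function handleQueueCallback(req: Request): Promise<Response> {
    const job = (await req.json()) as QueuebaseCallbackBody;
    try {
        // ...do the real work for job.name / job.payload here...
        return Response.json({ success: true, handled: job.name });
    } catch (err) {
        // A { success: false, error } body makes the executor retry, or fail once maxAttempts is reached.
        const error = err instanceof Error ? err.message : 'handler error';
        return Response.json({ success: false, error });
    }
}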
package/dist/executor.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"executor.js","sourceRoot":"","sources":["../src/executor.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,WAAW,EAAE,eAAe,EAAE,MAAM,iBAAiB,CAAC;AAqBjF,MAAM,UAAU,iBAAiB,CAC/B,QAAkB,EAClB,UAA2B,EAAE;IAE7B,MAAM,YAAY,GAAG,OAAO,CAAC,YAAY,IAAI,IAAI,CAAC;IAClD,MAAM,WAAW,GAAG,OAAO,CAAC,WAAW,IAAI,CAAC,CAAC;IAC7C,MAAM,aAAa,GAAG,OAAO,CAAC,aAAa,CAAC;IAE5C,IAAI,OAAO,GAAG,KAAK,CAAC;IACpB,IAAI,UAAU,GAAG,CAAC,CAAC;IACnB,IAAI,SAAS,GAA0C,IAAI,CAAC;IAE5D,KAAK,UAAU,IAAI;QACjB,IAAI,CAAC,OAAO;YAAE,OAAO;QACrB,IAAI,UAAU,IAAI,WAAW;YAAE,OAAO;QAEtC,MAAM,cAAc,GAAG,WAAW,GAAG,UAAU,CAAC;QAEhD,MAAM,WAAW,GAAG,MAAM,QAAQ,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC;QAE7D,KAAK,MAAM,GAAG,IAAI,WAAW,EAAE,CAAC;YAC9B,UAAU,CAAC,GAAG,CAAC,CAAC;QAClB,CAAC;IACH,CAAC;IAED,KAAK,UAAU,UAAU,CAAC,GAAc;QACtC,UAAU,EAAE,CAAC;QAEb,MAAM,QAAQ,CAAC,WAAW,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;QAEnC,MAAM,QAAQ,CAAC,YAAY,CAAC;YAC1B,KAAK,EAAE,GAAG,CAAC,EAAE;YACb,SAAS,EAAE,GAAG,CAAC,SAAS;YACxB,KAAK,EAAE,SAAS;SACjB,CAAC,CAAC;QAEH,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAE7B,IAAI,CAAC;YACH,MAAM,MAAM,GAAG,MAAM,eAAe,CAAC,GAAG,CAAC,CAAC;YAC1C,MAAM,QAAQ,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;YAExC,IAAI,MAAM,CAAC,OAAO,EAAE,CAAC;gBACnB,MAAM,SAAS,GAAG,MAAM,CAAC,MAAM,IAAI,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;gBAC/E,MAAM,QAAQ,CAAC,aAAa,CAAC,GAAG,CAAC,EAAE,EAAE,SAAS,CAAC,CAAC;gBAChD,MAAM,QAAQ,CAAC,YAAY,CAAC;oBAC1B,KAAK,EAAE,GAAG,CAAC,EAAE;oBACb,SAAS,EAAE,GAAG,CAAC,SAAS;oBACxB,KAAK,EAAE,WAAW;oBAClB,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,QAAQ,EAAE,CAAC;iBACvC,CAAC,CAAC;YACL,CAAC;iBAAM,CAAC;gBACN,MAAM,aAAa,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,IAAI,eAAe,EAAE,QAAQ,CAAC,CAAC;YACtE,CAAC;QACH,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,MAAM,QAAQ,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;YACxC,MAAM,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC;YAC9E,MAAM,aAAa,CAAC,GAAG,EAAE,YAAY,EAAE,QAAQ,CAAC,CAAC;QACnD,CAAC;gBAAS,CAAC;YACT,UAAU,EAAE,CAAC;QACf,CAAC;IACH,CAAC;IAED,KAAK,UAAU,aAAa,CAAC,GAAc,EAAE,KAAa,EAAE,QAAgB;QAC1E,MAAM,WAAW,GAAG,GAAG,CAAC,OAAO,GAAG,CAAC,CAAC;QAEpC,IAAI,WAAW,IAAI,GAAG,CAAC,WAAW,EAAE,CAAC;YACnC,MAAM,SAAS,GAAG,gBAAgB,CAAC,GAAG,CAAC,OAAO,EAAE,GAAG,CAAC,eAA2C,EAAE,GAAG,CAAC,YAAY,CAAC,CAAC;YACnH,MAAM,KAAK,GAAG,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC,CAAC;YAE/C,MAAM,QAAQ,CAAC,YAAY,CAAC,GAAG,CAAC,EAAE,EAAE,WAAW,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;YAC/D,MAAM,QAAQ,CAAC,YAAY,CAAC;gBAC1B,KAAK,EAAE,GAAG,CAAC,EAAE;gBACb,SAAS,EAAE,GAAG,CAAC,SAAS;gBACxB,KAAK,EAAE,UAAU;gBACjB,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,WAAW,EAAE,QAAQ,EAAE,SAAS,EAAE,KAAK,CAAC,WAAW,EAAE,EAAE,CAAC;aACpG,CAAC,CAAC;QACL,CAAC;aAAM,CAAC;YACN,MAAM,QAAQ,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;YACzC,MAAM,QAAQ,CAAC,YAAY,CAAC;gBAC1B,KAAK,EAAE,GAAG,CAAC,EAAE;gBACb,SAAS,EAAE,GAAG,CAAC,SAAS;gBACxB,KAAK,EAAE,QAAQ;gBACf,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,KAAK,EAAE,QAAQ,EAAE,CAAC;aAC9C,CAAC,CAAC;QACL,CAAC;IACH,CAAC;IAED,KAAK,UAAU,eAAe,CAAC,GAAc;QAC3C,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAE7B,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC;YAC1B,KAAK,EAAE,GAAG,CAAC,QAAQ;YACnB,IAAI,EAAE,GAAG,CAAC,IAAI;YACd,OAAO,EAAE,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC;YAChC,OAAO,EAAE,GAAG,CAAC,OAAO;YACpB,WAAW,EAAE,GAAG,CAAC,WAAW;SAC7B,CAAC,CAAC;QAEH,MAAM,OAAO,GAA2B;YACtC,cAAc,EAAE,kBAAkB;YAClC,oBAAoB,EAAE,GAAG,CAAC,QAAQ;YAClC,sBAAsB,EAAE,GAAG,CAAC,IAAI;YAChC,qBAAqB,EAAE,GAAG,CAAC,OAAO,CAAC,QAAQ,EAAE;SAC9C,CAAC;QAEF,IAAI,aAAa,EAAE,CAAC;YAClB,MAAM,EAAE,SAAS,EAAE,GAAG,WAAW,CAAC,IAAI,EAAE,aAAa,CAAC,CAAC;YACvD,OAAO,CAAC,eAAe,CAAC,SAAS,CAAC,GAAG,SAA
S,CAAC;QACjD,CAAC;QAED,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,CAAC,WAAW,EAAE;YAC5C,MAAM,EAAE,MAAM;YACd,OAAO;YACP,IAAI;SACL,CAAC,CAAC;QAEH,MAAM,QAAQ,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;QAExC,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;YACjB,MAAM,SAAS,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,yBAAyB,CAAC,CAAC;YAC/E,OAAO;gBACL,OAAO,EAAE,KAAK;gBACd,KAAK,EAAE,+BAA+B,QAAQ,CAAC,MAAM,KAAK,SAAS,EAAE;gBACrE,QAAQ;aACT,CAAC;QACJ,CAAC;QAED,MAAM,MAAM,GAAG,CAAC,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAA4D,CAAC;QAEpH,IAAI,MAAM,CAAC,OAAO,KAAK,KAAK,EAAE,CAAC;YAC7B,OAAO;gBACL,OAAO,EAAE,KAAK;gBACd,KAAK,EAAE,MAAM,CAAC,KAAK,IAAI,8BAA8B;gBACrD,QAAQ;aACT,CAAC;QACJ,CAAC;QAED,OAAO;YACL,OAAO,EAAE,IAAI;YACb,MAAM,EAAE,MAAM;YACd,QAAQ;SACT,CAAC;IACJ,CAAC;IAED,OAAO;QACL,KAAK;YACH,IAAI,OAAO;gBAAE,OAAO;YACpB,OAAO,GAAG,IAAI,CAAC;YACf,IAAI,EAAE,CAAC;YACP,SAAS,GAAG,WAAW,CAAC,GAAG,EAAE,CAAC,IAAI,EAAE,EAAE,YAAY,CAAC,CAAC;QACtD,CAAC;QACD,IAAI;YACF,OAAO,GAAG,KAAK,CAAC;YAChB,IAAI,SAAS,EAAE,CAAC;gBACd,aAAa,CAAC,SAAS,CAAC,CAAC;gBACzB,SAAS,GAAG,IAAI,CAAC;YACnB,CAAC;QACH,CAAC;KACF,CAAC;AACJ,CAAC"}
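A sketch of running the executor against one of the adapters above; the option values shown are the defaults from createJobExecutor, and the env var name is illustrative:
import { createJobExecutor, createPostgresWorkerDb } from '@queuebase/worker';

// Stands in for the drizzle Postgres instance from the adapter sketch above.
declare const db: Parameters<typeof createPostgresWorkerDb>[0];

const executor = createJobExecutor(createPostgresWorkerDb(db), {
    pollInterval: 1000, // ms between claim polls (default)
    concurrency: 5,     // max jobs in flight per process (default)
    webhookSecret: process.env.QUEUEBASE_WEBHOOK_SECRET, // optional; enables the signature header
});

executor.start();
// stop() clears the poll timer; jobs already started are allowed to finish.
process.on('SIGTERM', () => executor.stop());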
package/dist/index.d.ts
ADDED
@@ -0,0 +1,7 @@
export { enqueueJob, type EnqueueParams } from './queue.js';
export { createJobExecutor, type ExecutionResult, type ExecutorOptions, type JobExecutorHandle } from './executor.js';
export type { WorkerDb, WorkerJob, JobLogEntry, JobLogEvent } from './adapter.js';
export { createSqliteWorkerDb } from './adapters/sqlite.js';
export { createPostgresWorkerDb } from './adapters/postgres.js';
export { acquireCronLock, cleanupExpiredJobs, cleanupExpiredAuditLogs } from './cron/index.js';
//# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,KAAK,aAAa,EAAE,MAAM,YAAY,CAAC;AAC5D,OAAO,EAAE,iBAAiB,EAAE,KAAK,eAAe,EAAE,KAAK,eAAe,EAAE,KAAK,iBAAiB,EAAE,MAAM,eAAe,CAAC;AACtH,YAAY,EAAE,QAAQ,EAAE,SAAS,EAAE,WAAW,EAAE,WAAW,EAAE,MAAM,cAAc,CAAC;AAClF,OAAO,EAAE,oBAAoB,EAAE,MAAM,sBAAsB,CAAC;AAC5D,OAAO,EAAE,sBAAsB,EAAE,MAAM,wBAAwB,CAAC;AAChE,OAAO,EAAE,eAAe,EAAE,kBAAkB,EAAE,uBAAuB,EAAE,MAAM,iBAAiB,CAAC"}
package/dist/index.js
ADDED
@@ -0,0 +1,6 @@
export { enqueueJob } from './queue.js';
export { createJobExecutor } from './executor.js';
export { createSqliteWorkerDb } from './adapters/sqlite.js';
export { createPostgresWorkerDb } from './adapters/postgres.js';
export { acquireCronLock, cleanupExpiredJobs, cleanupExpiredAuditLogs } from './cron/index.js';
//# sourceMappingURL=index.js.map
package/dist/index.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAsB,MAAM,YAAY,CAAC;AAC5D,OAAO,EAAE,iBAAiB,EAAsE,MAAM,eAAe,CAAC;AAEtH,OAAO,EAAE,oBAAoB,EAAE,MAAM,sBAAsB,CAAC;AAC5D,OAAO,EAAE,sBAAsB,EAAE,MAAM,wBAAwB,CAAC;AAChE,OAAO,EAAE,eAAe,EAAE,kBAAkB,EAAE,uBAAuB,EAAE,MAAM,iBAAiB,CAAC"}
package/dist/queue.d.ts
ADDED
@@ -0,0 +1,15 @@
import { type EnqueueOptions } from '@queuebase/core';
import { type SqliteDb } from '@queuebase/db/sqlite';
export interface EnqueueParams {
    name: string;
    payload: unknown;
    callbackUrl: string;
    options?: EnqueueOptions;
}
/**
 * Enqueue a job to the local SQLite database
 */
export declare function enqueueJob(db: SqliteDb, params: EnqueueParams): {
    jobId: string;
};
//# sourceMappingURL=queue.d.ts.map
package/dist/queue.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"queue.d.ts","sourceRoot":"","sources":["../src/queue.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,cAAc,EAAgC,MAAM,iBAAiB,CAAC;AACpF,OAAO,EAAe,KAAK,QAAQ,EAAQ,MAAM,sBAAsB,CAAC;AAExE,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,OAAO,CAAC;IACjB,WAAW,EAAE,MAAM,CAAC;IACpB,OAAO,CAAC,EAAE,cAAc,CAAC;CAC1B;AAED;;GAEG;AACH,wBAAgB,UAAU,CAAC,EAAE,EAAE,QAAQ,EAAE,MAAM,EAAE,aAAa,GAAG;IAAE,KAAK,EAAE,MAAM,CAAA;CAAE,CAwBjF"}
package/dist/queue.js
ADDED
@@ -0,0 +1,27 @@
import { generatePublicId, parseDelay } from '@queuebase/core';
import { jobs } from '@queuebase/db/sqlite';
/**
 * Enqueue a job to the local SQLite database
 */
export function enqueueJob(db, params) {
    const { name, payload, callbackUrl, options = {} } = params;
    const publicId = generatePublicId();
    const now = Date.now();
    const delay = options.delay ? parseDelay(options.delay) : 0;
    const job = {
        publicId,
        name,
        payload: JSON.stringify(payload),
        status: 'pending',
        attempt: 1,
        maxAttempts: (options.retries ?? 0) + 1,
        runAt: new Date(now + delay),
        createdAt: new Date(now),
        backoffStrategy: options.backoff ?? 'exponential',
        backoffDelay: options.backoffDelay ?? 1000,
        callbackUrl,
    };
    db.insert(jobs).values(job).run();
    return { jobId: publicId };
}
//# sourceMappingURL=queue.js.map
package/dist/queue.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"queue.js","sourceRoot":"","sources":["../src/queue.ts"],"names":[],"mappings":"AAAA,OAAO,EAAuB,gBAAgB,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AACpF,OAAO,EAA8B,IAAI,EAAE,MAAM,sBAAsB,CAAC;AASxE;;GAEG;AACH,MAAM,UAAU,UAAU,CAAC,EAAY,EAAE,MAAqB;IAC5D,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,WAAW,EAAE,OAAO,GAAG,EAAE,EAAE,GAAG,MAAM,CAAC;IAE5D,MAAM,QAAQ,GAAG,gBAAgB,EAAE,CAAC;IACpC,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;IACvB,MAAM,KAAK,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,UAAU,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAE5D,MAAM,GAAG,GAAW;QAClB,QAAQ;QACR,IAAI;QACJ,OAAO,EAAE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC;QAChC,MAAM,EAAE,SAAS;QACjB,OAAO,EAAE,CAAC;QACV,WAAW,EAAE,CAAC,OAAO,CAAC,OAAO,IAAI,CAAC,CAAC,GAAG,CAAC;QACvC,KAAK,EAAE,IAAI,IAAI,CAAC,GAAG,GAAG,KAAK,CAAC;QAC5B,SAAS,EAAE,IAAI,IAAI,CAAC,GAAG,CAAC;QACxB,eAAe,EAAE,OAAO,CAAC,OAAO,IAAI,aAAa;QACjD,YAAY,EAAE,OAAO,CAAC,YAAY,IAAI,IAAI;QAC1C,WAAW;KACZ,CAAC;IAEF,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC;IAElC,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,CAAC;AAC7B,CAAC"}
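enqueueJob writes a pending row straight into the local SQLite jobs table and returns the generated public id; note that retries are extra attempts, so retries: 3 is stored as maxAttempts: 4. A usage sketch — the job name, payload, and callback URL are illustrative:
import { enqueueJob } from '@queuebase/worker';

// Stands in for the drizzle better-sqlite3 instance from the SQLite adapter sketch above.
declare const db: Parameters<typeof enqueueJob>[0];

const { jobId } = enqueueJob(db, {
    name: 'send-welcome-email',
    payload: { userId: 42 },
    callbackUrl: 'https://app.example.com/api/queue',
    options: { retries: 3 }, // stored as maxAttempts: 4
});
console.log(`enqueued job ${jobId}`);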
package/package.json
ADDED
@@ -0,0 +1,48 @@
{
  "name": "@queuebase/worker",
  "version": "0.0.1",
  "type": "module",
  "exports": {
    ".": {
      "types": "./dist/index.d.ts",
      "import": "./dist/index.js"
    }
  },
  "files": [
    "dist"
  ],
  "publishConfig": {
    "access": "public"
  },
  "dependencies": {
    "drizzle-orm": "^0.38.3",
    "@queuebase/core": "0.0.1",
    "@queuebase/db": "0.0.1"
  },
  "devDependencies": {
    "@types/better-sqlite3": "^7.6.12",
    "better-sqlite3": "^11.7.0",
    "postgres": "^3.4.5",
    "typescript": "^5.7.2",
    "vitest": "^2.1.8",
    "@queuebase/tsconfig": "0.0.0"
  },
  "peerDependencies": {
    "postgres": ">=3.0.0"
  },
  "peerDependenciesMeta": {
    "postgres": {
      "optional": true
    }
  },
  "scripts": {
    "build": "tsc -b",
    "dev": "tsc -b --watch",
    "clean": "rm -rf dist .turbo *.tsbuildinfo",
    "typecheck": "tsc --noEmit",
    "lint": "biome lint ./src",
    "lint:fix": "biome lint --write ./src",
    "test": "vitest run --passWithNoTests",
    "test:watch": "vitest"
  }
}