@contractspec/lib.jobs 0.0.0-canary-20260113162409
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +126 -0
- package/dist/_virtual/rolldown_runtime.js +36 -0
- package/dist/contracts/index.d.ts +547 -0
- package/dist/contracts/index.d.ts.map +1 -0
- package/dist/contracts/index.js +482 -0
- package/dist/contracts/index.js.map +1 -0
- package/dist/entities/index.d.ts +145 -0
- package/dist/entities/index.d.ts.map +1 -0
- package/dist/entities/index.js +198 -0
- package/dist/entities/index.js.map +1 -0
- package/dist/events.d.ts +388 -0
- package/dist/events.d.ts.map +1 -0
- package/dist/events.js +353 -0
- package/dist/events.js.map +1 -0
- package/dist/handlers/gmail-sync-handler.d.ts +10 -0
- package/dist/handlers/gmail-sync-handler.d.ts.map +1 -0
- package/dist/handlers/gmail-sync-handler.js +10 -0
- package/dist/handlers/gmail-sync-handler.js.map +1 -0
- package/dist/handlers/index.d.ts +10 -0
- package/dist/handlers/index.d.ts.map +1 -0
- package/dist/handlers/index.js +13 -0
- package/dist/handlers/index.js.map +1 -0
- package/dist/handlers/ping-job.d.ts +11 -0
- package/dist/handlers/ping-job.d.ts.map +1 -0
- package/dist/handlers/ping-job.js +14 -0
- package/dist/handlers/ping-job.js.map +1 -0
- package/dist/handlers/storage-document-handler.d.ts +13 -0
- package/dist/handlers/storage-document-handler.d.ts.map +1 -0
- package/dist/handlers/storage-document-handler.js +15 -0
- package/dist/handlers/storage-document-handler.js.map +1 -0
- package/dist/index.d.ts +25 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +67 -0
- package/dist/index.js.map +1 -0
- package/dist/jobs.capability.d.ts +8 -0
- package/dist/jobs.capability.d.ts.map +1 -0
- package/dist/jobs.capability.js +33 -0
- package/dist/jobs.capability.js.map +1 -0
- package/dist/jobs.feature.d.ts +12 -0
- package/dist/jobs.feature.d.ts.map +1 -0
- package/dist/jobs.feature.js +110 -0
- package/dist/jobs.feature.js.map +1 -0
- package/dist/queue/gcp-cloud-tasks.d.ts +42 -0
- package/dist/queue/gcp-cloud-tasks.d.ts.map +1 -0
- package/dist/queue/gcp-cloud-tasks.js +61 -0
- package/dist/queue/gcp-cloud-tasks.js.map +1 -0
- package/dist/queue/gcp-pubsub.d.ts +26 -0
- package/dist/queue/gcp-pubsub.d.ts.map +1 -0
- package/dist/queue/gcp-pubsub.js +47 -0
- package/dist/queue/gcp-pubsub.js.map +1 -0
- package/dist/queue/index.d.ts +16 -0
- package/dist/queue/index.d.ts.map +1 -0
- package/dist/queue/index.js +23 -0
- package/dist/queue/index.js.map +1 -0
- package/dist/queue/memory-queue.d.ts +35 -0
- package/dist/queue/memory-queue.d.ts.map +1 -0
- package/dist/queue/memory-queue.js +140 -0
- package/dist/queue/memory-queue.js.map +1 -0
- package/dist/queue/register-defined-job.d.ts +8 -0
- package/dist/queue/register-defined-job.d.ts.map +1 -0
- package/dist/queue/register-defined-job.js +16 -0
- package/dist/queue/register-defined-job.js.map +1 -0
- package/dist/queue/scaleway-sqs-queue.d.ts +39 -0
- package/dist/queue/scaleway-sqs-queue.d.ts.map +1 -0
- package/dist/queue/scaleway-sqs-queue.js +175 -0
- package/dist/queue/scaleway-sqs-queue.js.map +1 -0
- package/dist/queue/types.d.ts +8 -0
- package/dist/queue/types.d.ts.map +1 -0
- package/dist/queue/types.js +12 -0
- package/dist/queue/types.js.map +1 -0
- package/dist/scheduler/index.d.ts +93 -0
- package/dist/scheduler/index.d.ts.map +1 -0
- package/dist/scheduler/index.js +146 -0
- package/dist/scheduler/index.js.map +1 -0
- package/package.json +97 -0

package/dist/jobs.capability.js
@@ -0,0 +1,33 @@
import { StabilityEnum, defineCapability } from "@contractspec/lib.contracts";

//#region src/jobs.capability.ts
const JobsCapability = defineCapability({ meta: {
  key: "jobs",
  version: "1.0.0",
  kind: "api",
  stability: StabilityEnum.Experimental,
  description: "Background job processing",
  owners: ["@platform.core"],
  tags: [
    "jobs",
    "background",
    "async"
  ]
} });
const SchedulerCapability = defineCapability({ meta: {
  key: "scheduler",
  version: "1.0.0",
  kind: "api",
  stability: StabilityEnum.Experimental,
  description: "Scheduled job execution",
  owners: ["@platform.core"],
  tags: [
    "scheduler",
    "cron",
    "jobs"
  ]
} });

//#endregion
export { JobsCapability, SchedulerCapability };
//# sourceMappingURL=jobs.capability.js.map

package/dist/jobs.capability.js.map
@@ -0,0 +1 @@
{"version":3,"file":"jobs.capability.js","names":[],"sources":["../src/jobs.capability.ts"],"sourcesContent":["import { defineCapability, StabilityEnum } from '@contractspec/lib.contracts';\n\nexport const JobsCapability = defineCapability({\n meta: {\n key: 'jobs',\n version: '1.0.0',\n kind: 'api',\n stability: StabilityEnum.Experimental,\n description: 'Background job processing',\n owners: ['@platform.core'],\n tags: ['jobs', 'background', 'async'],\n },\n});\n\nexport const SchedulerCapability = defineCapability({\n meta: {\n key: 'scheduler',\n version: '1.0.0',\n kind: 'api',\n stability: StabilityEnum.Experimental,\n description: 'Scheduled job execution',\n owners: ['@platform.core'],\n tags: ['scheduler', 'cron', 'jobs'],\n },\n});\n"],"mappings":";;;AAEA,MAAa,iBAAiB,iBAAiB,EAC7C,MAAM;CACJ,KAAK;CACL,SAAS;CACT,MAAM;CACN,WAAW,cAAc;CACzB,aAAa;CACb,QAAQ,CAAC,iBAAiB;CAC1B,MAAM;EAAC;EAAQ;EAAc;EAAQ;CACtC,EACF,CAAC;AAEF,MAAa,sBAAsB,iBAAiB,EAClD,MAAM;CACJ,KAAK;CACL,SAAS;CACT,MAAM;CACN,WAAW,cAAc;CACzB,aAAa;CACb,QAAQ,CAAC,iBAAiB;CAC1B,MAAM;EAAC;EAAa;EAAQ;EAAO;CACpC,EACF,CAAC"}

package/dist/jobs.feature.d.ts
@@ -0,0 +1,12 @@
import * as _contractspec_lib_contracts17 from "@contractspec/lib.contracts";

//#region src/jobs.feature.d.ts

/**
 * Jobs feature module that bundles background job processing,
 * queues, and scheduling capabilities.
 */
declare const JobsFeature: _contractspec_lib_contracts17.FeatureModuleSpec;
//#endregion
export { JobsFeature };
//# sourceMappingURL=jobs.feature.d.ts.map

package/dist/jobs.feature.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"jobs.feature.d.ts","names":[],"sources":["../src/jobs.feature.ts"],"sourcesContent":[],"mappings":";;;;;;;AAWA;cAAa,aAsDX,6BAAA,CAtDsB"}

package/dist/jobs.feature.js
@@ -0,0 +1,110 @@
import { defineFeature } from "@contractspec/lib.contracts";

//#region src/jobs.feature.ts
/**
 * Jobs Feature Module Specification
 *
 * Defines the feature module for background job processing and scheduling.
 */
/**
 * Jobs feature module that bundles background job processing,
 * queues, and scheduling capabilities.
 */
const JobsFeature = defineFeature({
  meta: {
    key: "jobs",
    title: "Background Jobs",
    description: "Background job processing, scheduling, and queue management",
    domain: "platform",
    owners: ["@platform.jobs"],
    tags: [
      "jobs",
      "queue",
      "background",
      "scheduler"
    ],
    stability: "stable",
    version: "1.0.0"
  },
  operations: [
    {
      key: "jobs.enqueue",
      version: "1.0.0"
    },
    {
      key: "jobs.cancel",
      version: "1.0.0"
    },
    {
      key: "jobs.get",
      version: "1.0.0"
    },
    {
      key: "jobs.stats",
      version: "1.0.0"
    },
    {
      key: "jobs.schedule.create",
      version: "1.0.0"
    },
    {
      key: "jobs.schedule.toggle",
      version: "1.0.0"
    },
    {
      key: "jobs.schedule.list",
      version: "1.0.0"
    }
  ],
  events: [
    {
      key: "job.enqueued",
      version: "1.0.0"
    },
    {
      key: "job.started",
      version: "1.0.0"
    },
    {
      key: "job.completed",
      version: "1.0.0"
    },
    {
      key: "job.failed",
      version: "1.0.0"
    },
    {
      key: "job.retrying",
      version: "1.0.0"
    },
    {
      key: "job.dead_lettered",
      version: "1.0.0"
    },
    {
      key: "job.cancelled",
      version: "1.0.0"
    },
    {
      key: "scheduler.job_triggered",
      version: "1.0.0"
    }
  ],
  presentations: [],
  opToPresentation: [],
  presentationsTargets: [],
  capabilities: {
    provides: [{
      key: "jobs",
      version: "1.0.0"
    }, {
      key: "scheduler",
      version: "1.0.0"
    }],
    requires: []
  }
});

//#endregion
export { JobsFeature };
//# sourceMappingURL=jobs.feature.js.map

package/dist/jobs.feature.js.map
@@ -0,0 +1 @@
{"version":3,"file":"jobs.feature.js","names":[],"sources":["../src/jobs.feature.ts"],"sourcesContent":["/**\n * Jobs Feature Module Specification\n *\n * Defines the feature module for background job processing and scheduling.\n */\nimport { defineFeature } from '@contractspec/lib.contracts';\n\n/**\n * Jobs feature module that bundles background job processing,\n * queues, and scheduling capabilities.\n */\nexport const JobsFeature = defineFeature({\n meta: {\n key: 'jobs',\n title: 'Background Jobs',\n description: 'Background job processing, scheduling, and queue management',\n domain: 'platform',\n owners: ['@platform.jobs'],\n tags: ['jobs', 'queue', 'background', 'scheduler'],\n stability: 'stable',\n version: '1.0.0',\n },\n\n // All contract operations included in this feature\n operations: [\n // Job operations\n { key: 'jobs.enqueue', version: '1.0.0' },\n { key: 'jobs.cancel', version: '1.0.0' },\n { key: 'jobs.get', version: '1.0.0' },\n { key: 'jobs.stats', version: '1.0.0' },\n\n // Schedule operations\n { key: 'jobs.schedule.create', version: '1.0.0' },\n { key: 'jobs.schedule.toggle', version: '1.0.0' },\n { key: 'jobs.schedule.list', version: '1.0.0' },\n ],\n\n // Events emitted by this feature\n events: [\n // Job lifecycle events\n { key: 'job.enqueued', version: '1.0.0' },\n { key: 'job.started', version: '1.0.0' },\n { key: 'job.completed', version: '1.0.0' },\n { key: 'job.failed', version: '1.0.0' },\n { key: 'job.retrying', version: '1.0.0' },\n { key: 'job.dead_lettered', version: '1.0.0' },\n { key: 'job.cancelled', version: '1.0.0' },\n\n // Scheduler events\n { key: 'scheduler.job_triggered', version: '1.0.0' },\n ],\n\n // No presentations for this library feature\n presentations: [],\n opToPresentation: [],\n presentationsTargets: [],\n\n // Capability definitions\n capabilities: {\n provides: [\n { key: 'jobs', version: '1.0.0' },\n { key: 'scheduler', version: '1.0.0' },\n ],\n requires: [],\n },\n});\n"],"mappings":";;;;;;;;;;;;AAWA,MAAa,cAAc,cAAc;CACvC,MAAM;EACJ,KAAK;EACL,OAAO;EACP,aAAa;EACb,QAAQ;EACR,QAAQ,CAAC,iBAAiB;EAC1B,MAAM;GAAC;GAAQ;GAAS;GAAc;GAAY;EAClD,WAAW;EACX,SAAS;EACV;CAGD,YAAY;EAEV;GAAE,KAAK;GAAgB,SAAS;GAAS;EACzC;GAAE,KAAK;GAAe,SAAS;GAAS;EACxC;GAAE,KAAK;GAAY,SAAS;GAAS;EACrC;GAAE,KAAK;GAAc,SAAS;GAAS;EAGvC;GAAE,KAAK;GAAwB,SAAS;GAAS;EACjD;GAAE,KAAK;GAAwB,SAAS;GAAS;EACjD;GAAE,KAAK;GAAsB,SAAS;GAAS;EAChD;CAGD,QAAQ;EAEN;GAAE,KAAK;GAAgB,SAAS;GAAS;EACzC;GAAE,KAAK;GAAe,SAAS;GAAS;EACxC;GAAE,KAAK;GAAiB,SAAS;GAAS;EAC1C;GAAE,KAAK;GAAc,SAAS;GAAS;EACvC;GAAE,KAAK;GAAgB,SAAS;GAAS;EACzC;GAAE,KAAK;GAAqB,SAAS;GAAS;EAC9C;GAAE,KAAK;GAAiB,SAAS;GAAS;EAG1C;GAAE,KAAK;GAA2B,SAAS;GAAS;EACrD;CAGD,eAAe,EAAE;CACjB,kBAAkB,EAAE;CACpB,sBAAsB,EAAE;CAGxB,cAAc;EACZ,UAAU,CACR;GAAE,KAAK;GAAQ,SAAS;GAAS,EACjC;GAAE,KAAK;GAAa,SAAS;GAAS,CACvC;EACD,UAAU,EAAE;EACb;CACF,CAAC"}

package/dist/queue/gcp-cloud-tasks.d.ts
@@ -0,0 +1,42 @@
import { types_d_exports } from "./types.js";

//#region src/queue/gcp-cloud-tasks.d.ts
interface CloudTasksClientLike {
  createTask(request: {
    parent: string;
    task: {
      httpRequest: {
        httpMethod: number | string;
        url: string;
        body: Buffer;
        headers?: Record<string, string>;
        oidcToken?: {
          serviceAccountEmail: string;
        };
      };
      scheduleTime?: {
        seconds: number;
      };
    };
  }): Promise<unknown>;
}
interface GcpCloudTasksQueueOptions {
  client: CloudTasksClientLike;
  projectId: string;
  location: string;
  queue: string;
  resolveUrl(jobType: string): string;
  serviceAccountEmail?: string;
}
declare class GcpCloudTasksQueue implements types_d_exports.JobQueue {
  private readonly options;
  private readonly handlers;
  constructor(options: GcpCloudTasksQueueOptions);
  enqueue<TPayload>(jobType: string, payload: TPayload, options?: types_d_exports.EnqueueOptions): Promise<types_d_exports.Job<TPayload>>;
  register<TPayload, TResult = void>(jobType: string, handler: types_d_exports.JobHandler<TPayload, TResult>): void;
  start(): void;
  stop(): Promise<void>;
}
//#endregion
export { GcpCloudTasksQueue, GcpCloudTasksQueueOptions };
//# sourceMappingURL=gcp-cloud-tasks.d.ts.map

package/dist/queue/gcp-cloud-tasks.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"gcp-cloud-tasks.d.ts","names":[],"sources":["../../src/queue/gcp-cloud-tasks.ts"],"sourcesContent":[],"mappings":";;;UAUU,oBAAA;;IAAA,MAAA,EAAA,MAAA;IAOI,IAAA,EAAA;MACI,WAAA,EAAA;QAKZ,UAAA,EAAA,MAAA,GAAA,MAAA;QAAO,GAAA,EAAA,MAAA;QAGI,IAAA,EATH,MASG;QASJ,OAAA,CAAA,EAjBK,MAiBc,CAAA,MAAA,EAAA,MAAA,CAAA;QAGQ,SAAA,CAAA,EAAA;UAI3B,mBAAA,EAAA,MAAA;QACA,CAAA;MACI,CAAA;MAAJ,YAAA,CAAA,EAAA;QAAR,OAAA,EAAA,MAAA;MAsDmB,CAAA;IAAU,CAAA;EAArB,CAAA,CAAA,EA3EP,OA2EO,CAAA,OAAA,CAAA;;AA/D8B,UAT1B,yBAAA,CAS0B;EAAQ,MAAA,EARzC,oBAQyC;;;;;;;cAAtC,kBAAA,YAA8B,eAAA,CAAA;;;uBAGH;8CAI3B,oBACA,eAAA,CAAA,iBACR,QAAQ,eAAA,CAAA,IAAI;+DAsDJ,eAAA,CAAA,WAAW,UAAU;;UASlB"}

package/dist/queue/gcp-cloud-tasks.js
@@ -0,0 +1,61 @@
import { types_exports } from "./types.js";
import { randomUUID } from "node:crypto";

//#region src/queue/gcp-cloud-tasks.ts
var GcpCloudTasksQueue = class {
  handlers = /* @__PURE__ */ new Map();
  constructor(options) {
    this.options = options;
  }
  async enqueue(jobType, payload, options = {}) {
    const now = /* @__PURE__ */ new Date();
    const enqueueTime = options.delaySeconds != null ? { seconds: Math.floor(Date.now() / 1e3) + options.delaySeconds } : void 0;
    const body = Buffer.from(JSON.stringify({
      id: randomUUID(),
      type: jobType,
      payload
    }), "utf-8");
    await this.options.client.createTask({
      parent: `projects/${this.options.projectId}/locations/${this.options.location}/queues/${this.options.queue}`,
      task: {
        httpRequest: {
          httpMethod: "POST",
          url: this.options.resolveUrl(jobType),
          body,
          headers: { "Content-Type": "application/json" },
          oidcToken: this.options.serviceAccountEmail ? { serviceAccountEmail: this.options.serviceAccountEmail } : void 0
        },
        scheduleTime: enqueueTime
      }
    });
    return {
      id: randomUUID(),
      type: jobType,
      version: "1.0.0",
      payload,
      status: "pending",
      priority: options.priority ?? 0,
      attempts: 0,
      maxRetries: options.maxRetries ?? types_exports.DEFAULT_RETRY_POLICY.maxRetries,
      createdAt: now,
      updatedAt: now,
      scheduledAt: options.delaySeconds ? new Date(now.getTime() + options.delaySeconds * 1e3) : now,
      dedupeKey: options.dedupeKey,
      tenantId: options.tenantId,
      userId: options.userId,
      traceId: options.traceId,
      metadata: options.metadata
    };
  }
  register(jobType, handler) {
    this.handlers.set(jobType, handler);
  }
  start() {}
  async stop() {
    this.handlers.clear();
  }
};

//#endregion
export { GcpCloudTasksQueue };
//# sourceMappingURL=gcp-cloud-tasks.js.map
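
The Cloud Tasks adapter above only publishes: `start()` is a no-op and execution happens when Cloud Tasks POSTs the task body back to a worker endpoint you operate. A minimal wiring sketch, assuming the official `@google-cloud/tasks` `CloudTasksClient` as the `CloudTasksClientLike` implementation, a package-root re-export of `GcpCloudTasksQueue`, and a hypothetical worker URL (none of these are confirmed by this diff):

```ts
import { CloudTasksClient } from "@google-cloud/tasks";
import { GcpCloudTasksQueue } from "@contractspec/lib.jobs";

const queue = new GcpCloudTasksQueue({
  client: new CloudTasksClient(),
  projectId: "my-project",
  location: "europe-west1",
  queue: "jobs",
  // Each job type is delivered as an HTTP POST with body { id, type, payload }
  // to the URL returned here (hypothetical endpoint).
  resolveUrl: (jobType) => `https://worker.example.com/tasks/${jobType}`,
  serviceAccountEmail: "jobs-invoker@my-project.iam.gserviceaccount.com"
});

// delaySeconds maps to the task's scheduleTime; the returned Job record is
// local bookkeeping only — retries and delivery are handled by Cloud Tasks.
await queue.enqueue("report.generate", { reportId: "r_123" }, { delaySeconds: 60 });
```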

package/dist/queue/gcp-cloud-tasks.js.map
@@ -0,0 +1 @@
{"version":3,"file":"gcp-cloud-tasks.js","names":["DEFAULT_RETRY_POLICY"],"sources":["../../src/queue/gcp-cloud-tasks.ts"],"sourcesContent":["import { randomUUID } from 'node:crypto';\n\nimport {\n DEFAULT_RETRY_POLICY,\n type EnqueueOptions,\n type Job,\n type JobHandler,\n type JobQueue,\n} from './types';\n\ninterface CloudTasksClientLike {\n createTask(request: {\n parent: string;\n task: {\n httpRequest: {\n httpMethod: number | string;\n url: string;\n body: Buffer;\n headers?: Record<string, string>;\n oidcToken?: { serviceAccountEmail: string };\n };\n scheduleTime?: { seconds: number };\n };\n }): Promise<unknown>;\n}\n\nexport interface GcpCloudTasksQueueOptions {\n client: CloudTasksClientLike;\n projectId: string;\n location: string;\n queue: string;\n resolveUrl(jobType: string): string;\n serviceAccountEmail?: string;\n}\n\nexport class GcpCloudTasksQueue implements JobQueue {\n private readonly handlers = new Map<string, JobHandler>();\n\n constructor(private readonly options: GcpCloudTasksQueueOptions) {}\n\n async enqueue<TPayload>(\n jobType: string,\n payload: TPayload,\n options: EnqueueOptions = {}\n ): Promise<Job<TPayload>> {\n const now = new Date();\n const enqueueTime =\n options.delaySeconds != null\n ? { seconds: Math.floor(Date.now() / 1000) + options.delaySeconds }\n : undefined;\n const body = Buffer.from(\n JSON.stringify({\n id: randomUUID(),\n type: jobType,\n payload,\n }),\n 'utf-8'\n );\n await this.options.client.createTask({\n parent: `projects/${this.options.projectId}/locations/${this.options.location}/queues/${this.options.queue}`,\n task: {\n httpRequest: {\n httpMethod: 'POST',\n url: this.options.resolveUrl(jobType),\n body,\n headers: { 'Content-Type': 'application/json' },\n oidcToken: this.options.serviceAccountEmail\n ? { serviceAccountEmail: this.options.serviceAccountEmail }\n : undefined,\n },\n scheduleTime: enqueueTime,\n },\n });\n\n return {\n id: randomUUID(),\n type: jobType,\n version: '1.0.0',\n payload,\n status: 'pending',\n priority: options.priority ?? 0,\n attempts: 0,\n maxRetries: options.maxRetries ?? DEFAULT_RETRY_POLICY.maxRetries,\n createdAt: now,\n updatedAt: now,\n scheduledAt: options.delaySeconds\n ? 
new Date(now.getTime() + options.delaySeconds * 1000)\n : now,\n dedupeKey: options.dedupeKey,\n tenantId: options.tenantId,\n userId: options.userId,\n traceId: options.traceId,\n metadata: options.metadata,\n };\n }\n\n register<TPayload, TResult = void>(\n jobType: string,\n handler: JobHandler<TPayload, TResult>\n ): void {\n this.handlers.set(jobType, handler as JobHandler);\n }\n\n start(): void {\n // Execution is handled by Cloud Tasks via HTTP callbacks.\n }\n\n async stop(): Promise<void> {\n this.handlers.clear();\n }\n}\n"],"mappings":";;;;AAmCA,IAAa,qBAAb,MAAoD;CAClD,AAAiB,2BAAW,IAAI,KAAyB;CAEzD,YAAY,AAAiB,SAAoC;EAApC;;CAE7B,MAAM,QACJ,SACA,SACA,UAA0B,EAAE,EACJ;EACxB,MAAM,sBAAM,IAAI,MAAM;EACtB,MAAM,cACJ,QAAQ,gBAAgB,OACpB,EAAE,SAAS,KAAK,MAAM,KAAK,KAAK,GAAG,IAAK,GAAG,QAAQ,cAAc,GACjE;EACN,MAAM,OAAO,OAAO,KAClB,KAAK,UAAU;GACb,IAAI,YAAY;GAChB,MAAM;GACN;GACD,CAAC,EACF,QACD;AACD,QAAM,KAAK,QAAQ,OAAO,WAAW;GACnC,QAAQ,YAAY,KAAK,QAAQ,UAAU,aAAa,KAAK,QAAQ,SAAS,UAAU,KAAK,QAAQ;GACrG,MAAM;IACJ,aAAa;KACX,YAAY;KACZ,KAAK,KAAK,QAAQ,WAAW,QAAQ;KACrC;KACA,SAAS,EAAE,gBAAgB,oBAAoB;KAC/C,WAAW,KAAK,QAAQ,sBACpB,EAAE,qBAAqB,KAAK,QAAQ,qBAAqB,GACzD;KACL;IACD,cAAc;IACf;GACF,CAAC;AAEF,SAAO;GACL,IAAI,YAAY;GAChB,MAAM;GACN,SAAS;GACT;GACA,QAAQ;GACR,UAAU,QAAQ,YAAY;GAC9B,UAAU;GACV,YAAY,QAAQ,cAAcA,mCAAqB;GACvD,WAAW;GACX,WAAW;GACX,aAAa,QAAQ,eACjB,IAAI,KAAK,IAAI,SAAS,GAAG,QAAQ,eAAe,IAAK,GACrD;GACJ,WAAW,QAAQ;GACnB,UAAU,QAAQ;GAClB,QAAQ,QAAQ;GAChB,SAAS,QAAQ;GACjB,UAAU,QAAQ;GACnB;;CAGH,SACE,SACA,SACM;AACN,OAAK,SAAS,IAAI,SAAS,QAAsB;;CAGnD,QAAc;CAId,MAAM,OAAsB;AAC1B,OAAK,SAAS,OAAO"}

package/dist/queue/gcp-pubsub.d.ts
@@ -0,0 +1,26 @@
import { types_d_exports } from "./types.js";

//#region src/queue/gcp-pubsub.d.ts
interface PubSubClientLike {
  topic(name: string): {
    publishMessage(message: {
      data: Buffer;
    }): Promise<string>;
  };
}
interface GcpPubSubQueueOptions {
  client: PubSubClientLike;
  topicName: string;
}
declare class GcpPubSubQueue implements types_d_exports.JobQueue {
  private readonly options;
  private readonly handlers;
  constructor(options: GcpPubSubQueueOptions);
  enqueue<TPayload>(jobType: string, payload: TPayload, options?: types_d_exports.EnqueueOptions): Promise<types_d_exports.Job<TPayload>>;
  register<TPayload, TResult = void>(jobType: string, handler: types_d_exports.JobHandler<TPayload, TResult>): void;
  start(): void;
  stop(): Promise<void>;
}
//#endregion
export { GcpPubSubQueue, GcpPubSubQueueOptions };
//# sourceMappingURL=gcp-pubsub.d.ts.map

package/dist/queue/gcp-pubsub.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"gcp-pubsub.d.ts","names":[],"sources":["../../src/queue/gcp-pubsub.ts"],"sourcesContent":[],"mappings":";;;UAUU,gBAAA;;IAAA,cAAA,CAAgB,OAAA,EAAA;MAMT,IAAA,EAJmB,MAInB;IAKJ,CAAA,CAAA,EATkC,OASlC,CAAA,MAAe,CAAA;EAGY,CAAA;;AAK3B,UAbI,qBAAA,CAaJ;EACI,MAAA,EAbP,gBAaO;EAAJ,SAAA,EAAA,MAAA;;AAqCW,cA9CX,cAAA,YAA0B,eAAA,CAAA,QA8Cf,CAAA;EAAU,iBAAA,OAAA;EAArB,iBAAA,QAAA;EASG,WAAA,CAAA,OAAA,EApDwB,qBAoDxB;EAvDuB,OAAA,CAAA,QAAA,CAAA,CAAA,OAAA,EAAA,MAAA,EAAA,OAAA,EAO1B,QAP0B,EAAA,OAAA,CAAA,EAQ1B,eAAA,CAAA,cAR0B,CAAA,EASlC,OATkC,CAS1B,eAAA,CAAA,GAT0B,CAStB,QATsB,CAAA,CAAA;EAAQ,QAAA,CAAA,QAAA,EAAA,UAAA,IAAA,CAAA,CAAA,OAAA,EAAA,MAAA,EAAA,OAAA,EA8ClC,eAAA,CAAA,UA9CkC,CA8CvB,QA9CuB,EA8Cb,OA9Ca,CAAA,CAAA,EAAA,IAAA;;UAuD/B"}

package/dist/queue/gcp-pubsub.js
@@ -0,0 +1,47 @@
import { types_exports } from "./types.js";
import { randomUUID } from "node:crypto";

//#region src/queue/gcp-pubsub.ts
var GcpPubSubQueue = class {
  handlers = /* @__PURE__ */ new Map();
  constructor(options) {
    this.options = options;
  }
  async enqueue(jobType, payload, options = {}) {
    const now = /* @__PURE__ */ new Date();
    await this.options.client.topic(this.options.topicName).publishMessage({ data: Buffer.from(JSON.stringify({
      id: randomUUID(),
      type: jobType,
      payload
    }), "utf-8") });
    return {
      id: randomUUID(),
      type: jobType,
      version: "1.0.0",
      payload,
      status: "pending",
      priority: options.priority ?? 0,
      attempts: 0,
      maxRetries: options.maxRetries ?? types_exports.DEFAULT_RETRY_POLICY.maxRetries,
      createdAt: now,
      updatedAt: now,
      scheduledAt: options.delaySeconds ? new Date(now.getTime() + options.delaySeconds * 1e3) : now,
      dedupeKey: options.dedupeKey,
      tenantId: options.tenantId,
      userId: options.userId,
      traceId: options.traceId,
      metadata: options.metadata
    };
  }
  register(jobType, handler) {
    this.handlers.set(jobType, handler);
  }
  start() {}
  async stop() {
    this.handlers.clear();
  }
};

//#endregion
export { GcpPubSubQueue };
//# sourceMappingURL=gcp-pubsub.js.map

package/dist/queue/gcp-pubsub.js.map
@@ -0,0 +1 @@
{"version":3,"file":"gcp-pubsub.js","names":["DEFAULT_RETRY_POLICY"],"sources":["../../src/queue/gcp-pubsub.ts"],"sourcesContent":["import { randomUUID } from 'node:crypto';\n\nimport {\n DEFAULT_RETRY_POLICY,\n type EnqueueOptions,\n type Job,\n type JobHandler,\n type JobQueue,\n} from './types';\n\ninterface PubSubClientLike {\n topic(name: string): {\n publishMessage(message: { data: Buffer }): Promise<string>;\n };\n}\n\nexport interface GcpPubSubQueueOptions {\n client: PubSubClientLike;\n topicName: string;\n}\n\nexport class GcpPubSubQueue implements JobQueue {\n private readonly handlers = new Map<string, JobHandler>();\n\n constructor(private readonly options: GcpPubSubQueueOptions) {}\n\n async enqueue<TPayload>(\n jobType: string,\n payload: TPayload,\n options: EnqueueOptions = {}\n ): Promise<Job<TPayload>> {\n const now = new Date();\n await this.options.client.topic(this.options.topicName).publishMessage({\n data: Buffer.from(\n JSON.stringify({\n id: randomUUID(),\n type: jobType,\n payload,\n }),\n 'utf-8'\n ),\n });\n\n return {\n id: randomUUID(),\n type: jobType,\n version: '1.0.0',\n payload,\n status: 'pending',\n priority: options.priority ?? 0,\n attempts: 0,\n maxRetries: options.maxRetries ?? DEFAULT_RETRY_POLICY.maxRetries,\n createdAt: now,\n updatedAt: now,\n scheduledAt: options.delaySeconds\n ? new Date(now.getTime() + options.delaySeconds * 1000)\n : now,\n dedupeKey: options.dedupeKey,\n tenantId: options.tenantId,\n userId: options.userId,\n traceId: options.traceId,\n metadata: options.metadata,\n };\n }\n\n register<TPayload, TResult = void>(\n jobType: string,\n handler: JobHandler<TPayload, TResult>\n ): void {\n this.handlers.set(jobType, handler as JobHandler);\n }\n\n start(): void {\n // Message consumption handled externally via Pub/Sub subscription.\n }\n\n async stop(): Promise<void> {\n this.handlers.clear();\n }\n}\n"],"mappings":";;;;AAqBA,IAAa,iBAAb,MAAgD;CAC9C,AAAiB,2BAAW,IAAI,KAAyB;CAEzD,YAAY,AAAiB,SAAgC;EAAhC;;CAE7B,MAAM,QACJ,SACA,SACA,UAA0B,EAAE,EACJ;EACxB,MAAM,sBAAM,IAAI,MAAM;AACtB,QAAM,KAAK,QAAQ,OAAO,MAAM,KAAK,QAAQ,UAAU,CAAC,eAAe,EACrE,MAAM,OAAO,KACX,KAAK,UAAU;GACb,IAAI,YAAY;GAChB,MAAM;GACN;GACD,CAAC,EACF,QACD,EACF,CAAC;AAEF,SAAO;GACL,IAAI,YAAY;GAChB,MAAM;GACN,SAAS;GACT;GACA,QAAQ;GACR,UAAU,QAAQ,YAAY;GAC9B,UAAU;GACV,YAAY,QAAQ,cAAcA,mCAAqB;GACvD,WAAW;GACX,WAAW;GACX,aAAa,QAAQ,eACjB,IAAI,KAAK,IAAI,SAAS,GAAG,QAAQ,eAAe,IAAK,GACrD;GACJ,WAAW,QAAQ;GACnB,UAAU,QAAQ;GAClB,QAAQ,QAAQ;GAChB,SAAS,QAAQ;GACjB,UAAU,QAAQ;GACnB;;CAGH,SACE,SACA,SACM;AACN,OAAK,SAAS,IAAI,SAAS,QAAsB;;CAGnD,QAAc;CAId,MAAM,OAAsB;AAC1B,OAAK,SAAS,OAAO"}

package/dist/queue/index.d.ts
@@ -0,0 +1,16 @@
import { __exportAll, __reExport } from "../_virtual/rolldown_runtime.js";
import { types_d_exports } from "./types.js";
import { MemoryJobQueue, MemoryQueueOptions } from "./memory-queue.js";
import { ScalewaySqsJobQueue, ScalewaySqsQueueConfig, ScalewaySqsQueueCredentials } from "./scaleway-sqs-queue.js";
import { GcpCloudTasksQueue, GcpCloudTasksQueueOptions } from "./gcp-cloud-tasks.js";
import { GcpPubSubQueue, GcpPubSubQueueOptions } from "./gcp-pubsub.js";
import { registerDefinedJob } from "./register-defined-job.js";
export * from "@contractspec/lib.contracts/jobs/queue";

//#region src/queue/index.d.ts
declare namespace index_d_exports {
  export { GcpCloudTasksQueue, GcpCloudTasksQueueOptions, GcpPubSubQueue, GcpPubSubQueueOptions, MemoryJobQueue, MemoryQueueOptions, ScalewaySqsJobQueue, ScalewaySqsQueueConfig, ScalewaySqsQueueCredentials, registerDefinedJob };
}
//#endregion
export { GcpCloudTasksQueue, GcpCloudTasksQueueOptions, GcpPubSubQueue, GcpPubSubQueueOptions, MemoryJobQueue, MemoryQueueOptions, ScalewaySqsJobQueue, ScalewaySqsQueueConfig, ScalewaySqsQueueCredentials, index_d_exports, registerDefinedJob };
//# sourceMappingURL=index.d.ts.map

package/dist/queue/index.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","names":[],"sources":["../../src/queue/index.ts"],"sourcesContent":[],"mappings":""}

package/dist/queue/index.js
@@ -0,0 +1,23 @@
import { __exportAll, __reExport } from "../_virtual/rolldown_runtime.js";
import { types_exports } from "./types.js";
import { MemoryJobQueue } from "./memory-queue.js";
import { ScalewaySqsJobQueue } from "./scaleway-sqs-queue.js";
import { GcpCloudTasksQueue } from "./gcp-cloud-tasks.js";
import { GcpPubSubQueue } from "./gcp-pubsub.js";
import { registerDefinedJob } from "./register-defined-job.js";

export * from "@contractspec/lib.contracts/jobs/queue"

//#region src/queue/index.ts
var queue_exports = /* @__PURE__ */ __exportAll({
  GcpCloudTasksQueue: () => GcpCloudTasksQueue,
  GcpPubSubQueue: () => GcpPubSubQueue,
  MemoryJobQueue: () => MemoryJobQueue,
  ScalewaySqsJobQueue: () => ScalewaySqsJobQueue,
  registerDefinedJob: () => registerDefinedJob
});
__reExport(queue_exports, types_exports);

//#endregion
export { GcpCloudTasksQueue, GcpPubSubQueue, MemoryJobQueue, ScalewaySqsJobQueue, queue_exports, registerDefinedJob };
//# sourceMappingURL=index.js.map

package/dist/queue/index.js.map
@@ -0,0 +1 @@
{"version":3,"file":"index.js","names":[],"sources":["../../src/queue/index.ts"],"sourcesContent":["export * from './types';\nexport * from './memory-queue';\nexport * from './scaleway-sqs-queue';\nexport * from './gcp-cloud-tasks';\nexport * from './gcp-pubsub';\nexport * from './register-defined-job';\n"],"mappings":""}

package/dist/queue/memory-queue.d.ts
@@ -0,0 +1,35 @@
import { types_d_exports } from "./types.js";

//#region src/queue/memory-queue.d.ts
interface MemoryQueueOptions {
  /** Poll interval in milliseconds */
  pollIntervalMs?: number;
  /** Maximum concurrent jobs */
  concurrency?: number;
  /** Default retry policy */
  retryPolicy?: types_d_exports.RetryPolicy;
}
/**
 * In-memory job queue for development and testing.
 */
declare class MemoryJobQueue implements types_d_exports.JobQueue {
  private readonly jobs;
  private readonly handlers;
  private timer?;
  private activeCount;
  private readonly pollIntervalMs;
  private readonly concurrency;
  private readonly retryPolicy;
  constructor(options?: MemoryQueueOptions);
  enqueue<TPayload>(jobType: string, payload: TPayload, options?: types_d_exports.EnqueueOptions): Promise<types_d_exports.Job<TPayload>>;
  register<TPayload, TResult = void>(jobType: string, handler: types_d_exports.JobHandler<TPayload, TResult>): void;
  start(): void;
  stop(): Promise<void>;
  getJob(jobId: string): Promise<types_d_exports.Job | null>;
  cancelJob(jobId: string): Promise<boolean>;
  getStats(): Promise<types_d_exports.QueueStats>;
  private processNext;
}
//#endregion
export { MemoryJobQueue, MemoryQueueOptions };
//# sourceMappingURL=memory-queue.d.ts.map

package/dist/queue/memory-queue.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"memory-queue.d.ts","names":[],"sources":["../../src/queue/memory-queue.ts"],"sourcesContent":[],"mappings":";;;UAWiB,kBAAA;;EAAA,cAAA,CAAA,EAAA,MAAkB;EAYtB;EASU,WAAA,CAAA,EAAA,MAAA;EAQV;EACA,WAAA,CAAA,EAxBG,eAAA,CAAA,WAwBH;;;;;AA8CqB,cAhErB,cAAA,YAA0B,eAAA,CAAA,QAgEL,CAAA;EAArB,iBAAA,IAAA;EAYG,iBAAA,QAAA;EAWuB,QAAA,KAAA;EAAR,QAAA,WAAA;EAIG,iBAAA,cAAA;EAUN,iBAAA,WAAA;EAAR,iBAAA,WAAA;EArGmB,WAAA,CAAA,OAAA,CAAA,EAShB,kBATgB;EAAQ,OAAA,CAAA,QAAA,CAAA,CAAA,OAAA,EAAA,MAAA,EAAA,OAAA,EAiBlC,QAjBkC,EAAA,OAAA,CAAA,EAkBlC,eAAA,CAAA,cAlBkC,CAAA,EAmB1C,OAnB0C,CAmBlC,eAAA,CAAA,GAnBkC,CAmB9B,QAnB8B,CAAA,CAAA;+DAgElC,eAAA,CAAA,WAAW,UAAU;;UAYlB;yBAWe,QAAQ,eAAA,CAAA;4BAIL;cAUd,QAAQ,eAAA,CAAA"}

package/dist/queue/memory-queue.js
@@ -0,0 +1,140 @@
import { types_exports } from "./types.js";
import { randomUUID } from "node:crypto";

//#region src/queue/memory-queue.ts
/**
 * In-memory job queue for development and testing.
 */
var MemoryJobQueue = class {
  jobs = /* @__PURE__ */ new Map();
  handlers = /* @__PURE__ */ new Map();
  timer;
  activeCount = 0;
  pollIntervalMs;
  concurrency;
  retryPolicy;
  constructor(options = {}) {
    this.pollIntervalMs = options.pollIntervalMs ?? 200;
    this.concurrency = options.concurrency ?? 5;
    this.retryPolicy = options.retryPolicy ?? types_exports.DEFAULT_RETRY_POLICY;
  }
  async enqueue(jobType, payload, options = {}) {
    if (options.dedupeKey) {
      const existing = Array.from(this.jobs.values()).find((j) => j.dedupeKey === options.dedupeKey && j.status === "pending");
      if (existing) return existing;
    }
    const now = /* @__PURE__ */ new Date();
    const scheduledAt = options.delaySeconds ? new Date(now.getTime() + options.delaySeconds * 1e3) : now;
    const job = {
      id: randomUUID(),
      type: jobType,
      version: "1.0.0",
      payload,
      status: "pending",
      priority: options.priority ?? 0,
      attempts: 0,
      maxRetries: options.maxRetries ?? this.retryPolicy.maxRetries,
      createdAt: now,
      updatedAt: now,
      scheduledAt,
      dedupeKey: options.dedupeKey,
      tenantId: options.tenantId,
      userId: options.userId,
      traceId: options.traceId,
      metadata: options.metadata
    };
    if (options.timeoutMs) job.timeoutAt = new Date(now.getTime() + options.timeoutMs);
    this.jobs.set(job.id, job);
    return job;
  }
  register(jobType, handler) {
    this.handlers.set(jobType, handler);
  }
  start() {
    if (this.timer) return;
    this.timer = setInterval(() => {
      this.processNext();
    }, this.pollIntervalMs);
  }
  async stop() {
    if (this.timer) {
      clearInterval(this.timer);
      this.timer = void 0;
    }
    while (this.activeCount > 0) await new Promise((resolve) => setTimeout(resolve, 50));
  }
  async getJob(jobId) {
    return this.jobs.get(jobId) ?? null;
  }
  async cancelJob(jobId) {
    const job = this.jobs.get(jobId);
    if (!job || job.status !== "pending") return false;
    job.status = "cancelled";
    job.updatedAt = /* @__PURE__ */ new Date();
    return true;
  }
  async getStats() {
    const stats = {
      pending: 0,
      running: 0,
      completed: 0,
      failed: 0,
      deadLetter: 0
    };
    for (const job of this.jobs.values()) switch (job.status) {
      case "pending":
        stats.pending++;
        break;
      case "running":
        stats.running++;
        break;
      case "completed":
        stats.completed++;
        break;
      case "failed":
        stats.failed++;
        break;
      case "dead_letter":
        stats.deadLetter++;
        break;
    }
    return stats;
  }
  async processNext() {
    if (this.activeCount >= this.concurrency) return;
    const now = /* @__PURE__ */ new Date();
    const job = Array.from(this.jobs.values()).filter((j) => j.status === "pending" && (!j.scheduledAt || j.scheduledAt <= now)).sort((a, b) => {
      if (a.priority !== b.priority) return b.priority - a.priority;
      return (a.scheduledAt?.getTime() ?? 0) - (b.scheduledAt?.getTime() ?? 0);
    })[0];
    if (!job) return;
    const handler = this.handlers.get(job.type);
    if (!handler) return;
    this.activeCount++;
    job.status = "running";
    job.startedAt = /* @__PURE__ */ new Date();
    job.updatedAt = /* @__PURE__ */ new Date();
    job.attempts += 1;
    try {
      const result = await handler(job);
      job.status = "completed";
      job.completedAt = /* @__PURE__ */ new Date();
      job.result = result;
    } catch (error) {
      job.lastError = error instanceof Error ? error.message : "Unknown error";
      if (job.attempts >= job.maxRetries) job.status = "dead_letter";
      else {
        const backoff = (0, types_exports.calculateBackoff)(job.attempts, this.retryPolicy);
        job.status = "pending";
        job.scheduledAt = new Date(Date.now() + backoff);
      }
    } finally {
      job.updatedAt = /* @__PURE__ */ new Date();
      this.activeCount--;
    }
  }
};

//#endregion
export { MemoryJobQueue };
//# sourceMappingURL=memory-queue.js.map

package/dist/queue/memory-queue.js.map
@@ -0,0 +1 @@
{"version":3,"file":"memory-queue.js","names":["DEFAULT_RETRY_POLICY"],"sources":["../../src/queue/memory-queue.ts"],"sourcesContent":["import { randomUUID } from 'node:crypto';\nimport type {\n Job,\n JobHandler,\n JobQueue,\n EnqueueOptions,\n QueueStats,\n RetryPolicy,\n} from './types';\nimport { calculateBackoff, DEFAULT_RETRY_POLICY } from './types';\n\nexport interface MemoryQueueOptions {\n /** Poll interval in milliseconds */\n pollIntervalMs?: number;\n /** Maximum concurrent jobs */\n concurrency?: number;\n /** Default retry policy */\n retryPolicy?: RetryPolicy;\n}\n\n/**\n * In-memory job queue for development and testing.\n */\nexport class MemoryJobQueue implements JobQueue {\n private readonly jobs = new Map<string, Job>();\n private readonly handlers = new Map<string, JobHandler>();\n private timer?: ReturnType<typeof setInterval>;\n private activeCount = 0;\n private readonly pollIntervalMs: number;\n private readonly concurrency: number;\n private readonly retryPolicy: RetryPolicy;\n\n constructor(options: MemoryQueueOptions = {}) {\n this.pollIntervalMs = options.pollIntervalMs ?? 200;\n this.concurrency = options.concurrency ?? 5;\n this.retryPolicy = options.retryPolicy ?? DEFAULT_RETRY_POLICY;\n }\n\n async enqueue<TPayload>(\n jobType: string,\n payload: TPayload,\n options: EnqueueOptions = {}\n ): Promise<Job<TPayload>> {\n // Check for duplicate\n if (options.dedupeKey) {\n const existing = Array.from(this.jobs.values()).find(\n (j) => j.dedupeKey === options.dedupeKey && j.status === 'pending'\n );\n if (existing) {\n return existing as Job<TPayload>;\n }\n }\n\n const now = new Date();\n const scheduledAt = options.delaySeconds\n ? new Date(now.getTime() + options.delaySeconds * 1000)\n : now;\n\n const job: Job<TPayload> = {\n id: randomUUID(),\n type: jobType,\n version: '1.0.0',\n payload,\n status: 'pending',\n priority: options.priority ?? 0,\n attempts: 0,\n maxRetries: options.maxRetries ?? this.retryPolicy.maxRetries,\n createdAt: now,\n updatedAt: now,\n scheduledAt,\n dedupeKey: options.dedupeKey,\n tenantId: options.tenantId,\n userId: options.userId,\n traceId: options.traceId,\n metadata: options.metadata,\n };\n\n if (options.timeoutMs) {\n job.timeoutAt = new Date(now.getTime() + options.timeoutMs);\n }\n\n this.jobs.set(job.id, job);\n return job;\n }\n\n register<TPayload, TResult = void>(\n jobType: string,\n handler: JobHandler<TPayload, TResult>\n ): void {\n this.handlers.set(jobType, handler as JobHandler);\n }\n\n start(): void {\n if (this.timer) return;\n this.timer = setInterval(() => {\n void this.processNext();\n }, this.pollIntervalMs);\n }\n\n async stop(): Promise<void> {\n if (this.timer) {\n clearInterval(this.timer);\n this.timer = undefined;\n }\n // Wait for active jobs to complete\n while (this.activeCount > 0) {\n await new Promise((resolve) => setTimeout(resolve, 50));\n }\n }\n\n async getJob(jobId: string): Promise<Job | null> {\n return this.jobs.get(jobId) ?? 
null;\n }\n\n async cancelJob(jobId: string): Promise<boolean> {\n const job = this.jobs.get(jobId);\n if (!job || job.status !== 'pending') {\n return false;\n }\n job.status = 'cancelled';\n job.updatedAt = new Date();\n return true;\n }\n\n async getStats(): Promise<QueueStats> {\n const stats: QueueStats = {\n pending: 0,\n running: 0,\n completed: 0,\n failed: 0,\n deadLetter: 0,\n };\n\n for (const job of this.jobs.values()) {\n switch (job.status) {\n case 'pending':\n stats.pending++;\n break;\n case 'running':\n stats.running++;\n break;\n case 'completed':\n stats.completed++;\n break;\n case 'failed':\n stats.failed++;\n break;\n case 'dead_letter':\n stats.deadLetter++;\n break;\n }\n }\n\n return stats;\n }\n\n private async processNext(): Promise<void> {\n if (this.activeCount >= this.concurrency) return;\n\n const now = new Date();\n const pendingJobs = Array.from(this.jobs.values())\n .filter(\n (j) =>\n j.status === 'pending' && (!j.scheduledAt || j.scheduledAt <= now)\n )\n .sort((a, b) => {\n // Higher priority first\n if (a.priority !== b.priority) {\n return b.priority - a.priority;\n }\n // Earlier scheduled first\n return (\n (a.scheduledAt?.getTime() ?? 0) - (b.scheduledAt?.getTime() ?? 0)\n );\n });\n\n const job = pendingJobs[0];\n if (!job) return;\n\n const handler = this.handlers.get(job.type);\n if (!handler) return;\n\n this.activeCount++;\n job.status = 'running';\n job.startedAt = new Date();\n job.updatedAt = new Date();\n job.attempts += 1;\n\n try {\n const result = await handler(job);\n job.status = 'completed';\n job.completedAt = new Date();\n job.result = result;\n } catch (error) {\n const errorMessage =\n error instanceof Error ? error.message : 'Unknown error';\n job.lastError = errorMessage;\n\n if (job.attempts >= job.maxRetries) {\n job.status = 'dead_letter';\n } else {\n // Schedule retry with backoff\n const backoff = calculateBackoff(job.attempts, this.retryPolicy);\n job.status = 'pending';\n job.scheduledAt = new Date(Date.now() + backoff);\n }\n } finally {\n job.updatedAt = new Date();\n this.activeCount--;\n }\n 
}\n}\n"],"mappings":";;;;;;;AAuBA,IAAa,iBAAb,MAAgD;CAC9C,AAAiB,uBAAO,IAAI,KAAkB;CAC9C,AAAiB,2BAAW,IAAI,KAAyB;CACzD,AAAQ;CACR,AAAQ,cAAc;CACtB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CAEjB,YAAY,UAA8B,EAAE,EAAE;AAC5C,OAAK,iBAAiB,QAAQ,kBAAkB;AAChD,OAAK,cAAc,QAAQ,eAAe;AAC1C,OAAK,cAAc,QAAQ,eAAeA;;CAG5C,MAAM,QACJ,SACA,SACA,UAA0B,EAAE,EACJ;AAExB,MAAI,QAAQ,WAAW;GACrB,MAAM,WAAW,MAAM,KAAK,KAAK,KAAK,QAAQ,CAAC,CAAC,MAC7C,MAAM,EAAE,cAAc,QAAQ,aAAa,EAAE,WAAW,UAC1D;AACD,OAAI,SACF,QAAO;;EAIX,MAAM,sBAAM,IAAI,MAAM;EACtB,MAAM,cAAc,QAAQ,eACxB,IAAI,KAAK,IAAI,SAAS,GAAG,QAAQ,eAAe,IAAK,GACrD;EAEJ,MAAM,MAAqB;GACzB,IAAI,YAAY;GAChB,MAAM;GACN,SAAS;GACT;GACA,QAAQ;GACR,UAAU,QAAQ,YAAY;GAC9B,UAAU;GACV,YAAY,QAAQ,cAAc,KAAK,YAAY;GACnD,WAAW;GACX,WAAW;GACX;GACA,WAAW,QAAQ;GACnB,UAAU,QAAQ;GAClB,QAAQ,QAAQ;GAChB,SAAS,QAAQ;GACjB,UAAU,QAAQ;GACnB;AAED,MAAI,QAAQ,UACV,KAAI,YAAY,IAAI,KAAK,IAAI,SAAS,GAAG,QAAQ,UAAU;AAG7D,OAAK,KAAK,IAAI,IAAI,IAAI,IAAI;AAC1B,SAAO;;CAGT,SACE,SACA,SACM;AACN,OAAK,SAAS,IAAI,SAAS,QAAsB;;CAGnD,QAAc;AACZ,MAAI,KAAK,MAAO;AAChB,OAAK,QAAQ,kBAAkB;AAC7B,GAAK,KAAK,aAAa;KACtB,KAAK,eAAe;;CAGzB,MAAM,OAAsB;AAC1B,MAAI,KAAK,OAAO;AACd,iBAAc,KAAK,MAAM;AACzB,QAAK,QAAQ;;AAGf,SAAO,KAAK,cAAc,EACxB,OAAM,IAAI,SAAS,YAAY,WAAW,SAAS,GAAG,CAAC;;CAI3D,MAAM,OAAO,OAAoC;AAC/C,SAAO,KAAK,KAAK,IAAI,MAAM,IAAI;;CAGjC,MAAM,UAAU,OAAiC;EAC/C,MAAM,MAAM,KAAK,KAAK,IAAI,MAAM;AAChC,MAAI,CAAC,OAAO,IAAI,WAAW,UACzB,QAAO;AAET,MAAI,SAAS;AACb,MAAI,4BAAY,IAAI,MAAM;AAC1B,SAAO;;CAGT,MAAM,WAAgC;EACpC,MAAM,QAAoB;GACxB,SAAS;GACT,SAAS;GACT,WAAW;GACX,QAAQ;GACR,YAAY;GACb;AAED,OAAK,MAAM,OAAO,KAAK,KAAK,QAAQ,CAClC,SAAQ,IAAI,QAAZ;GACE,KAAK;AACH,UAAM;AACN;GACF,KAAK;AACH,UAAM;AACN;GACF,KAAK;AACH,UAAM;AACN;GACF,KAAK;AACH,UAAM;AACN;GACF,KAAK;AACH,UAAM;AACN;;AAIN,SAAO;;CAGT,MAAc,cAA6B;AACzC,MAAI,KAAK,eAAe,KAAK,YAAa;EAE1C,MAAM,sBAAM,IAAI,MAAM;EAiBtB,MAAM,MAhBc,MAAM,KAAK,KAAK,KAAK,QAAQ,CAAC,CAC/C,QACE,MACC,EAAE,WAAW,cAAc,CAAC,EAAE,eAAe,EAAE,eAAe,KACjE,CACA,MAAM,GAAG,MAAM;AAEd,OAAI,EAAE,aAAa,EAAE,SACnB,QAAO,EAAE,WAAW,EAAE;AAGxB,WACG,EAAE,aAAa,SAAS,IAAI,MAAM,EAAE,aAAa,SAAS,IAAI;IAEjE,CAEoB;AACxB,MAAI,CAAC,IAAK;EAEV,MAAM,UAAU,KAAK,SAAS,IAAI,IAAI,KAAK;AAC3C,MAAI,CAAC,QAAS;AAEd,OAAK;AACL,MAAI,SAAS;AACb,MAAI,4BAAY,IAAI,MAAM;AAC1B,MAAI,4BAAY,IAAI,MAAM;AAC1B,MAAI,YAAY;AAEhB,MAAI;GACF,MAAM,SAAS,MAAM,QAAQ,IAAI;AACjC,OAAI,SAAS;AACb,OAAI,8BAAc,IAAI,MAAM;AAC5B,OAAI,SAAS;WACN,OAAO;AAGd,OAAI,YADF,iBAAiB,QAAQ,MAAM,UAAU;AAG3C,OAAI,IAAI,YAAY,IAAI,WACtB,KAAI,SAAS;QACR;IAEL,MAAM,8CAA2B,IAAI,UAAU,KAAK,YAAY;AAChE,QAAI,SAAS;AACb,QAAI,cAAc,IAAI,KAAK,KAAK,KAAK,GAAG,QAAQ;;YAE1C;AACR,OAAI,4BAAY,IAAI,MAAM;AAC1B,QAAK"}

package/dist/queue/register-defined-job.d.ts
@@ -0,0 +1,8 @@
import { JobQueue } from "@contractspec/lib.contracts/jobs/queue";
import { DefinedJob } from "@contractspec/lib.contracts/jobs/define-job";

//#region src/queue/register-defined-job.d.ts
declare function registerDefinedJob<TPayload>(queue: JobQueue, def: DefinedJob<TPayload>): void;
//#endregion
export { registerDefinedJob };
//# sourceMappingURL=register-defined-job.d.ts.map

package/dist/queue/register-defined-job.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"register-defined-job.d.ts","names":[],"sources":["../../src/queue/register-defined-job.ts"],"sourcesContent":[],"mappings":";;;;iBAOgB,oCACP,eACF,WAAW"}