@falcondev-oss/workflow 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +20 -0
- package/README.md +65 -0
- package/dist/index.d.mts +96 -0
- package/dist/index.mjs +243 -0
- package/package.json +64 -0
package/LICENSE
ADDED
@@ -0,0 +1,20 @@
+The MIT License (MIT)
+Copyright (c) 2019 GitHub
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
package/README.md
ADDED
@@ -0,0 +1,65 @@
+# @falcondev-oss/workflow
+
+Simple type-safe queue worker with durable execution based on BullMQ.
+
+## Installation
+
+```bash
+npm install @falcondev-oss/workflow
+```
+
+## Usage
+
+```ts
+const workflow = new Workflow({
+  id: 'example-workflow',
+  input: z.object({
+    timezone: z.string().default('UTC'),
+    name: z.string(),
+  }),
+  async run({ input, step }) {
+    await step.do('send welcome', () => {
+      console.log(`Welcome, ${input.name}! Timezone: ${input.timezone}`)
+    })
+
+    await step.wait('wait a lil', 60_000)
+
+    const isEngaged = await step.do('check engagement', () => {
+      return Math.random() > 0.5
+    })
+
+    if (!isEngaged) return { engagementLevel: 'low' }
+
+    await step.do('send tips', () => {
+      console.log(`Here are some tips to get started, ${input.name}!`)
+    })
+
+    await step.wait('wait feedback', 3000)
+
+    await step.do('send survey', () => {
+      console.log(`Hi ${input.name}, please take our survey!`)
+    })
+
+    return {
+      engagementLevel: 'high',
+    }
+  },
+})
+
+// Start worker
+await workflow.work()
+
+// Run workflow
+const job = await workflow.run({
+  name: 'John Doe',
+  timezone: 'America/New_York',
+})
+
+// Wait for completion
+const result = await job.wait()
+console.log(result.engagementLevel)
+```
+
+## Inspiration
+
+- https://x.com/imsh4yy/status/1984073526605967785?s=46
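The README's usage snippet leaves its imports implicit. Given the package's export of `Workflow` (see dist/index.d.mts below) and its zod dependency, a minimal preamble for running it would presumably look like the sketch below; without an explicit connection, a Redis instance on the default localhost port is expected (see the settings region in dist/index.mjs).

```ts
// Assumed preamble for the README example above (not part of the published README).
// `Workflow` is exported by this package; `z` comes from the zod dependency.
import { Workflow } from '@falcondev-oss/workflow'
import { z } from 'zod'
```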
package/dist/index.d.mts
ADDED
@@ -0,0 +1,96 @@
+import { ConnectionOptions, Job, JobsOptions, Queue, QueueEvents, QueueEventsOptions, QueueOptions, UnrecoverableError, WorkerOptions } from "bullmq";
+import "@antfu/utils";
+import IORedis from "ioredis";
+import { SuperJSONResult } from "superjson";
+import { Options } from "p-retry";
+import { StandardSchemaV1 } from "@standard-schema/spec";
+import { SetOptional, Tagged } from "type-fest";
+
+//#region src/errors.d.ts
+declare class WorkflowInputError extends UnrecoverableError {
+  issues: readonly StandardSchemaV1.Issue[];
+  constructor(message: string, issues: readonly StandardSchemaV1.Issue[]);
+}
+//#endregion
+//#region src/settings.d.ts
+declare const Settings: {
+  defaultPrefix: string;
+  defaultConnection: IORedis | undefined;
+};
+//#endregion
+//#region src/serializer.d.ts
+type Serialized<T> = Tagged<SuperJSONResult, 'data', T>;
+//#endregion
+//#region src/step.d.ts
+type WorkflowStepData = {
+  type: 'do';
+  result?: unknown;
+  attempt: number;
+} | {
+  type: 'wait';
+  durationMs: number;
+  startedAt: number;
+};
+declare class WorkflowStep {
+  private workflowId;
+  private queue;
+  private workflowJobId;
+  constructor(opts: {
+    queue: WorkflowQueueInternal<any, any>;
+    workflowJobId: string;
+    workflowId: string;
+  });
+  do<R>(name: string, run: () => R, options?: WorkflowStepOptions): Promise<R>;
+  wait(name: string, durationMs: number): Promise<void>;
+  waitUntil(name: string, date: Date): Promise<void>;
+  private getStepData;
+  private updateStepData;
+  private getWorkflowJob;
+}
+interface WorkflowStepOptions {
+  retry?: Options;
+}
+//#endregion
+//#region src/types.d.ts
+type WorkflowJobInternal<Input, Output> = Job<Serialized<{
+  input: Input;
+  stepData: Record<string, WorkflowStepData>;
+}>, Serialized<Output>, string>;
+type WorkflowQueueInternal<Input, Output> = Queue<WorkflowJobInternal<Input, Output>>;
+//#endregion
+//#region src/job.d.ts
+declare class WorkflowJob<Output> {
+  private job;
+  private queueEvents;
+  constructor(opts: {
+    job: WorkflowJobInternal<unknown, Output>;
+    queueEvents: QueueEvents;
+  });
+  wait(timeoutMs?: number): Promise<Output>;
+}
+//#endregion
+//#region src/workflow.d.ts
+interface WorkflowOptions<RunInput, Input, Output> {
+  id: string;
+  input: StandardSchemaV1<RunInput, Input>;
+  run: (context: WorkflowRunContext<Input>) => Promise<Output>;
+  queueOptions?: SetOptional<QueueOptions, 'connection'>;
+  queueEventsOptions?: SetOptional<QueueEventsOptions, 'connection'>;
+  connection?: ConnectionOptions;
+}
+declare class Workflow<RunInput, Input, Output> {
+  private opts;
+  private queue?;
+  private queueEvents?;
+  constructor(opts: WorkflowOptions<RunInput, Input, Output>);
+  work(opts?: Omit<SetOptional<WorkerOptions, 'connection'>, 'autorun'>): Promise<this>;
+  run(input: RunInput, opts?: JobsOptions): Promise<WorkflowJob<Output>>;
+  private getOrCreateQueue;
+  private getOrCreateQueueEvents;
+}
+interface WorkflowRunContext<Input> {
+  input: Input;
+  step: WorkflowStep;
+}
+//#endregion
+export { Settings, Workflow, WorkflowInputError, WorkflowOptions, WorkflowRunContext };
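Per the declarations above, `WorkflowStepOptions.retry` forwards p-retry's `Options`, and `waitUntil()` takes an absolute `Date`. A hedged sketch of how those might be used inside a `run()` callback follows; the billing workflow and the `chargeCustomer` helper are made up for illustration and are not part of this package.

```ts
import { Workflow } from '@falcondev-oss/workflow'
import { z } from 'zod'

// Hypothetical helper, not provided by this package.
declare function chargeCustomer(orderId: string): Promise<void>

const billing = new Workflow({
  id: 'billing',
  input: z.object({ orderId: z.string(), dueDate: z.coerce.date() }),
  async run({ input, step }) {
    // `retry` is passed through to p-retry; the implementation in
    // dist/index.mjs below also invokes `onFailedAttempt` with the p-retry context.
    await step.do('charge', () => chargeCustomer(input.orderId), {
      retry: {
        retries: 3,
        onFailedAttempt: ({ attemptNumber }) => {
          console.warn(`charge attempt ${attemptNumber} failed`)
        },
      },
    })

    // waitUntil() converts the Date into a remaining duration and delegates to wait().
    await step.waitUntil('until due date', input.dueDate)
    return { charged: true }
  },
})
```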
package/dist/index.mjs
ADDED
@@ -0,0 +1,243 @@
+import { Queue, QueueEvents, UnrecoverableError, Worker } from "bullmq";
+import { createSingletonPromise } from "@antfu/utils";
+import IORedis from "ioredis";
+import { deserialize, serialize } from "superjson";
+import { setTimeout } from "node:timers/promises";
+import pRetry from "p-retry";
+import { SpanStatusCode, trace } from "@opentelemetry/api";
+
+//#region src/errors.ts
+var WorkflowInputError = class extends UnrecoverableError {
+  issues;
+  constructor(message, issues) {
+    super(message);
+    this.issues = issues;
+  }
+};
+
+//#endregion
+//#region src/settings.ts
+const Settings = {
+  defaultPrefix: "falcondev-oss-workflow",
+  defaultConnection: void 0
+};
+const defaultRedisConnection = createSingletonPromise(async () => {
+  if (Settings.defaultConnection) return Settings.defaultConnection;
+  const redis = new IORedis({
+    lazyConnect: true,
+    maxRetriesPerRequest: null
+  });
+  await redis.connect();
+  return redis;
+});
+
+//#endregion
+//#region src/serializer.ts
+function serialize$1(data) {
+  return serialize(data);
+}
+function deserialize$1(data) {
+  return deserialize(data);
+}
+
+//#endregion
+//#region src/job.ts
+var WorkflowJob = class {
+  job;
+  queueEvents;
+  constructor(opts) {
+    this.job = opts.job;
+    this.queueEvents = opts.queueEvents;
+  }
+  async wait(timeoutMs) {
+    return deserialize$1(await this.job.waitUntilFinished(this.queueEvents, timeoutMs));
+  }
+};
+
+//#endregion
+//#region src/tracer.ts
+function getTracer() {
+  return trace.getTracer("falcondev-oss-workflow");
+}
+async function runWithTracing(spanName, attributes, fn) {
+  return getTracer().startActiveSpan(spanName, async (span) => {
+    try {
+      span.setAttributes(attributes);
+      const result = await fn();
+      span.setStatus({ code: SpanStatusCode.OK });
+      return result;
+    } catch (err_) {
+      const err = err_;
+      span.recordException(err);
+      span.setStatus({
+        code: SpanStatusCode.ERROR,
+        message: err.message
+      });
+      throw err_;
+    } finally {
+      span.end();
+    }
+  });
+}
+
+//#endregion
+//#region src/step.ts
+var WorkflowStep = class {
+  workflowId;
+  queue;
+  workflowJobId;
+  constructor(opts) {
+    this.queue = opts.queue;
+    this.workflowJobId = opts.workflowJobId;
+    this.workflowId = opts.workflowId;
+  }
+  async do(name, run, options) {
+    const stepData = await this.getStepData("do", "name");
+    if (stepData && "result" in stepData) return stepData.result;
+    const initialAttempt = stepData?.attempt ?? 0;
+    await this.updateStepData(name, {
+      type: "do",
+      attempt: initialAttempt
+    });
+    return pRetry(async (attempt) => {
+      const result = await runWithTracing(`step:${name}`, {
+        "workflow.id": this.workflowId,
+        "workflow.job_id": this.workflowJobId,
+        "workflow.step_name": name,
+        "workflow.step.attempt": attempt
+      }, run);
+      await this.updateStepData(name, {
+        type: "do",
+        result,
+        attempt: initialAttempt + attempt
+      });
+      return result;
+    }, {
+      ...options?.retry,
+      retries: (options?.retry?.retries ?? 0) - initialAttempt,
+      onFailedAttempt: async (context) => {
+        await this.updateStepData(name, {
+          type: "do",
+          attempt: initialAttempt + context.attemptNumber
+        });
+        return options?.retry?.onFailedAttempt?.(context);
+      }
+    });
+  }
+  async wait(name, durationMs) {
+    const job = await this.getWorkflowJob();
+    const existingStepData = await this.getStepData("wait", name);
+    const now = Date.now();
+    const stepData = existingStepData ?? {
+      type: "wait",
+      durationMs,
+      startedAt: now
+    };
+    await this.updateStepData(name, stepData);
+    await runWithTracing(`step:${name}`, {
+      "workflow.id": this.workflowId,
+      "workflow.job_id": this.workflowJobId,
+      "workflow.step_name": name
+    }, async () => {
+      const remainingMs = Math.max(0, stepData.startedAt + stepData.durationMs - now);
+      const interval = setInterval(() => {
+        job.updateProgress(name);
+      }, 15e3);
+      await setTimeout(remainingMs);
+      clearInterval(interval);
+    });
+  }
+  async waitUntil(name, date) {
+    const now = Date.now();
+    const targetTime = date.getTime();
+    const durationMs = Math.max(0, targetTime - now);
+    return this.wait(name, durationMs);
+  }
+  async getStepData(type, stepName) {
+    const stepData = deserialize$1((await this.getWorkflowJob()).data).stepData[stepName];
+    if (stepData && stepData.type !== type) throw new Error(`Step "${stepName}" is of type "${stepData.type}", expected "${type}"`);
+    return stepData;
+  }
+  async updateStepData(stepName, data) {
+    const job = await this.getWorkflowJob();
+    const jobData = deserialize$1(job.data);
+    jobData.stepData[stepName] = data;
+    await Promise.all([job.updateData(serialize$1(jobData)), job.updateProgress(stepName)]);
+  }
+  async getWorkflowJob() {
+    const job = await this.queue.getJob(this.workflowJobId);
+    if (!job) throw new UnrecoverableError(`Could not find workflow job with ID ${this.workflowJobId}`);
+    return job;
+  }
+};
+
+//#endregion
+//#region src/workflow.ts
+var Workflow = class {
+  opts;
+  queue;
+  queueEvents;
+  constructor(opts) {
+    this.opts = opts;
+  }
+  async work(opts) {
+    const queue = await this.getOrCreateQueue();
+    await new Worker(this.opts.id, async (job) => {
+      const jobId = job.id;
+      if (!jobId) throw new Error("Job ID is missing");
+      const deserializedData = deserialize$1(job.data);
+      const parsedData = await this.opts.input["~standard"].validate(deserializedData.input);
+      if (parsedData.issues) throw new WorkflowInputError("Invalid workflow input", parsedData.issues);
+      return runWithTracing(`workflow:${this.opts.id}`, {
+        "workflow.id": this.opts.id,
+        "workflow.job_id": jobId
+      }, async () => {
+        return serialize$1(await this.opts.run({
+          input: parsedData.value,
+          step: new WorkflowStep({
+            queue,
+            workflowJobId: jobId,
+            workflowId: this.opts.id
+          })
+        }));
+      });
+    }, {
+      connection: this.opts.connection ?? await defaultRedisConnection(),
+      prefix: Settings.defaultPrefix,
+      ...opts
+    }).waitUntilReady();
+    return this;
+  }
+  async run(input, opts) {
+    const parsedInput = await this.opts.input["~standard"].validate(input);
+    if (parsedInput.issues) throw new WorkflowInputError("Invalid workflow input", parsedInput.issues);
+    return new WorkflowJob({
+      job: await (await this.getOrCreateQueue()).add("workflow-job", serialize$1({
+        input: parsedInput.value,
+        stepData: {}
+      }), opts),
+      queueEvents: await this.getOrCreateQueueEvents()
+    });
+  }
+  async getOrCreateQueue() {
+    if (!this.queue) this.queue = new Queue(this.opts.id, {
+      prefix: Settings.defaultPrefix,
+      connection: this.opts.connection ?? await defaultRedisConnection(),
+      ...this.opts.queueOptions
+    });
+    await this.queue.waitUntilReady();
+    return this.queue;
+  }
+  async getOrCreateQueueEvents() {
+    if (!this.queueEvents) this.queueEvents = new QueueEvents(this.opts.id, {
+      prefix: Settings.defaultPrefix,
+      connection: this.opts.connection ?? await defaultRedisConnection(),
+      ...this.opts.queueEventsOptions
+    });
+    await this.queueEvents.waitUntilReady();
+    return this.queueEvents;
+  }
+};
+
+//#endregion
+export { Settings, Workflow, WorkflowInputError };
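Per the `src/settings.ts` region above, queues, workers, and queue events fall back to a lazily created localhost ioredis client unless a `connection` option or `Settings.defaultConnection` is supplied, and `Settings.defaultPrefix` is used as the BullMQ key prefix. A sketch of overriding both follows; the host name is a placeholder, not anything defined by this package.

```ts
import IORedis from 'ioredis'
import { Settings } from '@falcondev-oss/workflow'

// Must run before the first queue/worker is created: the default connection is
// resolved once through a singleton promise in dist/index.mjs.
// 'redis.internal' is a placeholder host; maxRetriesPerRequest: null mirrors
// the package's own default client, as BullMQ requires it for blocking calls.
Settings.defaultConnection = new IORedis({
  host: 'redis.internal',
  port: 6379,
  maxRetriesPerRequest: null,
})

// Used as the BullMQ `prefix` for queues, workers, and queue events.
Settings.defaultPrefix = 'my-app-workflows'
```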
package/package.json
ADDED
@@ -0,0 +1,64 @@
+{
+  "name": "@falcondev-oss/workflow",
+  "type": "module",
+  "version": "0.1.0",
+  "description": "Simple type-safe queue worker with durable execution based on BullMQ.",
+  "license": "MIT",
+  "repository": "github:falcondev-oss/workflow",
+  "bugs": {
+    "url": "https://github.com/falcondev-oss/workflow/issues"
+  },
+  "keywords": [
+    "queue",
+    "worker",
+    "bullmq",
+    "durable-execution",
+    "tracing",
+    "typescript"
+  ],
+  "exports": {
+    ".": {
+      "types": "./dist/index.d.mts",
+      "import": "./dist/index.mjs",
+      "default": "./dist/index.mjs"
+    },
+    "./package.json": "./package.json"
+  },
+  "files": [
+    "dist"
+  ],
+  "engines": {
+    "node": "24",
+    "pnpm": "10"
+  },
+  "peerDependencies": {
+    "@opentelemetry/api": "^1.9.0"
+  },
+  "dependencies": {
+    "@antfu/utils": "^9.3.0",
+    "@standard-schema/spec": "^1.1.0",
+    "@types/node": "^25.0.3",
+    "bullmq": "^5.66.4",
+    "ioredis": "^5.8.2",
+    "p-retry": "^7.1.1",
+    "superjson": "^2.2.6",
+    "tsx": "^4.21.0",
+    "type-fest": "^5.3.1",
+    "zod": "^4.3.4"
+  },
+  "devDependencies": {
+    "@falcondev-oss/configs": "^5.0.2",
+    "eslint": "^9.39.2",
+    "prettier": "^3.7.4",
+    "tsdown": "0.19.0-beta.5",
+    "typescript": "^5.9.3"
+  },
+  "scripts": {
+    "build": "tsdown",
+    "type-check": "tsc --noEmit",
+    "lint": "eslint --cache . && prettier --check --cache .",
+    "lint:ci": "eslint --cache --cache-strategy content . && prettier --check --cache --cache-strategy content .",
+    "lint:fix": "eslint --fix --cache . && prettier --write --cache ."
+  },
+  "types": "./dist/index.d.mts"
+}
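`@opentelemetry/api` is only a peer dependency, so the spans created by `runWithTracing()` in dist/index.mjs remain no-ops until the consuming application registers a tracer provider. One possible consumer-side setup is sketched below; the SDK packages shown are not dependencies of this package and would have to be installed separately alongside `@opentelemetry/api`.

```ts
// Assumed consumer-side setup: @opentelemetry/sdk-node and
// @opentelemetry/sdk-trace-node are separate installs, not part of this package.
import { NodeSDK } from '@opentelemetry/sdk-node'
import { ConsoleSpanExporter } from '@opentelemetry/sdk-trace-node'

// Export workflow and step spans to stdout for inspection.
const sdk = new NodeSDK({ traceExporter: new ConsoleSpanExporter() })
sdk.start()
```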