@falcondev-oss/workflow 0.4.0 → 0.6.0

package/dist/index.d.mts CHANGED
@@ -1,4 +1,4 @@
- import { ConnectionOptions, Job, JobsOptions, Queue, QueueEvents, QueueEventsOptions, QueueOptions, UnrecoverableError, WorkerOptions } from "bullmq";
+ import { ConnectionOptions, Job, JobSchedulerTemplateOptions, JobsOptions, Queue, QueueEvents, QueueEventsOptions, QueueOptions, UnrecoverableError, WorkerOptions } from "bullmq";
  import "@antfu/utils";
  import IORedis from "ioredis";
  import { Span } from "@opentelemetry/api";
@@ -14,10 +14,15 @@ declare class WorkflowInputError extends UnrecoverableError {
  }
  //#endregion
  //#region src/settings.d.ts
+ type WorkflowLogger = {
+ info?: (...data: any[]) => void;
+ success?: (...data: any[]) => void;
+ };
  declare const Settings: {
  defaultPrefix: string;
- defaultConnection: IORedis | undefined;
+ defaultConnection: (() => Promise<IORedis> | IORedis) | undefined;
  defaultCronTimezone: string | undefined;
+ logger: WorkflowLogger | undefined;
  };
  //#endregion
  //#region src/serializer.d.ts
@@ -61,7 +66,7 @@ interface WorkflowStepOptions {
  //#endregion
  //#region src/types.d.ts
  type WorkflowJobInternal<Input, Output> = Job<Serialized<{
- input: Input;
+ input: Input | undefined;
  stepData: Record<string, WorkflowStepData>;
  tracingHeaders: unknown;
  }>, Serialized<Output>, string>;
@@ -97,7 +102,10 @@ declare class Workflow<RunInput, Input, Output> {
  run(input: RunInput, opts?: JobsOptions): Promise<WorkflowJob<Output>>;
  runIn(input: RunInput, delayMs: number, opts?: Except<JobsOptions, 'delay'>): Promise<WorkflowJob<Output>>;
  runAt(input: RunInput, date: Date, opts?: Except<JobsOptions, 'delay'>): Promise<WorkflowJob<Output>>;
- repeat(input: RunInput, cronOrInterval: string | number, opts?: Except<JobsOptions, 'repeat'>): Promise<WorkflowJob<Output>>;
+ private runSchedule;
+ runCron(schedulerId: string, cron: string, input: RunInput, opts?: JobSchedulerTemplateOptions): Promise<void>;
+ runEvery(schedulerId: string, everyMs: number, input: RunInput, opts?: JobSchedulerTemplateOptions): Promise<void>;
+ exportPrometheusMetrics(globalVariables?: Record<string, string>): Promise<string>;
  private getOrCreateQueue;
  private getOrCreateQueueEvents;
  }
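
Usage note: the one-shot `repeat()` API is gone; scheduling now goes through named job schedulers, and queue metrics can be exported. A minimal sketch against the new declarations above (the `reportWorkflow` instance and its input shape are hypothetical):

```ts
// Hypothetical Workflow instance; only the method signatures come from this diff.
declare const reportWorkflow: {
  runCron(schedulerId: string, cron: string, input: { day: string }): Promise<void>
  runEvery(schedulerId: string, everyMs: number, input: { day: string }): Promise<void>
  exportPrometheusMetrics(globalVariables?: Record<string, string>): Promise<string>
}

// Upserts a cron-based scheduler under the given ID (safe to call repeatedly).
await reportWorkflow.runCron('daily-report', '0 6 * * *', { day: 'today' })

// Fixed-interval scheduling instead of a cron expression.
await reportWorkflow.runEvery('hourly-report', 60 * 60 * 1000, { day: 'today' })

// Prometheus exposition text for the underlying queue, with optional extra labels.
const metrics = await reportWorkflow.exportPrometheusMetrics({ env: 'production' })
console.log(metrics)
```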
package/dist/index.mjs CHANGED
@@ -2,6 +2,7 @@ import { Queue, QueueEvents, UnrecoverableError, Worker } from "bullmq";
  import { createSingletonPromise } from "@antfu/utils";
  import IORedis from "ioredis";
  import { ROOT_CONTEXT, SpanKind, SpanStatusCode, context, propagation, trace } from "@opentelemetry/api";
+ import { asyncExitHook } from "exit-hook";
  import { deserialize, serialize } from "superjson";
  import { setTimeout } from "node:timers/promises";
  import pRetry from "p-retry";
@@ -20,14 +21,18 @@ var WorkflowInputError = class extends UnrecoverableError {
  const Settings = {
  defaultPrefix: "falcondev-oss-workflow",
  defaultConnection: void 0,
- defaultCronTimezone: void 0
+ defaultCronTimezone: void 0,
+ logger: void 0
+ };
+ const defaultRedisOptions = {
+ lazyConnect: true,
+ maxRetriesPerRequest: null,
+ retryStrategy: (times) => Math.max(Math.min(Math.exp(times), 2e4), 1e3),
+ enableOfflineQueue: false
  };
  const defaultRedisConnection = createSingletonPromise(async () => {
- if (Settings.defaultConnection) return Settings.defaultConnection;
- const redis = new IORedis({
- lazyConnect: true,
- maxRetriesPerRequest: null
- });
+ if (Settings.defaultConnection) return Settings.defaultConnection();
+ const redis = new IORedis(defaultRedisOptions);
  await redis.connect();
  return redis;
  });
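
`Settings.defaultConnection` is now a factory returning `IORedis` or `Promise<IORedis>` instead of a prebuilt instance, and an optional `logger` hook was added. A configuration sketch, assuming `Settings` is exported from the package entry point (not confirmed by this diff) and an illustrative `REDIS_URL`:

```ts
import IORedis from 'ioredis'
import { Settings } from '@falcondev-oss/workflow'

// The factory is invoked (and awaited) lazily by the defaultRedisConnection singleton above.
Settings.defaultConnection = () =>
  new IORedis(process.env.REDIS_URL ?? 'redis://localhost:6379', {
    maxRetriesPerRequest: null, // required by BullMQ for blocking connections
  })

// Both logger methods are optional; plain console works as a stand-in.
Settings.logger = {
  info: (...data: any[]) => console.info(...data),
  success: (...data: any[]) => console.log(...data),
}
```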
@@ -61,22 +66,26 @@ function getTracer() {
  return trace.getTracer("falcondev-oss-workflow");
  }
  async function runWithTracing(spanName, options, fn, context$1) {
- const span = getTracer().startSpan(spanName, options, context$1);
- try {
- const result = await fn(span);
- span.setStatus({ code: SpanStatusCode.OK });
- return result;
- } catch (err_) {
- const err = err_;
- span.recordException(err);
- span.setStatus({
- code: SpanStatusCode.ERROR,
- message: err.message
- });
- throw err_;
- } finally {
- span.end();
- }
+ return context$1 ? getTracer().startActiveSpan(spanName, options, context$1, runWithSpan(fn)) : getTracer().startActiveSpan(spanName, options, runWithSpan(fn));
+ }
+ function runWithSpan(fn) {
+ return async (span) => {
+ try {
+ const result = await fn(span);
+ span.setStatus({ code: SpanStatusCode.OK });
+ return result;
+ } catch (err_) {
+ const err = err_;
+ span.recordException(err);
+ span.setStatus({
+ code: SpanStatusCode.ERROR,
+ message: err.message
+ });
+ throw err_;
+ } finally {
+ span.end();
+ }
+ };
  }
 
  //#endregion
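
The helper switches from `startSpan` to `startActiveSpan`, so the span is installed in the active context for the duration of the callback and nested spans pick it up as their parent. A small illustration using only the public `@opentelemetry/api` surface (span names are made up):

```ts
import { trace } from '@opentelemetry/api'

const tracer = trace.getTracer('example')

await tracer.startActiveSpan('outer', async (span) => {
  // Inside the callback, 'outer' is the active span...
  const active = trace.getActiveSpan() // === span

  // ...so a span started here without an explicit parent is nested under 'outer'.
  const inner = tracer.startSpan('inner')
  inner.end()

  span.end()
  return active
})
```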
@@ -105,7 +114,7 @@ var WorkflowStep = class WorkflowStep {
  attempt: initialAttempt
  });
  return pRetry(async (attempt) => {
- const result = await runWithTracing(`step:${name}`, { attributes: {
+ const result = await runWithTracing(`workflow-worker/${this.workflowId}/step/${name}`, { attributes: {
  "workflow.id": this.workflowId,
  "workflow.job_id": this.workflowJobId,
  "workflow.step_name": name,
@@ -148,7 +157,7 @@ var WorkflowStep = class WorkflowStep {
  startedAt: now
  };
  await this.updateStepData(name, stepData);
- await runWithTracing(`step:${name}`, { attributes: {
+ await runWithTracing(`workflow-worker/${this.workflowId}/step/${name}`, { attributes: {
  "workflow.id": this.workflowId,
  "workflow.job_id": this.workflowJobId,
  "workflow.step_name": name
@@ -198,20 +207,22 @@ var Workflow = class {
  }
  async work(opts) {
  const queue = await this.getOrCreateQueue();
- await new Worker(this.opts.id, async (job) => {
+ const worker = new Worker(this.opts.id, async (job) => {
+ Settings.logger?.info?.(`Processing workflow job ${job.id} of workflow ${this.opts.id}`);
  const jobId = job.id;
  if (!jobId) throw new Error("Job ID is missing");
  const deserializedData = deserialize$1(job.data);
  const parsedData = this.opts.schema && await this.opts.schema["~standard"].validate(deserializedData.input);
  if (parsedData?.issues) throw new WorkflowInputError("Invalid workflow input", parsedData.issues);
- return runWithTracing(`workflow:work:${this.opts.id}`, {
+ return runWithTracing(`workflow-worker/${this.opts.id}`, {
  attributes: {
  "workflow.id": this.opts.id,
  "workflow.job_id": jobId
  },
  kind: SpanKind.CONSUMER
  }, async (span) => {
- return serialize$1(await this.opts.run({
+ const start = performance.now();
+ const result = await this.opts.run({
  input: parsedData?.value,
  step: new WorkflowStep({
  queue,
@@ -219,20 +230,29 @@ var Workflow = class {
  workflowId: this.opts.id
  }),
  span
- }));
+ });
+ const end = performance.now();
+ Settings.logger?.success?.(`Completed workflow job ${job.id} of workflow ${this.opts.id} in ${(end - start).toFixed(2)} ms`);
+ return serialize$1(result);
  }, propagation.extract(ROOT_CONTEXT, deserializedData.tracingHeaders));
  }, {
  connection: this.opts.connection ?? await defaultRedisConnection(),
  prefix: Settings.defaultPrefix,
  ...opts
- }).waitUntilReady();
+ });
+ await worker.waitUntilReady();
+ Settings.logger?.info?.(`Worker started for workflow ${this.opts.id}`);
+ asyncExitHook(async (signal) => {
+ Settings.logger?.info?.(`Received ${signal}, shutting down worker for workflow ${this.opts.id}...`);
+ await worker.close();
+ }, { wait: 1e4 });
  return this;
  }
  async run(input, opts) {
  const parsedInput = this.opts.schema && await this.opts.schema["~standard"].validate(input);
  if (parsedInput?.issues) throw new WorkflowInputError("Invalid workflow input", parsedInput.issues);
  const queue = await this.getOrCreateQueue();
- return runWithTracing(`workflow:run:${this.opts.id}`, {
+ return runWithTracing(`workflow-producer/${this.opts.id}`, {
  attributes: { "workflow.id": this.opts.id },
  kind: SpanKind.PRODUCER
  }, async () => {
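
`work()` now keeps a handle to the worker, logs lifecycle events through `Settings.logger`, and registers `asyncExitHook` with a 10 s grace period so the worker is closed when the process receives an exit signal. Calling code no longer needs its own shutdown handler; a sketch with a hypothetical `myWorkflow` instance:

```ts
// Hypothetical instance; work(opts) forwards opts into BullMQ's WorkerOptions per the diff.
declare const myWorkflow: { work(opts?: { concurrency?: number }): Promise<unknown> }

await myWorkflow.work({ concurrency: 5 })
// On SIGINT/SIGTERM, exit-hook runs the registered hook, which awaits worker.close()
// for up to the configured 10_000 ms before the process exits.
```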
@@ -258,19 +278,41 @@ var Workflow = class {
  const now = Date.now();
  return date.getTime() < now ? this.run(input, opts) : this.runIn(input, date.getTime() - Date.now(), opts);
  }
- async repeat(input, cronOrInterval, opts) {
- return this.run(input, {
- repeat: {
- tz: Settings.defaultCronTimezone,
- ...typeof cronOrInterval === "string" ? { pattern: cronOrInterval } : { every: cronOrInterval }
- },
- ...opts
+ async runSchedule(schedulerId, repeatOpts, input, opts) {
+ const parsedInput = this.opts.schema && await this.opts.schema["~standard"].validate(input);
+ if (parsedInput?.issues) throw new WorkflowInputError("Invalid workflow input", parsedInput.issues);
+ await (await this.getOrCreateQueue()).upsertJobScheduler(schedulerId, repeatOpts, {
+ name: "workflow-job",
+ data: serialize$1({
+ input: parsedInput?.value,
+ stepData: {},
+ tracingHeaders: {}
+ }),
+ opts
+ });
+ }
+ async runCron(schedulerId, cron, input, opts) {
+ return this.runSchedule(schedulerId, { pattern: cron }, input, opts);
+ }
+ async runEvery(schedulerId, everyMs, input, opts) {
+ return this.runSchedule(schedulerId, { every: everyMs }, input, opts);
+ }
+ async exportPrometheusMetrics(globalVariables) {
+ return (await this.getOrCreateQueue()).exportPrometheusMetrics({
+ workflowId: this.id,
+ workflowPrefix: Settings.defaultPrefix,
+ ...globalVariables
  });
  }
  async getOrCreateQueue() {
  if (!this.queue) this.queue = new Queue(this.opts.id, {
  prefix: Settings.defaultPrefix,
  connection: this.opts.connection ?? await defaultRedisConnection(),
+ defaultJobOptions: {
+ removeOnComplete: true,
+ removeOnFail: { age: 1440 * 60 },
+ ...this.opts.queueOptions?.defaultJobOptions
+ },
  ...this.opts.queueOptions
  });
  await this.queue.waitUntilReady();
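
The new `exportPrometheusMetrics()` delegates to BullMQ's queue-level export and returns Prometheus exposition text; any extra entries are passed through as global variables. A sketch of serving it over plain Node HTTP (server, port, and workflow instance are illustrative):

```ts
import { createServer } from 'node:http'

// Stand-in for any Workflow instance from this package.
declare const myWorkflow: {
  exportPrometheusMetrics(globalVariables?: Record<string, string>): Promise<string>
}

createServer(async (req, res) => {
  if (req.url === '/metrics') {
    const body = await myWorkflow.exportPrometheusMetrics({ service: 'worker' })
    res.writeHead(200, { 'content-type': 'text/plain' })
    res.end(body)
    return
  }
  res.writeHead(404)
  res.end()
}).listen(9090)
```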
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@falcondev-oss/workflow",
  "type": "module",
- "version": "0.4.0",
+ "version": "0.6.0",
  "description": "Simple type-safe queue worker with durable execution based on BullMQ.",
  "license": "MIT",
  "repository": "github:falcondev-oss/workflow",
@@ -39,6 +39,7 @@
  "@standard-schema/spec": "^1.1.0",
  "@types/node": "^25.0.3",
  "bullmq": "^5.66.4",
+ "exit-hook": "^5.0.1",
  "ioredis": "^5.8.2",
  "p-retry": "^7.1.1",
  "superjson": "^2.2.6",