@sonamu-kit/tasks 0.1.0 → 0.1.2

This diff shows the changes between publicly released versions of the package as they appear in their public registry; it is provided for informational purposes only.
Files changed (51)
  1. package/dist/client.d.ts +0 -1
  2. package/dist/client.d.ts.map +1 -1
  3. package/dist/client.js +0 -1
  4. package/dist/client.js.map +1 -1
  5. package/dist/client.test.js +2 -1
  6. package/dist/client.test.js.map +1 -1
  7. package/dist/core/duration.d.ts +1 -1
  8. package/dist/core/duration.js +1 -1
  9. package/dist/core/duration.js.map +1 -1
  10. package/dist/core/workflow.d.ts +2 -1
  11. package/dist/core/workflow.d.ts.map +1 -1
  12. package/dist/core/workflow.js +2 -1
  13. package/dist/core/workflow.js.map +1 -1
  14. package/dist/database/backend.d.ts +7 -8
  15. package/dist/database/backend.d.ts.map +1 -1
  16. package/dist/database/backend.js +134 -39
  17. package/dist/database/backend.js.map +1 -1
  18. package/dist/database/backend.test.js +3 -1
  19. package/dist/database/backend.test.js.map +1 -1
  20. package/dist/database/base.d.ts +2 -2
  21. package/dist/database/base.d.ts.map +1 -1
  22. package/dist/database/base.js +17 -5
  23. package/dist/database/base.js.map +1 -1
  24. package/dist/database/pubsub.d.ts.map +1 -1
  25. package/dist/database/pubsub.js +9 -3
  26. package/dist/database/pubsub.js.map +1 -1
  27. package/dist/execution.test.js +4 -2
  28. package/dist/execution.test.js.map +1 -1
  29. package/dist/practices/01-remote-workflow.js +2 -1
  30. package/dist/practices/01-remote-workflow.js.map +1 -1
  31. package/dist/testing/connection.d.ts +1 -1
  32. package/dist/testing/connection.d.ts.map +1 -1
  33. package/dist/testing/connection.js +5 -4
  34. package/dist/testing/connection.js.map +1 -1
  35. package/dist/worker.test.js +2 -1
  36. package/dist/worker.test.js.map +1 -1
  37. package/package.json +7 -5
  38. package/scripts/migrate.ts +1 -4
  39. package/src/client.test.ts +2 -1
  40. package/src/client.ts +0 -1
  41. package/src/core/duration.ts +1 -1
  42. package/src/core/workflow.ts +2 -1
  43. package/src/database/backend.test.ts +3 -1
  44. package/src/database/backend.ts +140 -45
  45. package/src/database/base.ts +12 -7
  46. package/src/database/pubsub.ts +9 -3
  47. package/src/execution.test.ts +4 -2
  48. package/src/practices/01-remote-workflow.ts +2 -1
  49. package/src/testing/connection.ts +5 -3
  50. package/src/worker.test.ts +2 -1
  51. package/templates/openworkflow.config.ts +1 -1
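
The central change in this release is to BackendPostgres (src/database/backend.ts): the async static factory BackendPostgres.connect() is replaced by a plain constructor plus an explicit initialize() step, which runs migrations unless runMigrations is false. The backend's methods now guard on initialization, throwing "Backend not initialized" (stop() returns early instead), and the underlying Knex instance is created lazily from the stored config. Below is a minimal sketch of the new lifecycle, assuming a placeholder Knex connection config and an assumed public import path (the diff itself only shows internal imports such as "../database/backend"); the per-file hunks follow it.

import type { Knex } from "knex";
// Import path is an assumption; adjust to however the package exposes BackendPostgres.
import { BackendPostgres } from "@sonamu-kit/tasks";

// Placeholder connection settings; substitute your own Postgres config.
const config: Knex.Config = {
  client: "pg",
  connection: process.env.DATABASE_URL,
};

// 0.1.0: const backend = await BackendPostgres.connect(config, { runMigrations: false });
// 0.1.2: construct first, then initialize explicitly.
const backend = new BackendPostgres(config, {
  usePubSub: true, // defaults shown in the constructor's option destructuring
  runMigrations: true, // initialize() runs migrations when true
});
await backend.initialize(); // other methods throw "Backend not initialized" before this

// ... create, claim, and complete workflow runs ...

await backend.stop(); // destroys pub/sub and the lazily created Knex instance

The hunks below show the same migration applied to the tests and the practices/template examples, along with the migrate() helper (which now takes a Knex.Config and manages its own single-connection Knex instance) and PostgresPubSub.destroy() (now idempotent).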
@@ -1,3 +1,4 @@
+ import { getLogger } from "@logtape/logtape";
  import { camelize } from "inflection";
  import knex, { type Knex } from "knex";
  import {
@@ -36,22 +37,82 @@ interface BackendPostgresOptions {
    usePubSub?: boolean;
  }

+ const logger = getLogger(["sonamu", "internal", "tasks"]);
+
  /**
   * Manages a connection to a Postgres database for workflow operations.
   */
  export class BackendPostgres implements Backend {
-   private knex: Knex;
+   private config: Knex.Config;
    private namespaceId: string;
    private usePubSub: boolean;
    private pubsub: PostgresPubSub | null = null;
+   private initialized: boolean = false;
+   private runMigrations: boolean;
+
+   private _knex: Knex | null = null;
+   private get knex(): Knex {
+     if (!this._knex) {
+       this._knex = knex(this.config);
+     }
+
+     return this._knex;
+   }
+
+   constructor(config: Knex.Config, options?: BackendPostgresOptions) {
+     this.config = {
+       ...config,
+       postProcessResponse: (result, _queryContext) => {
+         if (result === null || result === undefined) {
+           return result;
+         }
+
+         if (config?.postProcessResponse) {
+           result = config.postProcessResponse(result, _queryContext);
+         }
+
+         const camelizeRow = (row: Record<string, unknown>) =>
+           Object.fromEntries(
+             Object.entries(row).map(([key, value]) => [camelize(key, true), value]),
+           );
+
+         if (Array.isArray(result)) {
+           return result.map(camelizeRow);
+         }
+
+         return camelizeRow(result);
+       },
+     };
+
+     const { namespaceId, usePubSub, runMigrations } = {
+       namespaceId: DEFAULT_NAMESPACE_ID,
+       usePubSub: true,
+       runMigrations: true,
+       ...options,
+     };

-   private constructor(knex: Knex, namespaceId: string, usePubSub: boolean) {
-     this.knex = knex;
      this.namespaceId = namespaceId;
      this.usePubSub = usePubSub;
+     this.runMigrations = runMigrations;
+   }
+
+   async initialize() {
+     if (this.initialized) {
+       return;
+     }
+
+     if (this.runMigrations) {
+       await migrate(this.config, DEFAULT_SCHEMA);
+     }
+
+     this.initialized = true;
    }

    async subscribe(callback: OnSubscribed) {
+     if (!this.initialized) {
+       throw new Error("Backend not initialized");
+     }
+
      if (!this.usePubSub) {
        return;
      }
@@ -64,6 +125,10 @@ export class BackendPostgres implements Backend {
    }

    async publish(payload?: string): Promise<void> {
+     if (!this.initialized) {
+       throw new Error("Backend not initialized");
+     }
+
      if (!this.usePubSub) {
        return;
      }
@@ -75,56 +140,21 @@ export class BackendPostgres implements Backend {
      );
    }

-   /**
-    * Create and initialize a new BackendPostgres instance. This will
-    * automatically run migrations on startup unless `runMigrations` is set to
-    * false.
-    */
-   static async connect(
-     dbConf: Knex.Config,
-     options?: BackendPostgresOptions,
-   ): Promise<BackendPostgres> {
-     const postProcessResponse: Knex.Config["postProcessResponse"] = (result, _queryContext) => {
-       if (result === null || result === undefined) {
-         return result;
-       }
-
-       if (dbConf?.postProcessResponse) {
-         result = dbConf.postProcessResponse(result, _queryContext);
-       }
-
-       const camelizeRow = (row: Record<string, unknown>) =>
-         Object.fromEntries(Object.entries(row).map(([key, value]) => [camelize(key, true), value]));
-
-       if (Array.isArray(result)) {
-         return result.map(camelizeRow);
-       }
-
-       return camelizeRow(result);
-     };
-
-     const { namespaceId, runMigrations, usePubSub } = {
-       namespaceId: DEFAULT_NAMESPACE_ID,
-       runMigrations: true,
-       usePubSub: true,
-       ...options,
-     };
-
-     const knexInstance = knex({ ...dbConf, postProcessResponse });
-     if (runMigrations) {
-       await migrate(knexInstance, DEFAULT_SCHEMA);
+   async stop(): Promise<void> {
+     if (!this.initialized) {
+       return;
      }

-     return new BackendPostgres(knexInstance, namespaceId, usePubSub);
-   }
-
-   async stop(): Promise<void> {
      await this.pubsub?.destroy();
      this.pubsub = null;
      await this.knex.destroy();
    }

    async createWorkflowRun(params: CreateWorkflowRunParams): Promise<WorkflowRun> {
+     if (!this.initialized) {
+       throw new Error("Backend not initialized");
+     }
+
      const qb = this.knex
        .withSchema(DEFAULT_SCHEMA)
        .table("workflow_runs")
@@ -148,6 +178,7 @@ export class BackendPostgres implements Backend {

      const workflowRun = await qb;
      if (!workflowRun[0]) {
+       logger.error("Failed to create workflow run: {params}", { params });
        throw new Error("Failed to create workflow run");
      }

@@ -155,6 +186,10 @@ export class BackendPostgres implements Backend {
    }

    async getWorkflowRun(params: GetWorkflowRunParams): Promise<WorkflowRun | null> {
+     if (!this.initialized) {
+       throw new Error("Backend not initialized");
+     }
+
      const workflowRun = await this.knex
        .withSchema(DEFAULT_SCHEMA)
        .table("workflow_runs")
@@ -189,6 +224,10 @@ export class BackendPostgres implements Backend {
    }

    async listWorkflowRuns(params: ListWorkflowRunsParams): Promise<PaginatedResponse<WorkflowRun>> {
+     if (!this.initialized) {
+       throw new Error("Backend not initialized");
+     }
+
      const limit = params.limit ?? DEFAULT_PAGINATION_PAGE_SIZE;
      const { after, before } = params;

@@ -232,6 +271,10 @@ export class BackendPostgres implements Backend {
    }

    async claimWorkflowRun(params: ClaimWorkflowRunParams): Promise<WorkflowRun | null> {
+     if (!this.initialized) {
+       throw new Error("Backend not initialized");
+     }
+
      const claimed = await this.knex
        .with("expired", (qb) =>
          qb
@@ -288,6 +331,10 @@ export class BackendPostgres implements Backend {
    }

    async extendWorkflowRunLease(params: ExtendWorkflowRunLeaseParams): Promise<WorkflowRun> {
+     if (!this.initialized) {
+       throw new Error("Backend not initialized");
+     }
+
      const [updated] = await this.knex
        .withSchema(DEFAULT_SCHEMA)
        .table("workflow_runs")
@@ -302,6 +349,7 @@ export class BackendPostgres implements Backend {
        .returning("*");

      if (!updated) {
+       logger.error("Failed to extend lease for workflow run: {params}", { params });
        throw new Error("Failed to extend lease for workflow run");
      }

@@ -309,6 +357,10 @@ export class BackendPostgres implements Backend {
    }

    async sleepWorkflowRun(params: SleepWorkflowRunParams): Promise<WorkflowRun> {
+     if (!this.initialized) {
+       throw new Error("Backend not initialized");
+     }
+
      // 'succeeded' status is deprecated
      const [updated] = await this.knex
        .withSchema(DEFAULT_SCHEMA)
@@ -326,6 +378,7 @@ export class BackendPostgres implements Backend {
        .returning("*");

      if (!updated) {
+       logger.error("Failed to sleep workflow run: {params}", { params });
        throw new Error("Failed to sleep workflow run");
      }

@@ -333,6 +386,10 @@ export class BackendPostgres implements Backend {
    }

    async completeWorkflowRun(params: CompleteWorkflowRunParams): Promise<WorkflowRun> {
+     if (!this.initialized) {
+       throw new Error("Backend not initialized");
+     }
+
      const [updated] = await this.knex
        .withSchema(DEFAULT_SCHEMA)
        .table("workflow_runs")
@@ -352,6 +409,7 @@ export class BackendPostgres implements Backend {
        .returning("*");

      if (!updated) {
+       logger.error("Failed to complete workflow run: {params}", { params });
        throw new Error("Failed to complete workflow run");
      }

@@ -359,6 +417,10 @@ export class BackendPostgres implements Backend {
    }

    async failWorkflowRun(params: FailWorkflowRunParams): Promise<WorkflowRun> {
+     if (!this.initialized) {
+       throw new Error("Backend not initialized");
+     }
+
      const { workflowRunId, error } = params;
      const { initialIntervalMs, backoffCoefficient, maximumIntervalMs } = DEFAULT_RETRY_POLICY;

@@ -396,6 +458,7 @@ export class BackendPostgres implements Backend {
        .returning("*");

      if (!updated) {
+       logger.error("Failed to mark workflow run failed: {params}", { params });
        throw new Error("Failed to mark workflow run failed");
      }

@@ -403,6 +466,10 @@ export class BackendPostgres implements Backend {
    }

    async cancelWorkflowRun(params: CancelWorkflowRunParams): Promise<WorkflowRun> {
+     if (!this.initialized) {
+       throw new Error("Backend not initialized");
+     }
+
      const [updated] = await this.knex
        .withSchema(DEFAULT_SCHEMA)
        .table("workflow_runs")
@@ -435,11 +502,16 @@ export class BackendPostgres implements Backend {
        // throw error for completed/failed workflows
        // 'succeeded' status is deprecated
        if (["succeeded", "completed", "failed"].includes(existing.status)) {
+         logger.error("Cannot cancel workflow run: {params} with status {status}", {
+           params,
+           status: existing.status,
+         });
          throw new Error(
            `Cannot cancel workflow run ${params.workflowRunId} with status ${existing.status}`,
          );
        }

+       logger.error("Failed to cancel workflow run: {params}", { params });
        throw new Error("Failed to cancel workflow run");
      }

@@ -447,6 +519,10 @@ export class BackendPostgres implements Backend {
    }

    async createStepAttempt(params: CreateStepAttemptParams): Promise<StepAttempt> {
+     if (!this.initialized) {
+       throw new Error("Backend not initialized");
+     }
+
      const [stepAttempt] = await this.knex
        .withSchema(DEFAULT_SCHEMA)
        .table("step_attempts")
@@ -466,6 +542,7 @@ export class BackendPostgres implements Backend {
        .returning("*");

      if (!stepAttempt) {
+       logger.error("Failed to create step attempt: {params}", { params });
        throw new Error("Failed to create step attempt");
      }

@@ -473,6 +550,10 @@ export class BackendPostgres implements Backend {
    }

    async getStepAttempt(params: GetStepAttemptParams): Promise<StepAttempt | null> {
+     if (!this.initialized) {
+       throw new Error("Backend not initialized");
+     }
+
      const stepAttempt = await this.knex
        .withSchema(DEFAULT_SCHEMA)
        .table("step_attempts")
@@ -484,6 +565,10 @@ export class BackendPostgres implements Backend {
    }

    async listStepAttempts(params: ListStepAttemptsParams): Promise<PaginatedResponse<StepAttempt>> {
+     if (!this.initialized) {
+       throw new Error("Backend not initialized");
+     }
+
      const limit = params.limit ?? DEFAULT_PAGINATION_PAGE_SIZE;
      const { after, before } = params;

@@ -569,6 +654,10 @@ export class BackendPostgres implements Backend {
    }

    async completeStepAttempt(params: CompleteStepAttemptParams): Promise<StepAttempt> {
+     if (!this.initialized) {
+       throw new Error("Backend not initialized");
+     }
+
      const [updated] = await this.knex
        .withSchema(DEFAULT_SCHEMA)
        .table("step_attempts as sa")
@@ -591,6 +680,7 @@ export class BackendPostgres implements Backend {
        .returning("sa.*");

      if (!updated) {
+       logger.error("Failed to mark step attempt completed: {params}", { params });
        throw new Error("Failed to mark step attempt completed");
      }

@@ -598,6 +688,10 @@ export class BackendPostgres implements Backend {
    }

    async failStepAttempt(params: FailStepAttemptParams): Promise<StepAttempt> {
+     if (!this.initialized) {
+       throw new Error("Backend not initialized");
+     }
+
      const [updated] = await this.knex
        .withSchema(DEFAULT_SCHEMA)
        .table("step_attempts as sa")
@@ -620,6 +714,7 @@ export class BackendPostgres implements Backend {
        .returning("sa.*");

      if (!updated) {
+       logger.error("Failed to mark step attempt failed: {params}", { params });
        throw new Error("Failed to mark step attempt failed");
      }

@@ -1,5 +1,5 @@
  import path from "node:path";
- import type { Knex } from "knex";
+ import knex, { type Knex } from "knex";

  export const DEFAULT_SCHEMA = "sonamu_tasks";

@@ -7,12 +7,17 @@ export const DEFAULT_SCHEMA = "sonamu_tasks";
   * migrate applies pending migrations to the database. Does nothing if the
   * database is already up to date.
   */
- export async function migrate(knex: Knex, schema: string) {
-   await knex.schema.createSchemaIfNotExists(schema);
-   await knex.migrate.latest({
-     directory: path.join(import.meta.dirname, "migrations"),
-     schemaName: schema,
-   });
+ export async function migrate(config: Knex.Config, schema: string) {
+   const instance = knex({ ...config, pool: { min: 1, max: 1 } });
+   try {
+     await instance.schema.createSchemaIfNotExists(schema);
+     await instance.migrate.latest({
+       directory: path.join(import.meta.dirname, "migrations"),
+       schemaName: schema,
+     });
+   } finally {
+     await instance.destroy();
+   }
  }

  /**
@@ -65,9 +65,15 @@ export class PostgresPubSub {

    // destroy the listener and close the connection, do not destroy the knex connection
    async destroy() {
-     this._destroyed = true;
-     this._connection.off("close", this._onClosed);
-     await this.knex.client.destroyRawConnection(this._connection);
+     if (this._destroyed) {
+       return;
+     }
+     try {
+       this._connection.off("close", this._onClosed);
+       await this.knex.client.destroyRawConnection(this._connection);
+     } finally {
+       this._destroyed = true;
+     }
    }

    // create a new listener and connect to the database
@@ -8,10 +8,11 @@ describe("StepExecutor", () => {
    let backend: BackendPostgres;

    beforeAll(async () => {
-     backend = await BackendPostgres.connect(KNEX_GLOBAL_CONFIG, {
+     backend = new BackendPostgres(KNEX_GLOBAL_CONFIG, {
        namespaceId: randomUUID(),
        runMigrations: false,
      });
+     await backend.initialize();
    });

    afterAll(async () => {
@@ -164,10 +165,11 @@ describe("executeWorkflow", () => {
    let backend: BackendPostgres;

    beforeAll(async () => {
-     backend = await BackendPostgres.connect(KNEX_GLOBAL_CONFIG, {
+     backend = new BackendPostgres(KNEX_GLOBAL_CONFIG, {
        namespaceId: randomUUID(),
        runMigrations: false,
      });
+     await backend.initialize();
    });

    afterAll(async () => {
@@ -10,11 +10,12 @@ async function getBackend(): Promise<BackendPostgres> {
      return _backend;
    }

-   _backend = await BackendPostgres.connect(KNEX_GLOBAL_CONFIG, {
+   _backend = new BackendPostgres(KNEX_GLOBAL_CONFIG, {
      runMigrations: true,
      namespaceId: randomUUID(),
    });

+   await _backend.initialize();
    return _backend;
  }

@@ -1,5 +1,5 @@
  import { randomUUID } from "node:crypto";
- import knex, { type Knex } from "knex";
+ import type { Knex } from "knex";
  import { BackendPostgres } from "../database/backend";
  import { migrate as baseMigrate, DEFAULT_SCHEMA } from "../database/base";

@@ -20,7 +20,7 @@ export const KNEX_GLOBAL_CONFIG: Knex.Config = {
  } as const;

  export async function migrate(): Promise<void> {
-   await baseMigrate(knex(KNEX_GLOBAL_CONFIG), DEFAULT_SCHEMA);
+   await baseMigrate(KNEX_GLOBAL_CONFIG, DEFAULT_SCHEMA);
  }

  export async function createBackend(): Promise<BackendPostgres> {
@@ -28,9 +28,11 @@ export async function createBackend(): Promise<BackendPostgres> {
      return backend;
    }

-   backend = await BackendPostgres.connect(KNEX_GLOBAL_CONFIG, {
+   backend = new BackendPostgres(KNEX_GLOBAL_CONFIG, {
      namespaceId: randomUUID(),
+     runMigrations: false,
    });
+   await backend.initialize();

    return backend;
  }
@@ -8,10 +8,11 @@ describe("Worker", () => {
    let backend: BackendPostgres;

    beforeEach(async () => {
-     backend = await BackendPostgres.connect(KNEX_GLOBAL_CONFIG, {
+     backend = new BackendPostgres(KNEX_GLOBAL_CONFIG, {
        namespaceId: randomUUID(),
        runMigrations: false,
      });
+     await backend.initialize();
    });

    afterEach(async () => {
@@ -13,7 +13,7 @@ const config: Knex.Config = {
  } as const;

  // Use Postgres (configured with Knex config)
- const backend = await BackendPostgres.connect(config, {
+ const backend = new BackendPostgres(config, {
    runMigrations: false,
  });