@sonamu-kit/tasks 0.1.3 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46) hide show
  1. package/dist/backend.d.ts +4 -0
  2. package/dist/backend.d.ts.map +1 -1
  3. package/dist/backend.js.map +1 -1
  4. package/dist/client.d.ts.map +1 -1
  5. package/dist/client.js +3 -1
  6. package/dist/client.js.map +1 -1
  7. package/dist/core/retry.d.ts +35 -19
  8. package/dist/core/retry.d.ts.map +1 -1
  9. package/dist/core/retry.js +50 -14
  10. package/dist/core/retry.js.map +1 -1
  11. package/dist/core/retry.test.js +172 -11
  12. package/dist/core/retry.test.js.map +1 -1
  13. package/dist/database/backend.d.ts.map +1 -1
  14. package/dist/database/backend.js +42 -10
  15. package/dist/database/backend.js.map +1 -1
  16. package/dist/database/backend.testsuite.d.ts.map +1 -1
  17. package/dist/database/backend.testsuite.js +106 -0
  18. package/dist/database/backend.testsuite.js.map +1 -1
  19. package/dist/execution.d.ts +2 -0
  20. package/dist/execution.d.ts.map +1 -1
  21. package/dist/execution.js +17 -3
  22. package/dist/execution.js.map +1 -1
  23. package/dist/execution.test.js +104 -0
  24. package/dist/execution.test.js.map +1 -1
  25. package/dist/internal.d.ts +2 -1
  26. package/dist/internal.d.ts.map +1 -1
  27. package/dist/internal.js +1 -1
  28. package/dist/internal.js.map +1 -1
  29. package/dist/worker.d.ts.map +1 -1
  30. package/dist/worker.js +2 -1
  31. package/dist/worker.js.map +1 -1
  32. package/dist/workflow.d.ts +3 -0
  33. package/dist/workflow.d.ts.map +1 -1
  34. package/dist/workflow.js.map +1 -1
  35. package/package.json +3 -3
  36. package/src/backend.ts +4 -0
  37. package/src/client.ts +2 -0
  38. package/src/core/retry.test.ts +180 -11
  39. package/src/core/retry.ts +95 -19
  40. package/src/database/backend.testsuite.ts +119 -0
  41. package/src/database/backend.ts +65 -11
  42. package/src/execution.test.ts +115 -0
  43. package/src/execution.ts +18 -2
  44. package/src/internal.ts +21 -1
  45. package/src/worker.ts +1 -0
  46. package/src/workflow.ts +3 -0
@@ -1,5 +1,16 @@
1
1
  import { describe, expect, test } from "vitest";
2
- import { calculateRetryDelayMs, DEFAULT_RETRY_POLICY, shouldRetry } from "./retry";
2
+ import type { SerializedError } from "./error";
3
+ import type { DynamicRetryPolicy, StaticRetryPolicy } from "./retry";
4
+ import {
5
+ calculateRetryDelayMs,
6
+ DEFAULT_RETRY_POLICY,
7
+ isDynamicRetryPolicy,
8
+ isStaticRetryPolicy,
9
+ mergeRetryPolicy,
10
+ serializeRetryPolicy,
11
+ shouldRetry,
12
+ shouldRetryByPolicy,
13
+ } from "./retry";
3
14
 
4
15
  describe("calculateRetryDelayMs", () => {
5
16
  test("calculates exponential backoff correctly", () => {
@@ -9,14 +20,15 @@ describe("calculateRetryDelayMs", () => {
9
20
  expect(calculateRetryDelayMs(4)).toBe(8000);
10
21
  expect(calculateRetryDelayMs(5)).toBe(16_000);
11
22
  expect(calculateRetryDelayMs(6)).toBe(32_000);
12
- expect(calculateRetryDelayMs(7)).toBe(64_000);
23
+ // attempt 7: 1s * 2^6 = 64s = 64000ms, capped at 60000ms (max)
24
+ expect(calculateRetryDelayMs(7)).toBe(60_000);
13
25
  });
14
26
 
15
27
  test("caps delay at maximum interval", () => {
16
28
  const { maximumIntervalMs } = DEFAULT_RETRY_POLICY;
17
29
 
18
- // attempt 8: 1s * 2^7 = 128s = 128000ms, but capped at 100000ms (max)
19
- expect(calculateRetryDelayMs(8)).toBe(maximumIntervalMs);
30
+ // attempt 7: 1s * 2^6 = 64s = 64000ms, capped at 60000ms (max)
31
+ expect(calculateRetryDelayMs(7)).toBe(maximumIntervalMs);
20
32
 
21
33
  // attempts 10 & 100: should still be capped
22
34
  expect(calculateRetryDelayMs(10)).toBe(maximumIntervalMs);
@@ -26,16 +38,173 @@ describe("calculateRetryDelayMs", () => {
26
38
  test("handles edge cases", () => {
27
39
  // attempt 0: 1s * 2^-1 = 0.5s = 500ms
28
40
  expect(calculateRetryDelayMs(0)).toBe(500);
29
- expect(calculateRetryDelayMs(Infinity)).toBe(100_000);
41
+ expect(calculateRetryDelayMs(Infinity)).toBe(60_000);
30
42
  });
31
43
  });
32
44
 
33
45
  describe("shouldRetry", () => {
34
- test("always returns true with default policy (infinite retries)", () => {
35
- const retryPolicy = DEFAULT_RETRY_POLICY;
36
- expect(shouldRetry(retryPolicy, 1)).toBe(true);
37
- expect(shouldRetry(retryPolicy, 10)).toBe(true);
38
- expect(shouldRetry(retryPolicy, 100)).toBe(true);
39
- expect(shouldRetry(retryPolicy, 1000)).toBe(true);
46
+ test("returns false when attempt reaches maxAttempts", () => {
47
+ // 기본 정책: maxAttempts = 5
48
+ expect(shouldRetry(DEFAULT_RETRY_POLICY, 1)).toBe(true);
49
+ expect(shouldRetry(DEFAULT_RETRY_POLICY, 4)).toBe(true);
50
+ expect(shouldRetry(DEFAULT_RETRY_POLICY, 5)).toBe(false);
51
+ expect(shouldRetry(DEFAULT_RETRY_POLICY, 10)).toBe(false);
52
+ });
53
+ });
54
+
55
+ describe("shouldRetryByPolicy", () => {
56
+ test("respects maxAttempts from policy", () => {
57
+ expect(shouldRetryByPolicy({ maxAttempts: 3 }, 1)).toBe(true);
58
+ expect(shouldRetryByPolicy({ maxAttempts: 3 }, 2)).toBe(true);
59
+ expect(shouldRetryByPolicy({ maxAttempts: 3 }, 3)).toBe(false);
60
+ expect(shouldRetryByPolicy({ maxAttempts: 3 }, 4)).toBe(false);
61
+ });
62
+
63
+ test("uses default maxAttempts when not specified", () => {
64
+ expect(shouldRetryByPolicy({}, 1)).toBe(true);
65
+ expect(shouldRetryByPolicy({}, 4)).toBe(true);
66
+ expect(shouldRetryByPolicy({}, 5)).toBe(false);
67
+ });
68
+ });
69
+
70
+ describe("isDynamicRetryPolicy", () => {
71
+ test("returns true for policy with shouldRetry function", () => {
72
+ const dynamicPolicy: DynamicRetryPolicy = {
73
+ maxAttempts: 3,
74
+ shouldRetry: () => ({ shouldRetry: true, delayMs: 1000 }),
75
+ };
76
+ expect(isDynamicRetryPolicy(dynamicPolicy)).toBe(true);
77
+ });
78
+
79
+ test("returns false for static policy without shouldRetry", () => {
80
+ const staticPolicy: StaticRetryPolicy = {
81
+ maxAttempts: 5,
82
+ initialIntervalMs: 1000,
83
+ };
84
+ expect(isDynamicRetryPolicy(staticPolicy)).toBe(false);
85
+ });
86
+
87
+ test("returns false for empty policy", () => {
88
+ expect(isDynamicRetryPolicy({})).toBe(false);
89
+ });
90
+ });
91
+
92
+ describe("isStaticRetryPolicy", () => {
93
+ test("returns true for static policy without shouldRetry", () => {
94
+ const staticPolicy: StaticRetryPolicy = {
95
+ maxAttempts: 5,
96
+ initialIntervalMs: 1000,
97
+ };
98
+ expect(isStaticRetryPolicy(staticPolicy)).toBe(true);
99
+ });
100
+
101
+ test("returns true for empty policy", () => {
102
+ expect(isStaticRetryPolicy({})).toBe(true);
103
+ });
104
+
105
+ test("returns false for dynamic policy", () => {
106
+ const dynamicPolicy: DynamicRetryPolicy = {
107
+ maxAttempts: 3,
108
+ shouldRetry: () => ({ shouldRetry: true, delayMs: 1000 }),
109
+ };
110
+ expect(isStaticRetryPolicy(dynamicPolicy)).toBe(false);
111
+ });
112
+ });
113
+
114
+ describe("mergeRetryPolicy", () => {
115
+ test("returns default values when policy is undefined", () => {
116
+ const merged = mergeRetryPolicy(undefined);
117
+ expect(merged.maxAttempts).toBe(5);
118
+ expect(merged.initialIntervalMs).toBe(1000);
119
+ expect(merged.backoffCoefficient).toBe(2);
120
+ expect(merged.maximumIntervalMs).toBe(60_000);
121
+ });
122
+
123
+ test("uses provided values and fills missing with defaults for static policy", () => {
124
+ const merged = mergeRetryPolicy({ maxAttempts: 10, initialIntervalMs: 500 });
125
+ expect(merged.maxAttempts).toBe(10);
126
+ expect(merged.initialIntervalMs).toBe(500);
127
+ expect(merged.backoffCoefficient).toBe(2);
128
+ expect(merged.maximumIntervalMs).toBe(60_000);
129
+ });
130
+
131
+ test("returns only maxAttempts and shouldRetry for dynamic policy", () => {
132
+ const customFn = (_error: SerializedError, _attempt: number) => ({
133
+ shouldRetry: false,
134
+ delayMs: 5000,
135
+ });
136
+ const dynamicPolicy: DynamicRetryPolicy = {
137
+ maxAttempts: 3,
138
+ shouldRetry: customFn,
139
+ };
140
+ const merged = mergeRetryPolicy(dynamicPolicy);
141
+
142
+ expect(merged.maxAttempts).toBe(3);
143
+ expect(merged.shouldRetry).toBe(customFn);
144
+ // 동적 정책에서는 backoff 필드들이 없어야 합니다.
145
+ expect("initialIntervalMs" in merged).toBe(false);
146
+ expect("backoffCoefficient" in merged).toBe(false);
147
+ expect("maximumIntervalMs" in merged).toBe(false);
148
+ });
149
+
150
+ test("uses default maxAttempts for dynamic policy when not specified", () => {
151
+ const customFn = () => ({ shouldRetry: true, delayMs: 1000 });
152
+ const dynamicPolicy: DynamicRetryPolicy = {
153
+ shouldRetry: customFn,
154
+ };
155
+ const merged = mergeRetryPolicy(dynamicPolicy);
156
+
157
+ expect(merged.maxAttempts).toBe(5); // 기본값
158
+ expect(merged.shouldRetry).toBe(customFn);
159
+ });
160
+ });
161
+
162
+ describe("serializeRetryPolicy", () => {
163
+ test("returns empty object with hasDynamicPolicy=false for undefined", () => {
164
+ const serialized = serializeRetryPolicy(undefined);
165
+ expect(serialized.hasDynamicPolicy).toBe(false);
166
+ expect(serialized.maxAttempts).toBeUndefined();
167
+ });
168
+
169
+ test("serializes static fields for static policy", () => {
170
+ const serialized = serializeRetryPolicy({
171
+ maxAttempts: 10,
172
+ initialIntervalMs: 2000,
173
+ });
174
+ expect(serialized.maxAttempts).toBe(10);
175
+ expect(serialized.initialIntervalMs).toBe(2000);
176
+ expect(serialized.hasDynamicPolicy).toBe(false);
177
+ expect("shouldRetry" in serialized).toBe(false);
178
+ });
179
+
180
+ test("excludes backoff fields for dynamic policy", () => {
181
+ const dynamicPolicy: DynamicRetryPolicy = {
182
+ maxAttempts: 3,
183
+ shouldRetry: () => ({ shouldRetry: true, delayMs: 1000 }),
184
+ };
185
+ const serialized = serializeRetryPolicy(dynamicPolicy);
186
+
187
+ expect(serialized.maxAttempts).toBe(3);
188
+ expect(serialized.hasDynamicPolicy).toBe(true);
189
+ // 동적 정책에서는 backoff 필드들이 없어야 합니다.
190
+ expect(serialized.initialIntervalMs).toBeUndefined();
191
+ expect(serialized.backoffCoefficient).toBeUndefined();
192
+ expect(serialized.maximumIntervalMs).toBeUndefined();
193
+ });
194
+
195
+ test("includes backoff fields for static policy", () => {
196
+ const staticPolicy: StaticRetryPolicy = {
197
+ maxAttempts: 5,
198
+ initialIntervalMs: 2000,
199
+ backoffCoefficient: 3,
200
+ maximumIntervalMs: 30000,
201
+ };
202
+ const serialized = serializeRetryPolicy(staticPolicy);
203
+
204
+ expect(serialized.maxAttempts).toBe(5);
205
+ expect(serialized.initialIntervalMs).toBe(2000);
206
+ expect(serialized.backoffCoefficient).toBe(3);
207
+ expect(serialized.maximumIntervalMs).toBe(30000);
208
+ expect(serialized.hasDynamicPolicy).toBe(false);
40
209
  });
41
210
  });
package/src/core/retry.ts CHANGED
@@ -1,29 +1,105 @@
1
- export const DEFAULT_RETRY_POLICY = {
2
- initialIntervalMs: 1000, // 1s
1
+ import type { SerializedError } from "./error";
2
+
3
+ export interface RetryDecision {
4
+ shouldRetry: boolean;
5
+ delayMs: number;
6
+ }
7
+
8
+ export type RetryDecisionFn = (error: SerializedError, attempt: number) => RetryDecision;
9
+
10
+ export interface StaticRetryPolicy {
11
+ maxAttempts?: number;
12
+ initialIntervalMs?: number;
13
+ backoffCoefficient?: number;
14
+ maximumIntervalMs?: number;
15
+ }
16
+
17
+ export interface DynamicRetryPolicy {
18
+ maxAttempts?: number;
19
+ shouldRetry: RetryDecisionFn;
20
+ }
21
+
22
+ export type RetryPolicy = StaticRetryPolicy | DynamicRetryPolicy;
23
+
24
+ export interface SerializableRetryPolicy extends StaticRetryPolicy {
25
+ hasDynamicPolicy?: boolean;
26
+ }
27
+
28
+ export type MergedStaticRetryPolicy = Required<StaticRetryPolicy>;
29
+
30
+ export interface MergedDynamicRetryPolicy {
31
+ maxAttempts: number;
32
+ shouldRetry: RetryDecisionFn;
33
+ }
34
+
35
+ export type MergedRetryPolicy = MergedStaticRetryPolicy | MergedDynamicRetryPolicy;
36
+
37
+ export const DEFAULT_RETRY_POLICY: Required<StaticRetryPolicy> = {
38
+ maxAttempts: 5,
39
+ initialIntervalMs: 1000,
3
40
  backoffCoefficient: 2,
4
- maximumIntervalMs: 100 * 1000, // 100s
5
- maximumAttempts: Infinity, // unlimited
6
- } as const;
41
+ maximumIntervalMs: 60_000,
42
+ };
7
43
 
8
- export type RetryPolicy = typeof DEFAULT_RETRY_POLICY;
44
+ export function isDynamicRetryPolicy(policy: RetryPolicy): policy is DynamicRetryPolicy {
45
+ return "shouldRetry" in policy && typeof policy.shouldRetry === "function";
46
+ }
47
+
48
+ export function isStaticRetryPolicy(policy: RetryPolicy): policy is StaticRetryPolicy {
49
+ return !isDynamicRetryPolicy(policy);
50
+ }
9
51
 
10
- /**
11
- * Calculate the next retry delay using exponential backoff.
12
- * @param attemptNumber - Attempt number (1-based)
13
- * @returns Delay in milliseconds
14
- */
15
52
  export function calculateRetryDelayMs(attemptNumber: number): number {
16
53
  const { initialIntervalMs, backoffCoefficient, maximumIntervalMs } = DEFAULT_RETRY_POLICY;
17
54
  const backoffMs = initialIntervalMs * backoffCoefficient ** (attemptNumber - 1);
18
55
  return Math.min(backoffMs, maximumIntervalMs);
19
56
  }
20
57
 
21
- /**
22
- * Check if an operation should be retried based on the retry policy.
23
- * @param retryPolicy - Retry policy
24
- * @param attemptNumber - Attempt number (1-based)
25
- * @returns True if another attempt should be made
26
- */
27
- export function shouldRetry(retryPolicy: RetryPolicy, attemptNumber: number): boolean {
28
- return attemptNumber < retryPolicy.maximumAttempts;
58
+ export function shouldRetry(retryPolicy: StaticRetryPolicy, attemptNumber: number): boolean {
59
+ const maxAttempts = retryPolicy.maxAttempts ?? DEFAULT_RETRY_POLICY.maxAttempts;
60
+ return attemptNumber < maxAttempts;
61
+ }
62
+
63
+ export function shouldRetryByPolicy(policy: StaticRetryPolicy, attemptNumber: number): boolean {
64
+ const maxAttempts = policy.maxAttempts ?? DEFAULT_RETRY_POLICY.maxAttempts;
65
+ return attemptNumber < maxAttempts;
66
+ }
67
+
68
+ export function mergeRetryPolicy(policy: StaticRetryPolicy | undefined): MergedStaticRetryPolicy;
69
+ export function mergeRetryPolicy(policy: DynamicRetryPolicy): MergedDynamicRetryPolicy;
70
+ export function mergeRetryPolicy(policy?: RetryPolicy): MergedRetryPolicy;
71
+ export function mergeRetryPolicy(policy?: RetryPolicy): MergedRetryPolicy {
72
+ if (policy && isDynamicRetryPolicy(policy)) {
73
+ return {
74
+ maxAttempts: policy.maxAttempts ?? DEFAULT_RETRY_POLICY.maxAttempts,
75
+ shouldRetry: policy.shouldRetry,
76
+ };
77
+ }
78
+ return {
79
+ maxAttempts: policy?.maxAttempts ?? DEFAULT_RETRY_POLICY.maxAttempts,
80
+ initialIntervalMs: policy?.initialIntervalMs ?? DEFAULT_RETRY_POLICY.initialIntervalMs,
81
+ backoffCoefficient: policy?.backoffCoefficient ?? DEFAULT_RETRY_POLICY.backoffCoefficient,
82
+ maximumIntervalMs: policy?.maximumIntervalMs ?? DEFAULT_RETRY_POLICY.maximumIntervalMs,
83
+ };
84
+ }
85
+
86
+ export function serializeRetryPolicy(policy?: RetryPolicy): SerializableRetryPolicy {
87
+ if (!policy) {
88
+ return { hasDynamicPolicy: false };
89
+ }
90
+
91
+ if (isDynamicRetryPolicy(policy)) {
92
+ return {
93
+ maxAttempts: policy.maxAttempts,
94
+ hasDynamicPolicy: true,
95
+ };
96
+ }
97
+
98
+ return {
99
+ maxAttempts: policy.maxAttempts,
100
+ initialIntervalMs: policy.initialIntervalMs,
101
+ backoffCoefficient: policy.backoffCoefficient,
102
+ maximumIntervalMs: policy.maximumIntervalMs,
103
+ hasDynamicPolicy: false,
104
+ };
29
105
  }
@@ -1,6 +1,7 @@
1
1
  import { randomUUID } from "node:crypto";
2
2
  import { afterAll, beforeAll, describe, expect, test } from "vitest";
3
3
  import type { Backend } from "../backend";
4
+ import type { SerializableRetryPolicy } from "../core/retry";
4
5
  import type { StepAttempt } from "../core/step";
5
6
  import type { WorkflowRun } from "../core/workflow";
6
7
 
@@ -532,6 +533,124 @@ export function testBackend(options: TestBackendOptions): void {
532
533
 
533
534
  await teardown(backend);
534
535
  });
536
+
537
+ test("marks workflow run as failed when maxAttempts is reached", async () => {
538
+ const backend = await setup();
539
+
540
+ // retryPolicy에 maxAttempts: 2를 지정하여 생성
541
+ const retryPolicy: SerializableRetryPolicy = {
542
+ maxAttempts: 2,
543
+ initialIntervalMs: 100,
544
+ };
545
+ await backend.createWorkflowRun({
546
+ workflowName: randomUUID(),
547
+ version: null,
548
+ idempotencyKey: null,
549
+ input: null,
550
+ config: {},
551
+ context: null,
552
+ availableAt: null,
553
+ deadlineAt: null,
554
+ retryPolicy,
555
+ });
556
+
557
+ // 첫 번째 시도 - 실패하면 pending으로 스케줄링
558
+ let workerId = randomUUID();
559
+ let claimed = await backend.claimWorkflowRun({
560
+ workerId,
561
+ leaseDurationMs: 100,
562
+ });
563
+ if (!claimed) throw new Error("Expected workflow run to be claimed");
564
+ expect(claimed.attempts).toBe(1);
565
+
566
+ const firstFailed = await backend.failWorkflowRun({
567
+ workflowRunId: claimed.id,
568
+ workerId,
569
+ error: { message: "first failure" },
570
+ });
571
+ expect(firstFailed.status).toBe("pending"); // 아직 maxAttempts(2) 미달
572
+
573
+ await sleep(150); // 100ms backoff 대기
574
+
575
+ // 두 번째 시도 - maxAttempts에 도달하면 failed로 종료
576
+ workerId = randomUUID();
577
+ claimed = await backend.claimWorkflowRun({
578
+ workerId,
579
+ leaseDurationMs: 100,
580
+ });
581
+ if (!claimed) throw new Error("Expected workflow run to be claimed");
582
+ expect(claimed.attempts).toBe(2);
583
+
584
+ const secondFailed = await backend.failWorkflowRun({
585
+ workflowRunId: claimed.id,
586
+ workerId,
587
+ error: { message: "second failure" },
588
+ });
589
+
590
+ // maxAttempts에 도달했으므로 failed로 종료
591
+ expect(secondFailed.status).toBe("failed");
592
+ expect(secondFailed.availableAt).toBeNull();
593
+ expect(secondFailed.finishedAt).not.toBeNull();
594
+
595
+ await teardown(backend);
596
+ });
597
+
598
+ test("marks workflow run as failed immediately when forceComplete is true", async () => {
599
+ const backend = await setup();
600
+
601
+ await createPendingWorkflowRun(backend);
602
+
603
+ const workerId = randomUUID();
604
+ const claimed = await backend.claimWorkflowRun({
605
+ workerId,
606
+ leaseDurationMs: 100,
607
+ });
608
+ if (!claimed) throw new Error("Expected workflow run to be claimed");
609
+
610
+ // forceComplete: true로 호출하면 재시도 없이 즉시 failed
611
+ const failed = await backend.failWorkflowRun({
612
+ workflowRunId: claimed.id,
613
+ workerId,
614
+ error: { message: "forced failure" },
615
+ forceComplete: true,
616
+ });
617
+
618
+ expect(failed.status).toBe("failed");
619
+ expect(failed.availableAt).toBeNull();
620
+ expect(failed.finishedAt).not.toBeNull();
621
+
622
+ await teardown(backend);
623
+ });
624
+
625
+ test("stores retryPolicy in config when creating workflow run", async () => {
626
+ const backend = await setup();
627
+
628
+ const retryPolicy: SerializableRetryPolicy = {
629
+ maxAttempts: 10,
630
+ initialIntervalMs: 500,
631
+ backoffCoefficient: 1.5,
632
+ maximumIntervalMs: 30000,
633
+ };
634
+
635
+ const created = await backend.createWorkflowRun({
636
+ workflowName: randomUUID(),
637
+ version: null,
638
+ idempotencyKey: null,
639
+ input: null,
640
+ config: { existingKey: "existingValue" },
641
+ context: null,
642
+ availableAt: null,
643
+ deadlineAt: null,
644
+ retryPolicy,
645
+ });
646
+
647
+ // config에 retryPolicy가 저장되어 있는지 확인
648
+ const config = created.config as Record<string, unknown>;
649
+ expect(config.existingKey).toBe("existingValue");
650
+ expect(config.retryPolicy).toEqual(retryPolicy);
651
+
652
+ await teardown(backend);
653
+ });
535
654
  });
536
655
 
537
656
  describe("createStepAttempt()", () => {
@@ -20,7 +20,7 @@ import {
20
20
  type PaginatedResponse,
21
21
  type SleepWorkflowRunParams,
22
22
  } from "../backend";
23
- import { DEFAULT_RETRY_POLICY } from "../core/retry";
23
+ import { mergeRetryPolicy, type SerializableRetryPolicy } from "../core/retry";
24
24
  import type { StepAttempt } from "../core/step";
25
25
  import type { WorkflowRun } from "../core/workflow";
26
26
  import { DEFAULT_SCHEMA, migrate } from "./base";
@@ -167,6 +167,12 @@ export class BackendPostgres implements Backend {
167
167
  version: params.version,
168
168
  });
169
169
 
170
+ // config에 retryPolicy를 포함시킵니다.
171
+ const configWithRetryPolicy = {
172
+ ...(typeof params.config === "object" && params.config !== null ? params.config : {}),
173
+ retryPolicy: params.retryPolicy ?? undefined,
174
+ };
175
+
170
176
  const qb = this.knex
171
177
  .withSchema(DEFAULT_SCHEMA)
172
178
  .table("workflow_runs")
@@ -177,7 +183,7 @@ export class BackendPostgres implements Backend {
177
183
  version: params.version,
178
184
  status: "pending",
179
185
  idempotency_key: params.idempotencyKey,
180
- config: params.config,
186
+ config: JSON.stringify(configWithRetryPolicy),
181
187
  context: params.context,
182
188
  input: params.input,
183
189
  attempts: 0,
@@ -459,8 +465,60 @@ export class BackendPostgres implements Backend {
459
465
  throw new Error("Backend not initialized");
460
466
  }
461
467
 
462
- const { workflowRunId, error } = params;
463
- const { initialIntervalMs, backoffCoefficient, maximumIntervalMs } = DEFAULT_RETRY_POLICY;
468
+ const { workflowRunId, error, forceComplete, customDelayMs } = params;
469
+
470
+ logger.info("Failing workflow run: {workflowRunId}, {workerId}, {error}", {
471
+ workflowRunId: params.workflowRunId,
472
+ workerId: params.workerId,
473
+ error: params.error,
474
+ });
475
+
476
+ const workflowRun = await this.knex
477
+ .withSchema(DEFAULT_SCHEMA)
478
+ .table("workflow_runs")
479
+ .where("namespace_id", this.namespaceId)
480
+ .where("id", workflowRunId)
481
+ .first();
482
+
483
+ if (!workflowRun) {
484
+ throw new Error("Workflow run not found");
485
+ }
486
+
487
+ const config =
488
+ typeof workflowRun.config === "string" ? JSON.parse(workflowRun.config) : workflowRun.config;
489
+ const savedRetryPolicy: SerializableRetryPolicy | undefined = config?.retryPolicy;
490
+ const retryPolicy = mergeRetryPolicy(savedRetryPolicy);
491
+
492
+ const { initialIntervalMs, backoffCoefficient, maximumIntervalMs, maxAttempts } = retryPolicy;
493
+
494
+ const currentAttempts = workflowRun.attempts ?? 0;
495
+ const shouldForceComplete = forceComplete || currentAttempts >= maxAttempts;
496
+
497
+ if (shouldForceComplete) {
498
+ const [updated] = await this.knex
499
+ .withSchema(DEFAULT_SCHEMA)
500
+ .table("workflow_runs")
501
+ .where("namespace_id", this.namespaceId)
502
+ .where("id", workflowRunId)
503
+ .where("status", "running")
504
+ .where("worker_id", params.workerId)
505
+ .update({
506
+ status: "failed",
507
+ available_at: null,
508
+ finished_at: this.knex.fn.now(),
509
+ error: JSON.stringify(error),
510
+ worker_id: null,
511
+ started_at: null,
512
+ updated_at: this.knex.fn.now(),
513
+ })
514
+ .returning("*");
515
+
516
+ if (!updated) {
517
+ logger.error("Failed to mark workflow run failed: {params}", { params });
518
+ throw new Error("Failed to mark workflow run failed");
519
+ }
520
+ return updated;
521
+ }
464
522
 
465
523
  // this beefy query updates a workflow's status, available_at, and
466
524
  // finished_at based on the workflow's deadline and retry policy
@@ -468,15 +526,11 @@ export class BackendPostgres implements Backend {
468
526
  // if the next retry would exceed the deadline, the run is marked as
469
527
  // 'failed' and finalized, otherwise, the run is rescheduled with an updated
470
528
  // 'available_at' timestamp for the next retry
471
- const retryIntervalExpr = `LEAST(${initialIntervalMs} * POWER(${backoffCoefficient}, "attempts" - 1), ${maximumIntervalMs}) * INTERVAL '1 millisecond'`;
529
+ const retryIntervalExpr = customDelayMs
530
+ ? `${customDelayMs} * INTERVAL '1 millisecond'`
531
+ : `LEAST(${initialIntervalMs} * POWER(${backoffCoefficient}, "attempts" - 1), ${maximumIntervalMs}) * INTERVAL '1 millisecond'`;
472
532
  const deadlineExceededCondition = `"deadline_at" IS NOT NULL AND NOW() + (${retryIntervalExpr}) >= "deadline_at"`;
473
533
 
474
- logger.info("Failing workflow run: {workflowRunId}, {workerId}, {error}", {
475
- workflowRunId: params.workflowRunId,
476
- workerId: params.workerId,
477
- error: params.error,
478
- });
479
-
480
534
  const [updated] = await this.knex
481
535
  .withSchema(DEFAULT_SCHEMA)
482
536
  .table("workflow_runs")