@sonamu-kit/tasks 0.1.3 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.oxlintrc.json +3 -0
- package/AGENTS.md +21 -0
- package/dist/backend.d.ts +126 -103
- package/dist/backend.d.ts.map +1 -1
- package/dist/backend.js +4 -1
- package/dist/backend.js.map +1 -1
- package/dist/client.d.ts +145 -132
- package/dist/client.d.ts.map +1 -1
- package/dist/client.js +220 -212
- package/dist/client.js.map +1 -1
- package/dist/config.d.ts +15 -8
- package/dist/config.d.ts.map +1 -1
- package/dist/config.js +22 -17
- package/dist/config.js.map +1 -1
- package/dist/core/duration.d.ts +5 -4
- package/dist/core/duration.d.ts.map +1 -1
- package/dist/core/duration.js +54 -59
- package/dist/core/duration.js.map +1 -1
- package/dist/core/error.d.ts +10 -7
- package/dist/core/error.d.ts.map +1 -1
- package/dist/core/error.js +21 -21
- package/dist/core/error.js.map +1 -1
- package/dist/core/json.d.ts +8 -3
- package/dist/core/json.d.ts.map +1 -1
- package/dist/core/result.d.ts +10 -14
- package/dist/core/result.d.ts.map +1 -1
- package/dist/core/result.js +21 -16
- package/dist/core/result.js.map +1 -1
- package/dist/core/retry.d.ts +42 -20
- package/dist/core/retry.d.ts.map +1 -1
- package/dist/core/retry.js +49 -20
- package/dist/core/retry.js.map +1 -1
- package/dist/core/schema.d.ts +57 -53
- package/dist/core/schema.d.ts.map +1 -1
- package/dist/core/step.d.ts +28 -78
- package/dist/core/step.d.ts.map +1 -1
- package/dist/core/step.js +53 -63
- package/dist/core/step.js.map +1 -1
- package/dist/core/workflow.d.ts +33 -61
- package/dist/core/workflow.d.ts.map +1 -1
- package/dist/core/workflow.js +31 -41
- package/dist/core/workflow.js.map +1 -1
- package/dist/database/backend.d.ts +53 -46
- package/dist/database/backend.d.ts.map +1 -1
- package/dist/database/backend.js +544 -545
- package/dist/database/backend.js.map +1 -1
- package/dist/database/base.js +48 -25
- package/dist/database/base.js.map +1 -1
- package/dist/database/migrations/20251212000000_0_init.d.ts +10 -0
- package/dist/database/migrations/20251212000000_0_init.d.ts.map +1 -0
- package/dist/database/migrations/20251212000000_0_init.js +8 -4
- package/dist/database/migrations/20251212000000_0_init.js.map +1 -1
- package/dist/database/migrations/20251212000000_1_tables.d.ts +10 -0
- package/dist/database/migrations/20251212000000_1_tables.d.ts.map +1 -0
- package/dist/database/migrations/20251212000000_1_tables.js +81 -83
- package/dist/database/migrations/20251212000000_1_tables.js.map +1 -1
- package/dist/database/migrations/20251212000000_2_fk.d.ts +10 -0
- package/dist/database/migrations/20251212000000_2_fk.d.ts.map +1 -0
- package/dist/database/migrations/20251212000000_2_fk.js +20 -43
- package/dist/database/migrations/20251212000000_2_fk.js.map +1 -1
- package/dist/database/migrations/20251212000000_3_indexes.d.ts +10 -0
- package/dist/database/migrations/20251212000000_3_indexes.d.ts.map +1 -0
- package/dist/database/migrations/20251212000000_3_indexes.js +88 -102
- package/dist/database/migrations/20251212000000_3_indexes.js.map +1 -1
- package/dist/database/pubsub.d.ts +7 -16
- package/dist/database/pubsub.d.ts.map +1 -1
- package/dist/database/pubsub.js +75 -73
- package/dist/database/pubsub.js.map +1 -1
- package/dist/execution.d.ts +20 -57
- package/dist/execution.d.ts.map +1 -1
- package/dist/execution.js +175 -174
- package/dist/execution.js.map +1 -1
- package/dist/index.d.ts +5 -8
- package/dist/index.js +5 -5
- package/dist/internal.d.ts +12 -12
- package/dist/internal.js +4 -4
- package/dist/registry.d.ts +33 -27
- package/dist/registry.d.ts.map +1 -1
- package/dist/registry.js +58 -49
- package/dist/registry.js.map +1 -1
- package/dist/worker.d.ts +57 -50
- package/dist/worker.d.ts.map +1 -1
- package/dist/worker.js +194 -198
- package/dist/worker.js.map +1 -1
- package/dist/workflow.d.ts +26 -27
- package/dist/workflow.d.ts.map +1 -1
- package/dist/workflow.js +20 -15
- package/dist/workflow.js.map +1 -1
- package/nodemon.json +1 -1
- package/package.json +18 -20
- package/src/backend.ts +28 -8
- package/src/chaos.test.ts +3 -1
- package/src/client.test.ts +2 -0
- package/src/client.ts +32 -8
- package/src/config.test.ts +1 -0
- package/src/config.ts +3 -2
- package/src/core/duration.test.ts +2 -1
- package/src/core/duration.ts +1 -1
- package/src/core/error.test.ts +1 -0
- package/src/core/error.ts +1 -1
- package/src/core/result.test.ts +1 -0
- package/src/core/retry.test.ts +181 -11
- package/src/core/retry.ts +95 -19
- package/src/core/schema.ts +2 -2
- package/src/core/step.test.ts +2 -1
- package/src/core/step.ts +4 -3
- package/src/core/workflow.test.ts +2 -1
- package/src/core/workflow.ts +4 -3
- package/src/database/backend.test.ts +1 -0
- package/src/database/backend.testsuite.ts +162 -39
- package/src/database/backend.ts +271 -35
- package/src/database/base.test.ts +41 -0
- package/src/database/base.ts +51 -2
- package/src/database/migrations/20251212000000_0_init.ts +2 -1
- package/src/database/migrations/20251212000000_1_tables.ts +2 -1
- package/src/database/migrations/20251212000000_2_fk.ts +2 -1
- package/src/database/migrations/20251212000000_3_indexes.ts +2 -1
- package/src/database/pubsub.test.ts +6 -3
- package/src/database/pubsub.ts +55 -33
- package/src/execution.test.ts +117 -0
- package/src/execution.ts +65 -10
- package/src/internal.ts +21 -1
- package/src/practices/01-remote-workflow.ts +1 -0
- package/src/registry.test.ts +1 -0
- package/src/registry.ts +1 -1
- package/src/testing/connection.ts +3 -1
- package/src/worker.test.ts +2 -0
- package/src/worker.ts +31 -9
- package/src/workflow.test.ts +1 -0
- package/src/workflow.ts +5 -2
- package/templates/openworkflow.config.ts +2 -1
- package/tsdown.config.ts +31 -0
- package/.swcrc +0 -17
- package/dist/chaos.test.d.ts +0 -2
- package/dist/chaos.test.d.ts.map +0 -1
- package/dist/chaos.test.js +0 -92
- package/dist/chaos.test.js.map +0 -1
- package/dist/client.test.d.ts +0 -2
- package/dist/client.test.d.ts.map +0 -1
- package/dist/client.test.js +0 -340
- package/dist/client.test.js.map +0 -1
- package/dist/config.test.d.ts +0 -2
- package/dist/config.test.d.ts.map +0 -1
- package/dist/config.test.js +0 -24
- package/dist/config.test.js.map +0 -1
- package/dist/core/duration.test.d.ts +0 -2
- package/dist/core/duration.test.d.ts.map +0 -1
- package/dist/core/duration.test.js +0 -265
- package/dist/core/duration.test.js.map +0 -1
- package/dist/core/error.test.d.ts +0 -2
- package/dist/core/error.test.d.ts.map +0 -1
- package/dist/core/error.test.js +0 -63
- package/dist/core/error.test.js.map +0 -1
- package/dist/core/json.js +0 -3
- package/dist/core/json.js.map +0 -1
- package/dist/core/result.test.d.ts +0 -2
- package/dist/core/result.test.d.ts.map +0 -1
- package/dist/core/result.test.js +0 -19
- package/dist/core/result.test.js.map +0 -1
- package/dist/core/retry.test.d.ts +0 -2
- package/dist/core/retry.test.d.ts.map +0 -1
- package/dist/core/retry.test.js +0 -37
- package/dist/core/retry.test.js.map +0 -1
- package/dist/core/schema.js +0 -4
- package/dist/core/schema.js.map +0 -1
- package/dist/core/step.test.d.ts +0 -2
- package/dist/core/step.test.d.ts.map +0 -1
- package/dist/core/step.test.js +0 -356
- package/dist/core/step.test.js.map +0 -1
- package/dist/core/workflow.test.d.ts +0 -2
- package/dist/core/workflow.test.d.ts.map +0 -1
- package/dist/core/workflow.test.js +0 -172
- package/dist/core/workflow.test.js.map +0 -1
- package/dist/database/backend.test.d.ts +0 -2
- package/dist/database/backend.test.d.ts.map +0 -1
- package/dist/database/backend.test.js +0 -19
- package/dist/database/backend.test.js.map +0 -1
- package/dist/database/backend.testsuite.d.ts +0 -20
- package/dist/database/backend.testsuite.d.ts.map +0 -1
- package/dist/database/backend.testsuite.js +0 -1174
- package/dist/database/backend.testsuite.js.map +0 -1
- package/dist/database/base.d.ts +0 -12
- package/dist/database/base.d.ts.map +0 -1
- package/dist/database/pubsub.test.d.ts +0 -2
- package/dist/database/pubsub.test.d.ts.map +0 -1
- package/dist/database/pubsub.test.js +0 -86
- package/dist/database/pubsub.test.js.map +0 -1
- package/dist/execution.test.d.ts +0 -2
- package/dist/execution.test.d.ts.map +0 -1
- package/dist/execution.test.js +0 -558
- package/dist/execution.test.js.map +0 -1
- package/dist/index.d.ts.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/internal.d.ts.map +0 -1
- package/dist/internal.js.map +0 -1
- package/dist/practices/01-remote-workflow.d.ts +0 -2
- package/dist/practices/01-remote-workflow.d.ts.map +0 -1
- package/dist/practices/01-remote-workflow.js +0 -70
- package/dist/practices/01-remote-workflow.js.map +0 -1
- package/dist/registry.test.d.ts +0 -2
- package/dist/registry.test.d.ts.map +0 -1
- package/dist/registry.test.js +0 -95
- package/dist/registry.test.js.map +0 -1
- package/dist/testing/connection.d.ts +0 -7
- package/dist/testing/connection.d.ts.map +0 -1
- package/dist/testing/connection.js +0 -39
- package/dist/testing/connection.js.map +0 -1
- package/dist/worker.test.d.ts +0 -2
- package/dist/worker.test.d.ts.map +0 -1
- package/dist/worker.test.js +0 -1164
- package/dist/worker.test.js.map +0 -1
- package/dist/workflow.test.d.ts +0 -2
- package/dist/workflow.test.d.ts.map +0 -1
- package/dist/workflow.test.js +0 -73
- package/dist/workflow.test.js.map +0 -1
package/dist/database/backend.js
CHANGED
|
@@ -1,559 +1,558 @@
|
|
|
1
|
-
import {
|
|
2
|
-
import { camelize } from "inflection";
|
|
3
|
-
import knex from "knex";
|
|
1
|
+
import { mergeRetryPolicy } from "../core/retry.js";
|
|
4
2
|
import { DEFAULT_NAMESPACE_ID } from "../backend.js";
|
|
5
|
-
import { DEFAULT_RETRY_POLICY } from "../core/retry.js";
|
|
6
3
|
import { DEFAULT_SCHEMA, migrate } from "./base.js";
|
|
7
4
|
import { PostgresPubSub } from "./pubsub.js";
|
|
8
|
-
|
|
5
|
+
import { getLogger } from "@logtape/logtape";
|
|
6
|
+
import { camelize } from "inflection";
|
|
7
|
+
import knex from "knex";
|
|
8
|
+
|
|
9
|
+
//#region src/database/backend.ts
|
|
10
|
+
const DEFAULT_LISTEN_CHANNEL = "new_tasks";
|
|
9
11
|
const DEFAULT_PAGINATION_PAGE_SIZE = 100;
|
|
10
12
|
const logger = getLogger([
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
13
|
+
"sonamu",
|
|
14
|
+
"internal",
|
|
15
|
+
"tasks"
|
|
14
16
|
]);
|
|
15
17
|
const queryLogger = getLogger([
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
18
|
+
"sonamu",
|
|
19
|
+
"internal",
|
|
20
|
+
"tasks",
|
|
21
|
+
"query"
|
|
20
22
|
]);
|
|
21
23
|
/**
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
|
|
326
|
-
|
|
327
|
-
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
360
|
-
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
|
|
364
|
-
|
|
365
|
-
|
|
366
|
-
|
|
367
|
-
|
|
368
|
-
|
|
369
|
-
|
|
370
|
-
|
|
371
|
-
|
|
372
|
-
|
|
373
|
-
|
|
374
|
-
|
|
375
|
-
|
|
376
|
-
|
|
377
|
-
|
|
378
|
-
|
|
379
|
-
|
|
380
|
-
|
|
381
|
-
|
|
382
|
-
|
|
383
|
-
|
|
384
|
-
|
|
385
|
-
|
|
386
|
-
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
|
|
390
|
-
|
|
391
|
-
|
|
392
|
-
|
|
393
|
-
|
|
394
|
-
|
|
395
|
-
|
|
396
|
-
|
|
397
|
-
|
|
398
|
-
|
|
399
|
-
|
|
400
|
-
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
|
|
404
|
-
|
|
405
|
-
|
|
406
|
-
|
|
407
|
-
|
|
408
|
-
|
|
409
|
-
|
|
410
|
-
|
|
411
|
-
|
|
412
|
-
|
|
413
|
-
|
|
414
|
-
|
|
415
|
-
|
|
416
|
-
|
|
417
|
-
|
|
418
|
-
|
|
419
|
-
|
|
420
|
-
|
|
421
|
-
|
|
422
|
-
|
|
423
|
-
|
|
424
|
-
|
|
425
|
-
|
|
426
|
-
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
|
|
430
|
-
|
|
431
|
-
|
|
432
|
-
|
|
433
|
-
|
|
434
|
-
|
|
435
|
-
|
|
436
|
-
|
|
437
|
-
|
|
438
|
-
|
|
439
|
-
|
|
440
|
-
|
|
441
|
-
|
|
442
|
-
|
|
443
|
-
|
|
444
|
-
|
|
445
|
-
|
|
446
|
-
|
|
447
|
-
|
|
448
|
-
|
|
449
|
-
|
|
450
|
-
|
|
451
|
-
|
|
452
|
-
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
|
|
456
|
-
|
|
457
|
-
|
|
458
|
-
|
|
459
|
-
|
|
460
|
-
|
|
461
|
-
|
|
462
|
-
|
|
463
|
-
|
|
464
|
-
|
|
465
|
-
|
|
466
|
-
|
|
467
|
-
|
|
468
|
-
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
|
|
473
|
-
|
|
474
|
-
|
|
475
|
-
|
|
476
|
-
|
|
477
|
-
|
|
478
|
-
|
|
479
|
-
|
|
480
|
-
|
|
481
|
-
|
|
482
|
-
|
|
483
|
-
|
|
484
|
-
|
|
485
|
-
|
|
486
|
-
|
|
487
|
-
|
|
488
|
-
|
|
489
|
-
|
|
490
|
-
|
|
491
|
-
|
|
492
|
-
|
|
493
|
-
|
|
494
|
-
|
|
495
|
-
|
|
496
|
-
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
|
|
500
|
-
|
|
501
|
-
|
|
502
|
-
|
|
503
|
-
|
|
504
|
-
|
|
505
|
-
|
|
506
|
-
|
|
507
|
-
|
|
508
|
-
|
|
509
|
-
|
|
510
|
-
|
|
511
|
-
|
|
512
|
-
|
|
513
|
-
|
|
514
|
-
|
|
515
|
-
|
|
516
|
-
|
|
517
|
-
|
|
518
|
-
|
|
519
|
-
|
|
520
|
-
|
|
521
|
-
|
|
522
|
-
|
|
523
|
-
|
|
524
|
-
|
|
525
|
-
|
|
526
|
-
|
|
527
|
-
|
|
528
|
-
|
|
529
|
-
|
|
530
|
-
|
|
531
|
-
|
|
532
|
-
|
|
533
|
-
|
|
534
|
-
|
|
535
|
-
|
|
536
|
-
|
|
537
|
-
|
|
538
|
-
|
|
539
|
-
}
|
|
540
|
-
return updated;
|
|
541
|
-
}
|
|
542
|
-
}
|
|
24
|
+
* Manages a connection to a Postgres database for workflow operations.
|
|
25
|
+
*/
|
|
26
|
+
var BackendPostgres = class {
|
|
27
|
+
config;
|
|
28
|
+
namespaceId;
|
|
29
|
+
usePubSub;
|
|
30
|
+
pubsub = null;
|
|
31
|
+
initialized = false;
|
|
32
|
+
runMigrations;
|
|
33
|
+
_knex = null;
|
|
34
|
+
get knex() {
|
|
35
|
+
if (!this._knex) {
|
|
36
|
+
this._knex = knex(this.config);
|
|
37
|
+
this._knex.on("query", (query) => {
|
|
38
|
+
queryLogger.debug("SQL: {query}, Values: {bindings}", {
|
|
39
|
+
query: query.sql,
|
|
40
|
+
bindings: query.bindings
|
|
41
|
+
});
|
|
42
|
+
});
|
|
43
|
+
}
|
|
44
|
+
return this._knex;
|
|
45
|
+
}
|
|
46
|
+
constructor(config, options) {
|
|
47
|
+
this.config = {
|
|
48
|
+
...config,
|
|
49
|
+
postProcessResponse: (result, _queryContext) => {
|
|
50
|
+
if (result === null || result === void 0) return result;
|
|
51
|
+
if (config?.postProcessResponse) result = config.postProcessResponse(result, _queryContext);
|
|
52
|
+
const camelizeRow = (row) => Object.fromEntries(Object.entries(row).map(([key, value]) => [camelize(key, true), value]));
|
|
53
|
+
if (Array.isArray(result)) return result.map(camelizeRow);
|
|
54
|
+
return camelizeRow(result);
|
|
55
|
+
}
|
|
56
|
+
};
|
|
57
|
+
const { namespaceId, usePubSub, runMigrations } = {
|
|
58
|
+
namespaceId: DEFAULT_NAMESPACE_ID,
|
|
59
|
+
usePubSub: true,
|
|
60
|
+
runMigrations: true,
|
|
61
|
+
...options
|
|
62
|
+
};
|
|
63
|
+
this.namespaceId = namespaceId;
|
|
64
|
+
this.usePubSub = usePubSub;
|
|
65
|
+
this.runMigrations = runMigrations;
|
|
66
|
+
}
|
|
67
|
+
async initialize() {
|
|
68
|
+
if (this.initialized) return;
|
|
69
|
+
if (this.runMigrations) await migrate(this.config, DEFAULT_SCHEMA);
|
|
70
|
+
this.initialized = true;
|
|
71
|
+
}
|
|
72
|
+
async subscribe(callback) {
|
|
73
|
+
if (!this.initialized) throw new Error("Backend not initialized");
|
|
74
|
+
if (!this.usePubSub) return;
|
|
75
|
+
if (!this.pubsub) this.pubsub = await PostgresPubSub.create(this.knex);
|
|
76
|
+
this.pubsub.listenEvent(DEFAULT_LISTEN_CHANNEL, callback);
|
|
77
|
+
}
|
|
78
|
+
async publish(payload) {
|
|
79
|
+
if (!this.initialized) throw new Error("Backend not initialized");
|
|
80
|
+
if (!this.usePubSub) return;
|
|
81
|
+
await this.knex.raw(payload ? `NOTIFY ${DEFAULT_LISTEN_CHANNEL}, '${payload}'` : `NOTIFY ${DEFAULT_LISTEN_CHANNEL}`);
|
|
82
|
+
}
|
|
83
|
+
async stop() {
|
|
84
|
+
if (!this.initialized) return;
|
|
85
|
+
await this.pubsub?.destroy();
|
|
86
|
+
this.pubsub = null;
|
|
87
|
+
await this.knex.destroy();
|
|
88
|
+
this._knex = null;
|
|
89
|
+
this.initialized = false;
|
|
90
|
+
}
|
|
91
|
+
async createWorkflowRun(params) {
|
|
92
|
+
if (!this.initialized) throw new Error("Backend not initialized");
|
|
93
|
+
logger.info("Creating workflow run: {workflowName}:{version}", {
|
|
94
|
+
workflowName: params.workflowName,
|
|
95
|
+
version: params.version
|
|
96
|
+
});
|
|
97
|
+
const configWithRetryPolicy = {
|
|
98
|
+
...typeof params.config === "object" && params.config !== null ? params.config : {},
|
|
99
|
+
retryPolicy: params.retryPolicy ?? void 0
|
|
100
|
+
};
|
|
101
|
+
const workflowRun = await this.knex.withSchema(DEFAULT_SCHEMA).table("workflow_runs").insert({
|
|
102
|
+
namespace_id: this.namespaceId,
|
|
103
|
+
id: crypto.randomUUID(),
|
|
104
|
+
workflow_name: params.workflowName,
|
|
105
|
+
version: params.version,
|
|
106
|
+
status: "pending",
|
|
107
|
+
idempotency_key: params.idempotencyKey,
|
|
108
|
+
config: JSON.stringify(configWithRetryPolicy),
|
|
109
|
+
context: params.context,
|
|
110
|
+
input: params.input,
|
|
111
|
+
attempts: 0,
|
|
112
|
+
available_at: params.availableAt ?? this.knex.fn.now(),
|
|
113
|
+
deadline_at: params.deadlineAt,
|
|
114
|
+
created_at: this.knex.fn.now(),
|
|
115
|
+
updated_at: this.knex.fn.now()
|
|
116
|
+
}).returning("*");
|
|
117
|
+
if (!workflowRun[0]) {
|
|
118
|
+
logger.error("Failed to create workflow run: {params}", { params });
|
|
119
|
+
throw new Error("Failed to create workflow run");
|
|
120
|
+
}
|
|
121
|
+
return workflowRun[0];
|
|
122
|
+
}
|
|
123
|
+
async getWorkflowRun(params) {
|
|
124
|
+
if (!this.initialized) throw new Error("Backend not initialized");
|
|
125
|
+
logger.info("Getting workflow run: {workflowRunId}", { workflowRunId: params.workflowRunId });
|
|
126
|
+
return await this.knex.withSchema(DEFAULT_SCHEMA).table("workflow_runs").where("namespace_id", this.namespaceId).where("id", params.workflowRunId).select("namespace_id", "id", "workflow_name", "version", "status", "idempotency_key", "config", "context", "input", "output", "error", "attempts", "parent_step_attempt_namespace_id", "parent_step_attempt_id", "worker_id", "available_at", "deadline_at", "started_at", "finished_at", "created_at", "updated_at").first() ?? null;
|
|
127
|
+
}
|
|
128
|
+
async listWorkflowRuns(params) {
|
|
129
|
+
if (!this.initialized) throw new Error("Backend not initialized");
|
|
130
|
+
logger.info("Listing workflow runs: {after}, {before}", {
|
|
131
|
+
after: params.after,
|
|
132
|
+
before: params.before
|
|
133
|
+
});
|
|
134
|
+
const limit = params.limit ?? DEFAULT_PAGINATION_PAGE_SIZE;
|
|
135
|
+
const { after, before } = params;
|
|
136
|
+
const order = params.order ?? "asc";
|
|
137
|
+
const reverseOrder = order === "asc" ? "desc" : "asc";
|
|
138
|
+
let cursor = null;
|
|
139
|
+
if (after) cursor = decodeCursor(after);
|
|
140
|
+
else if (before) cursor = decodeCursor(before);
|
|
141
|
+
const rows = await this.buildListWorkflowRunsWhere(params, cursor, order).orderBy("created_at", before ? reverseOrder : order).orderBy("id", before ? reverseOrder : order).limit(limit + 1);
|
|
142
|
+
return this.processPaginationResults(rows, limit, typeof after === "string", typeof before === "string");
|
|
143
|
+
}
|
|
144
|
+
buildListWorkflowRunsWhere(params, cursor, order) {
|
|
145
|
+
const { after } = params;
|
|
146
|
+
const qb = this.knex.withSchema(DEFAULT_SCHEMA).table("workflow_runs").where("namespace_id", this.namespaceId);
|
|
147
|
+
if (cursor) {
|
|
148
|
+
const operator = order === "asc" === !!after ? ">" : "<";
|
|
149
|
+
qb.whereRaw(`("created_at", "id") ${operator} (?, ?)`, [cursor.createdAt.toISOString(), cursor.id]);
|
|
150
|
+
}
|
|
151
|
+
if (params.status && params.status.length > 0) qb.whereIn("status", params.status);
|
|
152
|
+
if (params.workflowName) qb.where("workflow_name", params.workflowName);
|
|
153
|
+
if (params.createdAfter) qb.where("created_at", ">=", params.createdAfter);
|
|
154
|
+
if (params.createdBefore) qb.where("created_at", "<=", params.createdBefore);
|
|
155
|
+
return qb;
|
|
156
|
+
}
|
|
157
|
+
async claimWorkflowRun(params) {
|
|
158
|
+
if (!this.initialized) throw new Error("Backend not initialized");
|
|
159
|
+
logger.info("Claiming workflow run: {workerId}, {leaseDurationMs}", {
|
|
160
|
+
workerId: params.workerId,
|
|
161
|
+
leaseDurationMs: params.leaseDurationMs
|
|
162
|
+
});
|
|
163
|
+
return (await this.knex.with("expired", (qb) => qb.withSchema(DEFAULT_SCHEMA).table("workflow_runs").update({
|
|
164
|
+
status: "failed",
|
|
165
|
+
error: JSON.stringify({ message: "Workflow run deadline exceeded" }),
|
|
166
|
+
worker_id: null,
|
|
167
|
+
available_at: null,
|
|
168
|
+
finished_at: this.knex.raw("NOW()"),
|
|
169
|
+
updated_at: this.knex.raw("NOW()")
|
|
170
|
+
}).where("namespace_id", this.namespaceId).whereIn("status", [
|
|
171
|
+
"pending",
|
|
172
|
+
"running",
|
|
173
|
+
"sleeping"
|
|
174
|
+
]).whereNotNull("deadline_at").where("deadline_at", "<=", this.knex.raw("NOW()")).returning("id")).with("candidate", (qb) => qb.withSchema(DEFAULT_SCHEMA).select("id").from("workflow_runs").where("namespace_id", this.namespaceId).whereIn("status", [
|
|
175
|
+
"pending",
|
|
176
|
+
"running",
|
|
177
|
+
"sleeping"
|
|
178
|
+
]).where("available_at", "<=", this.knex.raw("NOW()")).where((qb2) => {
|
|
179
|
+
qb2.whereNull("deadline_at").orWhere("deadline_at", ">", this.knex.raw("NOW()"));
|
|
180
|
+
}).orderByRaw("CASE WHEN status = 'pending' THEN 0 ELSE 1 END").orderBy("available_at", "asc").orderBy("created_at", "asc").limit(1).forUpdate().skipLocked()).withSchema(DEFAULT_SCHEMA).table("workflow_runs as wr").where("wr.namespace_id", this.namespaceId).where("wr.id", this.knex.ref("candidate.id")).update({
|
|
181
|
+
status: "running",
|
|
182
|
+
attempts: this.knex.raw("wr.attempts + 1"),
|
|
183
|
+
worker_id: params.workerId,
|
|
184
|
+
available_at: this.knex.raw(`NOW() + ${params.leaseDurationMs} * INTERVAL '1 millisecond'`),
|
|
185
|
+
started_at: this.knex.raw("COALESCE(wr.started_at, NOW())"),
|
|
186
|
+
updated_at: this.knex.raw("NOW()")
|
|
187
|
+
}).updateFrom("candidate").returning("wr.*"))[0] ?? null;
|
|
188
|
+
}
|
|
189
|
+
async extendWorkflowRunLease(params) {
|
|
190
|
+
if (!this.initialized) throw new Error("Backend not initialized");
|
|
191
|
+
logger.info("Extending workflow run lease: {workflowRunId}, {workerId}, {leaseDurationMs}", {
|
|
192
|
+
workflowRunId: params.workflowRunId,
|
|
193
|
+
workerId: params.workerId,
|
|
194
|
+
leaseDurationMs: params.leaseDurationMs
|
|
195
|
+
});
|
|
196
|
+
const [updated] = await this.knex.withSchema(DEFAULT_SCHEMA).table("workflow_runs").where("namespace_id", this.namespaceId).where("id", params.workflowRunId).where("status", "running").where("worker_id", params.workerId).update({
|
|
197
|
+
available_at: this.knex.raw(`NOW() + ${params.leaseDurationMs} * INTERVAL '1 millisecond'`),
|
|
198
|
+
updated_at: this.knex.fn.now()
|
|
199
|
+
}).returning("*");
|
|
200
|
+
if (!updated) {
|
|
201
|
+
const wr = await this.getWorkflowRun({ workflowRunId: params.workflowRunId });
|
|
202
|
+
if (wr && (wr.status === "paused" || wr.status === "canceled")) throw new Error("Workflow run is paused or canceled");
|
|
203
|
+
logger.error("Failed to extend lease for workflow run: {params}", { params });
|
|
204
|
+
throw new Error("Failed to extend lease for workflow run");
|
|
205
|
+
}
|
|
206
|
+
return updated;
|
|
207
|
+
}
|
|
208
|
+
async sleepWorkflowRun(params) {
|
|
209
|
+
if (!this.initialized) throw new Error("Backend not initialized");
|
|
210
|
+
logger.info("Sleeping workflow run: {workflowRunId}, {workerId}, {availableAt}", {
|
|
211
|
+
workflowRunId: params.workflowRunId,
|
|
212
|
+
workerId: params.workerId,
|
|
213
|
+
availableAt: params.availableAt
|
|
214
|
+
});
|
|
215
|
+
const [updated] = await this.knex.withSchema(DEFAULT_SCHEMA).table("workflow_runs").where("namespace_id", this.namespaceId).where("id", params.workflowRunId).whereNotIn("status", [
|
|
216
|
+
"succeeded",
|
|
217
|
+
"completed",
|
|
218
|
+
"failed",
|
|
219
|
+
"canceled"
|
|
220
|
+
]).where("worker_id", params.workerId).update({
|
|
221
|
+
status: "sleeping",
|
|
222
|
+
available_at: params.availableAt,
|
|
223
|
+
worker_id: null,
|
|
224
|
+
updated_at: this.knex.fn.now()
|
|
225
|
+
}).returning("*");
|
|
226
|
+
if (!updated) {
|
|
227
|
+
logger.error("Failed to sleep workflow run: {params}", { params });
|
|
228
|
+
throw new Error("Failed to sleep workflow run");
|
|
229
|
+
}
|
|
230
|
+
return updated;
|
|
231
|
+
}
|
|
232
|
+
async completeWorkflowRun(params) {
|
|
233
|
+
if (!this.initialized) throw new Error("Backend not initialized");
|
|
234
|
+
logger.info("Completing workflow run: {workflowRunId}, {workerId}, {output}", {
|
|
235
|
+
workflowRunId: params.workflowRunId,
|
|
236
|
+
workerId: params.workerId,
|
|
237
|
+
output: params.output
|
|
238
|
+
});
|
|
239
|
+
const [updated] = await this.knex.withSchema(DEFAULT_SCHEMA).table("workflow_runs").where("namespace_id", this.namespaceId).where("id", params.workflowRunId).where("status", "running").where("worker_id", params.workerId).update({
|
|
240
|
+
status: "completed",
|
|
241
|
+
output: JSON.stringify(params.output),
|
|
242
|
+
error: null,
|
|
243
|
+
worker_id: params.workerId,
|
|
244
|
+
available_at: null,
|
|
245
|
+
finished_at: this.knex.fn.now(),
|
|
246
|
+
updated_at: this.knex.fn.now()
|
|
247
|
+
}).returning("*");
|
|
248
|
+
if (!updated) {
|
|
249
|
+
logger.error("Failed to complete workflow run: {params}", { params });
|
|
250
|
+
throw new Error("Failed to complete workflow run");
|
|
251
|
+
}
|
|
252
|
+
return updated;
|
|
253
|
+
}
|
|
254
|
+
async failWorkflowRun(params) {
|
|
255
|
+
if (!this.initialized) throw new Error("Backend not initialized");
|
|
256
|
+
const { workflowRunId, error, forceComplete, customDelayMs } = params;
|
|
257
|
+
logger.info("Failing workflow run: {workflowRunId}, {workerId}, {error}", {
|
|
258
|
+
workflowRunId: params.workflowRunId,
|
|
259
|
+
workerId: params.workerId,
|
|
260
|
+
error: params.error
|
|
261
|
+
});
|
|
262
|
+
const workflowRun = await this.knex.withSchema(DEFAULT_SCHEMA).table("workflow_runs").where("namespace_id", this.namespaceId).where("id", workflowRunId).first();
|
|
263
|
+
if (!workflowRun) throw new Error("Workflow run not found");
|
|
264
|
+
const savedRetryPolicy = (typeof workflowRun.config === "string" ? JSON.parse(workflowRun.config) : workflowRun.config)?.retryPolicy;
|
|
265
|
+
const { initialIntervalMs, backoffCoefficient, maximumIntervalMs, maxAttempts } = mergeRetryPolicy(savedRetryPolicy);
|
|
266
|
+
const currentAttempts = workflowRun.attempts ?? 0;
|
|
267
|
+
if (forceComplete || currentAttempts >= maxAttempts) {
|
|
268
|
+
const [updated$1] = await this.knex.withSchema(DEFAULT_SCHEMA).table("workflow_runs").where("namespace_id", this.namespaceId).where("id", workflowRunId).where("status", "running").where("worker_id", params.workerId).update({
|
|
269
|
+
status: "failed",
|
|
270
|
+
available_at: null,
|
|
271
|
+
finished_at: this.knex.fn.now(),
|
|
272
|
+
error: JSON.stringify(error),
|
|
273
|
+
worker_id: null,
|
|
274
|
+
started_at: null,
|
|
275
|
+
updated_at: this.knex.fn.now()
|
|
276
|
+
}).returning("*");
|
|
277
|
+
if (!updated$1) {
|
|
278
|
+
logger.error("Failed to mark workflow run failed: {params}", { params });
|
|
279
|
+
throw new Error("Failed to mark workflow run failed");
|
|
280
|
+
}
|
|
281
|
+
return updated$1;
|
|
282
|
+
}
|
|
283
|
+
const retryIntervalExpr = customDelayMs ? `${customDelayMs} * INTERVAL '1 millisecond'` : `LEAST(${initialIntervalMs} * POWER(${backoffCoefficient}, "attempts" - 1), ${maximumIntervalMs}) * INTERVAL '1 millisecond'`;
|
|
284
|
+
const deadlineExceededCondition = `"deadline_at" IS NOT NULL AND NOW() + (${retryIntervalExpr}) >= "deadline_at"`;
|
|
285
|
+
const [updated] = await this.knex.withSchema(DEFAULT_SCHEMA).table("workflow_runs").where("namespace_id", this.namespaceId).where("id", workflowRunId).where("status", "running").where("worker_id", params.workerId).update({
|
|
286
|
+
status: this.knex.raw(`CASE WHEN ${deadlineExceededCondition} THEN 'failed' ELSE 'pending' END`),
|
|
287
|
+
available_at: this.knex.raw(`CASE WHEN ${deadlineExceededCondition} THEN NULL ELSE NOW() + (${retryIntervalExpr}) END`),
|
|
288
|
+
finished_at: this.knex.raw(`CASE WHEN ${deadlineExceededCondition} THEN NOW() ELSE NULL END`),
|
|
289
|
+
error: JSON.stringify(error),
|
|
290
|
+
worker_id: null,
|
|
291
|
+
started_at: null,
|
|
292
|
+
updated_at: this.knex.fn.now()
|
|
293
|
+
}).returning("*");
|
|
294
|
+
if (!updated) {
|
|
295
|
+
logger.error("Failed to mark workflow run failed: {params}", { params });
|
|
296
|
+
throw new Error("Failed to mark workflow run failed");
|
|
297
|
+
}
|
|
298
|
+
return updated;
|
|
299
|
+
}
|
|
300
|
+
async cancelWorkflowRun(params) {
|
|
301
|
+
if (!this.initialized) throw new Error("Backend not initialized");
|
|
302
|
+
logger.info("Canceling workflow run: {workflowRunId}", { workflowRunId: params.workflowRunId });
|
|
303
|
+
const [updated] = await this.knex.withSchema(DEFAULT_SCHEMA).table("workflow_runs").where("namespace_id", this.namespaceId).where("id", params.workflowRunId).whereIn("status", [
|
|
304
|
+
"pending",
|
|
305
|
+
"running",
|
|
306
|
+
"sleeping",
|
|
307
|
+
"paused"
|
|
308
|
+
]).update({
|
|
309
|
+
status: "canceled",
|
|
310
|
+
worker_id: null,
|
|
311
|
+
available_at: null,
|
|
312
|
+
finished_at: this.knex.fn.now(),
|
|
313
|
+
updated_at: this.knex.fn.now()
|
|
314
|
+
}).returning("*");
|
|
315
|
+
if (!updated) {
|
|
316
|
+
const existing = await this.getWorkflowRun({ workflowRunId: params.workflowRunId });
|
|
317
|
+
if (!existing) throw new Error(`Workflow run ${params.workflowRunId} does not exist`);
|
|
318
|
+
if (existing.status === "canceled") return existing;
|
|
319
|
+
if ([
|
|
320
|
+
"succeeded",
|
|
321
|
+
"completed",
|
|
322
|
+
"failed"
|
|
323
|
+
].includes(existing.status)) {
|
|
324
|
+
logger.error("Cannot cancel workflow run: {params} with status {status}", {
|
|
325
|
+
params,
|
|
326
|
+
status: existing.status
|
|
327
|
+
});
|
|
328
|
+
throw new Error(`Cannot cancel workflow run ${params.workflowRunId} with status ${existing.status}`);
|
|
329
|
+
}
|
|
330
|
+
logger.error("Failed to cancel workflow run: {params}", { params });
|
|
331
|
+
throw new Error("Failed to cancel workflow run");
|
|
332
|
+
}
|
|
333
|
+
return updated;
|
|
334
|
+
}
|
|
335
|
+
async pauseWorkflowRun(params) {
|
|
336
|
+
if (!this.initialized) throw new Error("Backend not initialized");
|
|
337
|
+
logger.info("Pausing workflow run: {workflowRunId}", { workflowRunId: params.workflowRunId });
|
|
338
|
+
const [updated] = await this.knex.withSchema(DEFAULT_SCHEMA).table("workflow_runs").where("namespace_id", this.namespaceId).where("id", params.workflowRunId).whereIn("status", [
|
|
339
|
+
"pending",
|
|
340
|
+
"running",
|
|
341
|
+
"sleeping"
|
|
342
|
+
]).update({
|
|
343
|
+
status: "paused",
|
|
344
|
+
worker_id: null,
|
|
345
|
+
available_at: null,
|
|
346
|
+
updated_at: this.knex.fn.now()
|
|
347
|
+
}).returning("*");
|
|
348
|
+
if (!updated) {
|
|
349
|
+
const existing = await this.getWorkflowRun({ workflowRunId: params.workflowRunId });
|
|
350
|
+
if (!existing) throw new Error(`Workflow run ${params.workflowRunId} does not exist`);
|
|
351
|
+
if (existing.status === "paused") return existing;
|
|
352
|
+
if ([
|
|
353
|
+
"succeeded",
|
|
354
|
+
"completed",
|
|
355
|
+
"failed",
|
|
356
|
+
"canceled"
|
|
357
|
+
].includes(existing.status)) {
|
|
358
|
+
logger.error("Cannot pause workflow run: {params} with status {status}", {
|
|
359
|
+
params,
|
|
360
|
+
status: existing.status
|
|
361
|
+
});
|
|
362
|
+
throw new Error(`Cannot pause workflow run ${params.workflowRunId} with status ${existing.status}`);
|
|
363
|
+
}
|
|
364
|
+
logger.error("Failed to pause workflow run: {params}", { params });
|
|
365
|
+
throw new Error("Failed to pause workflow run");
|
|
366
|
+
}
|
|
367
|
+
return updated;
|
|
368
|
+
}
|
|
369
|
+
async resumeWorkflowRun(params) {
|
|
370
|
+
if (!this.initialized) throw new Error("Backend not initialized");
|
|
371
|
+
logger.info("Resuming workflow run: {workflowRunId}", { workflowRunId: params.workflowRunId });
|
|
372
|
+
const [updated] = await this.knex.withSchema(DEFAULT_SCHEMA).table("workflow_runs").where("namespace_id", this.namespaceId).where("id", params.workflowRunId).where("status", "paused").update({
|
|
373
|
+
status: "pending",
|
|
374
|
+
available_at: this.knex.fn.now(),
|
|
375
|
+
updated_at: this.knex.fn.now()
|
|
376
|
+
}).returning("*");
|
|
377
|
+
if (!updated) {
|
|
378
|
+
const existing = await this.getWorkflowRun({ workflowRunId: params.workflowRunId });
|
|
379
|
+
if (!existing) throw new Error(`Workflow run ${params.workflowRunId} does not exist`);
|
|
380
|
+
if (existing.status === "pending" || existing.status === "running") return existing;
|
|
381
|
+
if ([
|
|
382
|
+
"succeeded",
|
|
383
|
+
"completed",
|
|
384
|
+
"failed",
|
|
385
|
+
"canceled"
|
|
386
|
+
].includes(existing.status)) {
|
|
387
|
+
logger.error("Cannot resume workflow run: {params} with status {status}", {
|
|
388
|
+
params,
|
|
389
|
+
status: existing.status
|
|
390
|
+
});
|
|
391
|
+
throw new Error(`Cannot resume workflow run ${params.workflowRunId} with status ${existing.status}`);
|
|
392
|
+
}
|
|
393
|
+
logger.error("Failed to resume workflow run: {params}", { params });
|
|
394
|
+
throw new Error("Failed to resume workflow run");
|
|
395
|
+
}
|
|
396
|
+
return updated;
|
|
397
|
+
}
|
|
398
|
+
async createStepAttempt(params) {
|
|
399
|
+
if (!this.initialized) throw new Error("Backend not initialized");
|
|
400
|
+
logger.info("Creating step attempt: {workflowRunId}, {stepName}, {kind}", {
|
|
401
|
+
workflowRunId: params.workflowRunId,
|
|
402
|
+
stepName: params.stepName,
|
|
403
|
+
kind: params.kind
|
|
404
|
+
});
|
|
405
|
+
const [stepAttempt] = await this.knex.withSchema(DEFAULT_SCHEMA).table("step_attempts").insert({
|
|
406
|
+
namespace_id: this.namespaceId,
|
|
407
|
+
id: crypto.randomUUID(),
|
|
408
|
+
workflow_run_id: params.workflowRunId,
|
|
409
|
+
step_name: params.stepName,
|
|
410
|
+
kind: params.kind,
|
|
411
|
+
status: "running",
|
|
412
|
+
config: JSON.stringify(params.config),
|
|
413
|
+
context: JSON.stringify(params.context),
|
|
414
|
+
started_at: this.knex.fn.now(),
|
|
415
|
+
created_at: this.knex.raw("date_trunc('milliseconds', NOW())"),
|
|
416
|
+
updated_at: this.knex.fn.now()
|
|
417
|
+
}).returning("*");
|
|
418
|
+
if (!stepAttempt) {
|
|
419
|
+
logger.error("Failed to create step attempt: {params}", { params });
|
|
420
|
+
throw new Error("Failed to create step attempt");
|
|
421
|
+
}
|
|
422
|
+
return stepAttempt;
|
|
423
|
+
}
|
|
424
|
+
async getStepAttempt(params) {
|
|
425
|
+
if (!this.initialized) throw new Error("Backend not initialized");
|
|
426
|
+
logger.info("Getting step attempt: {stepAttemptId}", { stepAttemptId: params.stepAttemptId });
|
|
427
|
+
return await this.knex.withSchema(DEFAULT_SCHEMA).table("step_attempts").where("namespace_id", this.namespaceId).where("id", params.stepAttemptId).first() ?? null;
|
|
428
|
+
}
|
|
429
|
+
async listStepAttempts(params) {
|
|
430
|
+
if (!this.initialized) throw new Error("Backend not initialized");
|
|
431
|
+
logger.info("Listing step attempts: {workflowRunId}, {after}, {before}", {
|
|
432
|
+
workflowRunId: params.workflowRunId,
|
|
433
|
+
after: params.after,
|
|
434
|
+
before: params.before
|
|
435
|
+
});
|
|
436
|
+
const limit = params.limit ?? DEFAULT_PAGINATION_PAGE_SIZE;
|
|
437
|
+
const { after, before } = params;
|
|
438
|
+
const order = params.order ?? "asc";
|
|
439
|
+
const reverseOrder = order === "asc" ? "desc" : "asc";
|
|
440
|
+
let cursor = null;
|
|
441
|
+
if (after) cursor = decodeCursor(after);
|
|
442
|
+
else if (before) cursor = decodeCursor(before);
|
|
443
|
+
const rows = await this.buildListStepAttemptsWhere(params, cursor, order).orderBy("created_at", before ? reverseOrder : order).orderBy("id", before ? reverseOrder : order).limit(limit + 1);
|
|
444
|
+
return this.processPaginationResults(rows, limit, typeof after === "string", typeof before === "string");
|
|
445
|
+
}
|
|
446
|
+
buildListStepAttemptsWhere(params, cursor, order) {
|
|
447
|
+
const { after } = params;
|
|
448
|
+
const qb = this.knex.withSchema(DEFAULT_SCHEMA).table("step_attempts").where("namespace_id", this.namespaceId).where("workflow_run_id", params.workflowRunId);
|
|
449
|
+
if (cursor) {
|
|
450
|
+
const operator = order === "asc" === !!after ? ">" : "<";
|
|
451
|
+
return qb.whereRaw(`("created_at", "id") ${operator} (?, ?)`, [cursor.createdAt.toISOString(), cursor.id]);
|
|
452
|
+
}
|
|
453
|
+
return qb;
|
|
454
|
+
}
|
|
455
|
+
processPaginationResults(rows, limit, hasAfter, hasBefore) {
|
|
456
|
+
const data = rows;
|
|
457
|
+
let hasNext = false;
|
|
458
|
+
let hasPrev = false;
|
|
459
|
+
if (hasBefore) {
|
|
460
|
+
data.reverse();
|
|
461
|
+
if (data.length > limit) {
|
|
462
|
+
hasPrev = true;
|
|
463
|
+
data.shift();
|
|
464
|
+
}
|
|
465
|
+
hasNext = true;
|
|
466
|
+
} else {
|
|
467
|
+
if (data.length > limit) {
|
|
468
|
+
hasNext = true;
|
|
469
|
+
data.pop();
|
|
470
|
+
}
|
|
471
|
+
if (hasAfter) hasPrev = true;
|
|
472
|
+
}
|
|
473
|
+
const lastItem = data.at(-1);
|
|
474
|
+
const nextCursor = hasNext && lastItem ? encodeCursor(lastItem) : null;
|
|
475
|
+
const firstItem = data[0];
|
|
476
|
+
return {
|
|
477
|
+
data,
|
|
478
|
+
pagination: {
|
|
479
|
+
next: nextCursor,
|
|
480
|
+
prev: hasPrev && firstItem ? encodeCursor(firstItem) : null
|
|
481
|
+
}
|
|
482
|
+
};
|
|
483
|
+
}
|
|
484
|
+
/**
 * Marks a running step attempt as completed, storing its JSON output.
 * The UPDATE is joined to workflow_runs (via updateFrom) so the attempt is
 * only completed while its workflow run is still "running" AND still owned
 * by the calling worker — a lost lease or external state change makes the
 * update match zero rows.
 * Returns the updated row, or delegates a zero-row update to
 * handleStepAttemptUpdateMiss (which returns null for external pause/cancel,
 * otherwise throws).
 */
async completeStepAttempt(params) {
if (!this.initialized) throw new Error("Backend not initialized");
logger.info("Marking step attempt as completed: {workflowRunId}, {stepAttemptId}, {workerId}", {
workflowRunId: params.workflowRunId,
stepAttemptId: params.stepAttemptId,
workerId: params.workerId
});
// Guarded single-statement UPDATE: the sa.status = 'running' / wr.status = 'running'
// / wr.worker_id = workerId predicates make completion safe against concurrent
// pause, cancel, or worker takeover.
const [updated] = await this.knex.withSchema(DEFAULT_SCHEMA).table("step_attempts as sa").update({
status: "completed",
output: JSON.stringify(params.output),
error: null,
finished_at: this.knex.fn.now(),
updated_at: this.knex.fn.now()
}).updateFrom(`${DEFAULT_SCHEMA}.workflow_runs as wr`).where("sa.namespace_id", this.namespaceId).where("sa.workflow_run_id", params.workflowRunId).where("sa.id", params.stepAttemptId).where("sa.status", "running").where("wr.namespace_id", this.knex.ref("sa.namespace_id")).where("wr.id", this.knex.ref("sa.workflow_run_id")).where("wr.status", "running").where("wr.worker_id", params.workerId).returning("sa.*");
// Zero rows updated: decide between benign external state change and hard failure.
if (!updated) return this.handleStepAttemptUpdateMiss("completed", params);
return updated;
}
|
|
501
|
+
async failStepAttempt(params) {
|
|
502
|
+
if (!this.initialized) throw new Error("Backend not initialized");
|
|
503
|
+
logger.info("Marking step attempt as failed: {workflowRunId}, {stepAttemptId}, {workerId}", {
|
|
504
|
+
workflowRunId: params.workflowRunId,
|
|
505
|
+
stepAttemptId: params.stepAttemptId,
|
|
506
|
+
workerId: params.workerId
|
|
507
|
+
});
|
|
508
|
+
logger.info("Error: {error.message}", { error: params.error.message });
|
|
509
|
+
const [updated] = await this.knex.withSchema(DEFAULT_SCHEMA).table("step_attempts as sa").update({
|
|
510
|
+
status: "failed",
|
|
511
|
+
output: null,
|
|
512
|
+
error: JSON.stringify(params.error),
|
|
513
|
+
finished_at: this.knex.fn.now(),
|
|
514
|
+
updated_at: this.knex.fn.now()
|
|
515
|
+
}).updateFrom(`${DEFAULT_SCHEMA}.workflow_runs as wr`).where("sa.namespace_id", this.namespaceId).where("sa.workflow_run_id", params.workflowRunId).where("sa.id", params.stepAttemptId).where("sa.status", "running").where("wr.namespace_id", this.knex.ref("sa.namespace_id")).where("wr.id", this.knex.ref("sa.workflow_run_id")).where("wr.status", "running").where("wr.worker_id", params.workerId).returning("sa.*");
|
|
516
|
+
if (!updated) return this.handleStepAttemptUpdateMiss("failed", params);
|
|
517
|
+
return updated;
|
|
518
|
+
}
|
|
519
|
+
/**
* Called when the guarded UPDATE in completeStepAttempt/failStepAttempt
* affected zero rows, to determine whether that was caused by an external
* state change (pause/cancel) of the workflow run.
* - If so, the step attempt's status is aligned with the workflow run's
*   status and null is returned.
* - Otherwise the situation is unexpected, so an error is thrown.
*/
async handleStepAttemptUpdateMiss(method, params) {
const wr = await this.getWorkflowRun({ workflowRunId: params.workflowRunId });
if (wr && (wr.status === "paused" || wr.status === "canceled")) {
// Propagate the run's paused/canceled status onto the still-open attempt.
await this.knex.withSchema(DEFAULT_SCHEMA).table("step_attempts").where("namespace_id", this.namespaceId).where("id", params.stepAttemptId).whereIn("status", ["running", "paused"]).update({
status: wr.status,
updated_at: this.knex.fn.now()
});
return null;
}
// Not explained by pause/cancel — surface the failure to the caller.
logger.error("Failed to mark step attempt {method}: {params}", {
method,
params
});
throw new Error(`Failed to mark step attempt ${method}`);
}
|
|
540
|
+
};
|
|
543
541
|
/**
 * Encodes a row's sort key (createdAt + id) as an opaque base64-JSON
 * pagination cursor; inverse of decodeCursor.
 */
function encodeCursor(item) {
	const payload = {
		createdAt: item.createdAt.toISOString(),
		id: item.id
	};
	return Buffer.from(JSON.stringify(payload)).toString("base64");
}
|
|
550
|
-
|
|
551
|
-
|
|
552
|
-
|
|
553
|
-
|
|
554
|
-
|
|
555
|
-
|
|
556
|
-
|
|
547
|
+
/**
 * Decodes an opaque base64-JSON pagination cursor back into
 * { createdAt: Date, id }; inverse of encodeCursor.
 */
function decodeCursor(cursor) {
	const { createdAt, id } = JSON.parse(Buffer.from(cursor, "base64").toString("utf8"));
	return {
		createdAt: new Date(createdAt),
		id
	};
}
|
|
558
555
|
|
|
556
|
+
//#endregion
|
|
557
|
+
export { BackendPostgres };
|
|
559
558
|
//# sourceMappingURL=backend.js.map
|